) args[1];
+ if(arr.size()>0){
+ MessageObject msg=arr.get(0);
+ if(!TextUtils.isEmpty(msg.messageText)){
+ Matcher matcher=Pattern.compile("[0-9]+").matcher(msg.messageText);
+ if(matcher.find()){
+ code[0]=matcher.group();
+ try{barrier.await(10, TimeUnit.MILLISECONDS);}catch(Exception x){}
+ }
+ }
+ }
+ }
+ }
+ }
+ };
+ AndroidUtilities.runOnUIThread(new Runnable(){
+ @Override
+ public void run(){
+ NotificationCenter.getInstance().addObserver(listener, NotificationCenter.didReceivedNewMessages);
+ }
+ });
+ try{barrier.await(10, TimeUnit.SECONDS);}catch(Exception x){}
+ AndroidUtilities.runOnUIThread(new Runnable(){
+ @Override
+ public void run(){
+ NotificationCenter.getInstance().removeObserver(listener, NotificationCenter.didReceivedNewMessages);
+ }
+ });
+ DataOutputStream out=new DataOutputStream(ch.getOutputStream(apiClient).await().getOutputStream());
+ if(code!=null)
+ out.writeUTF(code[0]);
+ else
+ out.writeUTF("");
+ out.flush();
+ out.close();
+ }
+ }catch(Exception x){
+ FileLog.e("error processing wear request", x);
+ }
+ ch.close(apiClient).await();
+ apiClient.disconnect();
+ }
+ }).start();
+ }
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java
index ef3469b19..b98fc84ab 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/browser/Browser.java
@@ -196,6 +196,14 @@ public class Browser {
public static boolean isInternalUri(Uri uri) {
String host = uri.getHost();
host = host != null ? host.toLowerCase() : "";
- return "tg".equals(uri.getScheme()) || "telegram.me".equals(host) || "t.me".equals(host) || "telegram.dog".equals(host);
+ if ("tg".equals(uri.getScheme())) {
+ return true;
+ } else if ("telegram.me".equals(host) || "t.me".equals(host) || "telegram.dog".equals(host) || "telesco.pe".equals(host)) {
+ String path = uri.getPath();
+ if (path != null && path.length() > 1) {
+ return true;
+ }
+ }
+ return false;
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java
index 0cb970d8a..4365545ae 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraController.java
@@ -53,6 +53,7 @@ public class CameraController implements MediaRecorder.OnInfoListener {
private VideoTakeCallback onVideoTakeCallback;
private boolean recordingSmallVideo;
private boolean cameraInitied;
+ private boolean loadingCameras;
private static volatile CameraController Instance = null;
@@ -78,9 +79,10 @@ public class CameraController implements MediaRecorder.OnInfoListener {
}
public void initCamera() {
- if (cameraInitied) {
+ if (loadingCameras || cameraInitied) {
return;
}
+ loadingCameras = true;
threadPool.execute(new Runnable() {
@Override
public void run() {
@@ -102,6 +104,7 @@ public class CameraController implements MediaRecorder.OnInfoListener {
Camera.Size size = list.get(a);
if (size.height < 2160 && size.width < 2160) {
cameraInfo.previewSizes.add(new Size(size.width, size.height));
+ FileLog.e("preview size = " + size.width + " " + size.height);
}
}
@@ -110,6 +113,7 @@ public class CameraController implements MediaRecorder.OnInfoListener {
Camera.Size size = list.get(a);
if (!"samsung".equals(Build.MANUFACTURER) || !"jflteuc".equals(Build.PRODUCT) || size.width < 2048) {
cameraInfo.pictureSizes.add(new Size(size.width, size.height));
+ FileLog.e("picture size = " + size.width + " " + size.height);
}
}
@@ -121,11 +125,19 @@ public class CameraController implements MediaRecorder.OnInfoListener {
AndroidUtilities.runOnUIThread(new Runnable() {
@Override
public void run() {
+ loadingCameras = false;
cameraInitied = true;
NotificationCenter.getInstance().postNotificationName(NotificationCenter.cameraInitied);
}
});
} catch (Exception e) {
+ AndroidUtilities.runOnUIThread(new Runnable() {
+ @Override
+ public void run() {
+ loadingCameras = false;
+ cameraInitied = false;
+ }
+ });
FileLog.e(e);
}
}
@@ -392,6 +404,44 @@ public class CameraController implements MediaRecorder.OnInfoListener {
});
}
+ public void openRound(final CameraSession session, final SurfaceTexture texture, final Runnable callback, final Runnable configureCallback) {
+ if (session == null || texture == null) {
+ FileLog.e("failed to open round " + session + " tex = " + texture);
+ return;
+ }
+ threadPool.execute(new Runnable() {
+ @SuppressLint("NewApi")
+ @Override
+ public void run() {
+ Camera camera = session.cameraInfo.camera;
+ try {
+ FileLog.e("start creating round camera session");
+ if (camera == null) {
+ camera = session.cameraInfo.camera = Camera.open(session.cameraInfo.cameraId);
+ }
+ Camera.Parameters params = camera.getParameters();
+
+ session.configureRoundCamera();
+ if (configureCallback != null) {
+ configureCallback.run();
+ }
+ camera.setPreviewTexture(texture);
+ camera.startPreview();
+ if (callback != null) {
+ AndroidUtilities.runOnUIThread(callback);
+ }
+ FileLog.e("round camera session created");
+ } catch (Exception e) {
+ session.cameraInfo.camera = null;
+ if (camera != null) {
+ camera.release();
+ }
+ FileLog.e(e);
+ }
+ }
+ });
+ }
+
public void open(final CameraSession session, final SurfaceTexture texture, final Runnable callback, final Runnable prestartCallback) {
if (session == null || texture == null) {
return;
@@ -477,7 +527,9 @@ public class CameraController implements MediaRecorder.OnInfoListener {
if (recordingSmallVideo) {
pictureSize = new Size(4, 3);
pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), 640, 480, pictureSize);
- recorder.setVideoEncodingBitRate(900000);
+ recorder.setVideoEncodingBitRate(900000 * 2);
+ recorder.setAudioEncodingBitRate(32000);
+ recorder.setAudioChannels(1);
} else {
pictureSize = new Size(16, 9);
pictureSize = CameraController.chooseOptimalSize(info.getPictureSizes(), 720, 480, pictureSize);
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java
index 7395b5e81..f98f8e3cb 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraInfo.java
@@ -41,6 +41,10 @@ public class CameraInfo {
return pictureSizes;
}
+ public boolean isFrontface() {
+ return frontCamera != 0;
+ }
+
/*private int getScore(CameraSelectionCriteria criteria) {
int score = 10;
if (criteria != null) {
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java
index 91b3910a6..faaa2dd7a 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraSession.java
@@ -24,6 +24,7 @@ import org.telegram.messenger.ApplicationLoader;
import org.telegram.messenger.FileLog;
import java.util.ArrayList;
+import java.util.List;
public class CameraSession {
@@ -39,6 +40,7 @@ public class CameraSession {
private boolean initied;
private boolean meteringAreaSupported;
private int currentOrientation;
+ private int diffOrientation;
private int jpegOrientation;
private boolean sameTakePictureOrientation;
@@ -143,11 +145,11 @@ public class CameraSession {
return currentFlashMode;
}
- protected void setInitied() {
+ public void setInitied() {
initied = true;
}
- protected boolean isInitied() {
+ public boolean isInitied() {
return initied;
}
@@ -155,10 +157,113 @@ public class CameraSession {
return currentOrientation;
}
+ public int getWorldAngle() {
+ return diffOrientation;
+ }
+
public boolean isSameTakePictureOrientation() {
return sameTakePictureOrientation;
}
+ protected void configureRoundCamera() {
+ try {
+ isVideo = true;
+ Camera camera = cameraInfo.camera;
+ if (camera != null) {
+ Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.Parameters params = null;
+ try {
+ params = camera.getParameters();
+ } catch (Exception e) {
+ FileLog.e(e);
+ }
+
+ Camera.getCameraInfo(cameraInfo.getCameraId(), info);
+
+ int displayOrientation = getDisplayOrientation(info, true);
+ int cameraDisplayOrientation;
+
+ if ("samsung".equals(Build.MANUFACTURER) && "sf2wifixx".equals(Build.PRODUCT)) {
+ cameraDisplayOrientation = 0;
+ } else {
+ int degrees = 0;
+ int temp = displayOrientation;
+ switch (temp) {
+ case Surface.ROTATION_0:
+ degrees = 0;
+ break;
+ case Surface.ROTATION_90:
+ degrees = 90;
+ break;
+ case Surface.ROTATION_180:
+ degrees = 180;
+ break;
+ case Surface.ROTATION_270:
+ degrees = 270;
+ break;
+ }
+ if (info.orientation % 90 != 0) {
+ info.orientation = 0;
+ }
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ temp = (info.orientation + degrees) % 360;
+ temp = (360 - temp) % 360;
+ } else {
+ temp = (info.orientation - degrees + 360) % 360;
+ }
+ cameraDisplayOrientation = temp;
+ }
+ camera.setDisplayOrientation(currentOrientation = cameraDisplayOrientation);
+ diffOrientation = currentOrientation - displayOrientation;
+
+ if (params != null) {
+ FileLog.e("set preview size = " + previewSize.getWidth() + " " + previewSize.getHeight());
+ params.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
+ FileLog.e("set picture size = " + pictureSize.getWidth() + " " + pictureSize.getHeight());
+ params.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
+ params.setPictureFormat(pictureFormat);
+ params.setRecordingHint(true);
+
+ String desiredMode = Camera.Parameters.FOCUS_MODE_AUTO;
+ if (params.getSupportedFocusModes().contains(desiredMode)) {
+ params.setFocusMode(desiredMode);
+ }
+
+ int outputOrientation = 0;
+ if (jpegOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ outputOrientation = (info.orientation - jpegOrientation + 360) % 360;
+ } else {
+ outputOrientation = (info.orientation + jpegOrientation) % 360;
+ }
+ }
+ try {
+ params.setRotation(outputOrientation);
+ if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+ sameTakePictureOrientation = (360 - displayOrientation) % 360 == outputOrientation;
+ } else {
+ sameTakePictureOrientation = displayOrientation == outputOrientation;
+ }
+ } catch (Exception e) {
+ //
+ }
+ params.setFlashMode(currentFlashMode);
+ try {
+ camera.setParameters(params);
+ } catch (Exception e) {
+ //
+ }
+
+ if (params.getMaxNumMeteringAreas() > 0) {
+ meteringAreaSupported = true;
+ }
+ }
+ }
+ } catch (Throwable e) {
+ FileLog.e(e);
+ }
+ }
+
protected void configurePhotoCamera() {
try {
Camera camera = cameraInfo.camera;
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java
index d302b2f0b..400ab1183 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/CameraView.java
@@ -51,6 +51,7 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur
private float focusProgress = 1.0f;
private float innerAlpha;
private float outerAlpha;
+ private boolean initialFrontface;
private int cx;
private int cy;
private Paint outerPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
@@ -65,7 +66,7 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur
public CameraView(Context context, boolean frontface) {
super(context, null);
- isFrontface = frontface;
+ initialFrontface = isFrontface = frontface;
textureView = new TextureView(context);
textureView.setSurfaceTextureListener(this);
addView(textureView);
@@ -132,14 +133,20 @@ public class CameraView extends FrameLayout implements TextureView.SurfaceTextur
org.telegram.messenger.camera.Size aspectRatio;
int wantedWidth;
int wantedHeight;
- if (Math.abs(screenSize - size4to3) < 0.1f) {
- aspectRatio = new Size(4, 3);
- wantedWidth = 1280;
- wantedHeight = 960;
- } else {
+ if (initialFrontface) {
aspectRatio = new Size(16, 9);
- wantedWidth = 1280;
- wantedHeight = 720;
+ wantedWidth = 480;
+ wantedHeight = 270;
+ } else {
+ if (Math.abs(screenSize - size4to3) < 0.1f) {
+ aspectRatio = new Size(4, 3);
+ wantedWidth = 1280;
+ wantedHeight = 960;
+ } else {
+ aspectRatio = new Size(16, 9);
+ wantedWidth = 1280;
+ wantedHeight = 720;
+ }
}
if (textureView.getWidth() > 0 && textureView.getHeight() > 0) {
int width = Math.min(AndroidUtilities.displaySize.x, AndroidUtilities.displaySize.y);
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/camera/Size.java b/TMessagesProj/src/main/java/org/telegram/messenger/camera/Size.java
index 9805500c1..3cdc7e1c3 100644
--- a/TMessagesProj/src/main/java/org/telegram/messenger/camera/Size.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/camera/Size.java
@@ -67,6 +67,6 @@ public final class Size {
return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
}
- private final int mWidth;
- private final int mHeight;
+ public final int mWidth;
+ public final int mHeight;
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/BaseRenderer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/BaseRenderer.java
index b04a0d640..7d8aeed57 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/BaseRenderer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/BaseRenderer.java
@@ -28,6 +28,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private final int trackType;
+ private RendererConfiguration configuration;
private int index;
private int state;
private SampleStream stream;
@@ -70,9 +71,11 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
- public final void enable(Format[] formats, SampleStream stream, long positionUs,
- boolean joining, long offsetUs) throws ExoPlaybackException {
+ public final void enable(RendererConfiguration configuration, Format[] formats,
+ SampleStream stream, long positionUs, boolean joining, long offsetUs)
+ throws ExoPlaybackException {
Assertions.checkState(state == STATE_DISABLED);
+ this.configuration = configuration;
state = STATE_ENABLED;
onEnabled(joining);
replaceStream(formats, stream, offsetUs);
@@ -107,10 +110,15 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
}
@Override
- public final void setCurrentStreamIsFinal() {
+ public final void setCurrentStreamFinal() {
streamIsFinal = true;
}
+ @Override
+ public final boolean isCurrentStreamFinal() {
+ return streamIsFinal;
+ }
+
@Override
public final void maybeThrowStreamError() throws IOException {
stream.maybeThrowError();
@@ -119,6 +127,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override
public final void resetPosition(long positionUs) throws ExoPlaybackException {
streamIsFinal = false;
+ readEndOfStream = false;
onPositionReset(positionUs, false);
}
@@ -194,8 +203,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* @param joining Whether this renderer is being enabled to join an ongoing playback.
* @throws ExoPlaybackException If an error occurs.
*/
- protected void onPositionReset(long positionUs, boolean joining)
- throws ExoPlaybackException {
+ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
// Do nothing.
}
@@ -232,10 +240,15 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// Methods to be called by subclasses.
+ /**
+ * Returns the configuration set when the renderer was most recently enabled.
+ */
+ protected final RendererConfiguration getConfiguration() {
+ return configuration;
+ }
+
/**
* Returns the index of the renderer within the player.
- *
- * @return The index of the renderer within the player.
*/
protected final int getIndex() {
return index;
@@ -243,29 +256,48 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
/**
* Reads from the enabled upstream source. If the upstream source has been read to the end then
- * {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamIsFinal()} has been
+ * {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been
* called. {@link C#RESULT_NOTHING_READ} is returned otherwise.
*
- * @see SampleStream#readData(FormatHolder, DecoderInputBuffer)
* @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
* @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
* end of the stream. If the end of the stream has been reached, the
* {@link C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
+ * @param formatRequired Whether the caller requires that the format of the stream be read even if
+ * it's not changing. A sample will never be read if set to true, however it is still possible
+ * for the end of stream or nothing to be read.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
* {@link C#RESULT_BUFFER_READ}.
*/
- protected final int readSource(FormatHolder formatHolder, DecoderInputBuffer buffer) {
- int result = stream.readData(formatHolder, buffer);
+ protected final int readSource(FormatHolder formatHolder, DecoderInputBuffer buffer,
+ boolean formatRequired) {
+ int result = stream.readData(formatHolder, buffer, formatRequired);
if (result == C.RESULT_BUFFER_READ) {
if (buffer.isEndOfStream()) {
readEndOfStream = true;
return streamIsFinal ? C.RESULT_BUFFER_READ : C.RESULT_NOTHING_READ;
}
buffer.timeUs += streamOffsetUs;
+ } else if (result == C.RESULT_FORMAT_READ) {
+ Format format = formatHolder.format;
+ if (format.subsampleOffsetUs != Format.OFFSET_SAMPLE_RELATIVE) {
+ format = format.copyWithSubsampleOffsetUs(format.subsampleOffsetUs + streamOffsetUs);
+ formatHolder.format = format;
+ }
}
return result;
}
+ /**
+ * Attempts to skip to the keyframe before the specified position, or to the end of the stream if
+ * {@code positionUs} is beyond it.
+ *
+ * @param positionUs The position in microseconds.
+ */
+ protected void skipSource(long positionUs) {
+ stream.skipData(positionUs - streamOffsetUs);
+ }
+
/**
* Returns whether the upstream source is ready.
*
@@ -275,13 +307,4 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return readEndOfStream ? streamIsFinal : stream.isReady();
}
- /**
- * Attempts to skip to the keyframe before the specified time.
- *
- * @param timeUs The specified time.
- */
- protected void skipToKeyframeBefore(long timeUs) {
- stream.skipToKeyframeBefore(timeUs);
- }
-
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/C.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/C.java
index 6de53ea22..91236b59c 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/C.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/C.java
@@ -15,9 +15,12 @@
*/
package org.telegram.messenger.exoplayer2;
+import android.annotation.TargetApi;
+import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.MediaCodec;
+import android.media.MediaFormat;
import android.support.annotation.IntDef;
import android.view.Surface;
import org.telegram.messenger.exoplayer2.util.Util;
@@ -74,6 +77,21 @@ public final class C {
*/
public static final String UTF8_NAME = "UTF-8";
+ /**
+ * The name of the UTF-16 charset.
+ */
+ public static final String UTF16_NAME = "UTF-16";
+
+ /**
+ * The name of the serif font family.
+ */
+ public static final String SERIF_NAME = "serif";
+
+ /**
+ * The name of the sans-serif font family.
+ */
+ public static final String SANS_SERIF_NAME = "sans-serif";
+
/**
* Crypto modes for a codec.
*/
@@ -96,6 +114,13 @@ public final class C {
@SuppressWarnings("InlinedApi")
public static final int CRYPTO_MODE_AES_CBC = MediaCodec.CRYPTO_MODE_AES_CBC;
+ /**
+ * Represents an unset {@link android.media.AudioTrack} session identifier. Equal to
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE}.
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int AUDIO_SESSION_ID_UNSET = AudioManager.AUDIO_SESSION_ID_GENERATE;
+
/**
* Represents an audio encoding, or an invalid or unset value.
*/
@@ -434,9 +459,16 @@ public final class C {
*/
public static final UUID UUID_NIL = new UUID(0L, 0L);
+ /**
+ * UUID for the ClearKey DRM scheme.
+ *
+ * ClearKey is supported on Android devices running Android 5.0 (API Level 21) and up.
+ */
+ public static final UUID CLEARKEY_UUID = new UUID(0x1077EFECC0B24D02L, 0xACE33C1E52E2FB4BL);
+
/**
* UUID for the Widevine DRM scheme.
- *
+ *
* Widevine is supported on Android devices running Android 4.3 (API Level 18) and up.
*/
public static final UUID WIDEVINE_UUID = new UUID(0xEDEF8BA979D64ACEL, 0xA3C827DCD51D21EDL);
@@ -463,15 +495,6 @@ public final class C {
*/
public static final int MSG_SET_VOLUME = 2;
- /**
- * A type of a message that can be passed to an audio {@link Renderer} via
- * {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
- * should be a {@link android.media.PlaybackParams}, or null, which will be used to configure the
- * underlying {@link android.media.AudioTrack}. The message object should not be modified by the
- * caller after it has been passed
- */
- public static final int MSG_SET_PLAYBACK_PARAMS = 3;
-
/**
* A type of a message that can be passed to an audio {@link Renderer} via
* {@link ExoPlayer#sendMessages} or {@link ExoPlayer#blockingSendMessages}. The message object
@@ -484,7 +507,7 @@ public final class C {
* introduce a brief gap in audio output. Note also that tracks in the same audio session must
* share the same routing, so a new audio session id will be generated.
*/
- public static final int MSG_SET_STREAM_TYPE = 4;
+ public static final int MSG_SET_STREAM_TYPE = 3;
/**
* The type of a message that can be passed to a {@link MediaCodec}-based video {@link Renderer}
@@ -494,7 +517,7 @@ public final class C {
* Note that the scaling mode only applies if the {@link Surface} targeted by the renderer is
* owned by a {@link android.view.SurfaceView}.
*/
- public static final int MSG_SET_SCALING_MODE = 5;
+ public static final int MSG_SET_SCALING_MODE = 4;
/**
* Applications or extensions may define custom {@code MSG_*} constants greater than or equal to
@@ -506,7 +529,13 @@ public final class C {
* The stereo mode for 360/3D/VR videos.
*/
@Retention(RetentionPolicy.SOURCE)
- @IntDef({Format.NO_VALUE, STEREO_MODE_MONO, STEREO_MODE_TOP_BOTTOM, STEREO_MODE_LEFT_RIGHT})
+ @IntDef({
+ Format.NO_VALUE,
+ STEREO_MODE_MONO,
+ STEREO_MODE_TOP_BOTTOM,
+ STEREO_MODE_LEFT_RIGHT,
+ STEREO_MODE_STEREO_MESH
+ })
public @interface StereoMode {}
/**
* Indicates Monoscopic stereo layout, used with 360/3D/VR videos.
@@ -520,6 +549,86 @@ public final class C {
* Indicates Left-Right stereo layout, used with 360/3D/VR videos.
*/
public static final int STEREO_MODE_LEFT_RIGHT = 2;
+ /**
+ * Indicates a stereo layout where the left and right eyes have separate meshes,
+ * used with 360/3D/VR videos.
+ */
+ public static final int STEREO_MODE_STEREO_MESH = 3;
+
+ /**
+ * Video colorspaces.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({Format.NO_VALUE, COLOR_SPACE_BT709, COLOR_SPACE_BT601, COLOR_SPACE_BT2020})
+ public @interface ColorSpace {}
+ /**
+ * @see MediaFormat#COLOR_STANDARD_BT709
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_SPACE_BT709 = MediaFormat.COLOR_STANDARD_BT709;
+ /**
+ * @see MediaFormat#COLOR_STANDARD_BT601_PAL
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_SPACE_BT601 = MediaFormat.COLOR_STANDARD_BT601_PAL;
+ /**
+ * @see MediaFormat#COLOR_STANDARD_BT2020
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_SPACE_BT2020 = MediaFormat.COLOR_STANDARD_BT2020;
+
+ /**
+ * Video color transfer characteristics.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({Format.NO_VALUE, COLOR_TRANSFER_SDR, COLOR_TRANSFER_ST2084, COLOR_TRANSFER_HLG})
+ public @interface ColorTransfer {}
+ /**
+ * @see MediaFormat#COLOR_TRANSFER_SDR_VIDEO
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_TRANSFER_SDR = MediaFormat.COLOR_TRANSFER_SDR_VIDEO;
+ /**
+ * @see MediaFormat#COLOR_TRANSFER_ST2084
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_TRANSFER_ST2084 = MediaFormat.COLOR_TRANSFER_ST2084;
+ /**
+ * @see MediaFormat#COLOR_TRANSFER_HLG
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_TRANSFER_HLG = MediaFormat.COLOR_TRANSFER_HLG;
+
+ /**
+ * Video color range.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({Format.NO_VALUE, COLOR_RANGE_LIMITED, COLOR_RANGE_FULL})
+ public @interface ColorRange {}
+ /**
+ * @see MediaFormat#COLOR_RANGE_LIMITED
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_RANGE_LIMITED = MediaFormat.COLOR_RANGE_LIMITED;
+ /**
+ * @see MediaFormat#COLOR_RANGE_FULL
+ */
+ @SuppressWarnings("InlinedApi")
+ public static final int COLOR_RANGE_FULL = MediaFormat.COLOR_RANGE_FULL;
+
+ /**
+ * Priority for media playback.
+ *
+ *
+ * <p>Larger values indicate higher priorities.
+ */
+ public static final int PRIORITY_PLAYBACK = 0;
+
+ /**
+ * Priority for media downloading.
+ *
+ *
+ * <p>Larger values indicate higher priorities.
+ */
+ public static final int PRIORITY_DOWNLOAD = PRIORITY_PLAYBACK - 1000;
/**
* Converts a time in microseconds to the corresponding time in milliseconds, preserving
@@ -543,4 +652,13 @@ public final class C {
return timeMs == TIME_UNSET ? TIME_UNSET : (timeMs * 1000);
}
+ /**
+ * Returns a newly generated {@link android.media.AudioTrack} session identifier.
+ */
+ @TargetApi(21)
+ public static int generateAudioSessionIdV21(Context context) {
+ return ((AudioManager) context.getSystemService(Context.AUDIO_SERVICE))
+ .generateAudioSessionId();
+ }
+
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultLoadControl.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultLoadControl.java
index 1293e95bb..17072445e 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultLoadControl.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultLoadControl.java
@@ -19,6 +19,7 @@ import org.telegram.messenger.exoplayer2.source.TrackGroupArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelectionArray;
import org.telegram.messenger.exoplayer2.upstream.Allocator;
import org.telegram.messenger.exoplayer2.upstream.DefaultAllocator;
+import org.telegram.messenger.exoplayer2.util.PriorityTaskManager;
import org.telegram.messenger.exoplayer2.util.Util;
/**
@@ -60,6 +61,7 @@ public final class DefaultLoadControl implements LoadControl {
private final long maxBufferUs;
private final long bufferForPlaybackUs;
private final long bufferForPlaybackAfterRebufferUs;
+ private final PriorityTaskManager priorityTaskManager;
private int targetBufferSize;
private boolean isBuffering;
@@ -97,11 +99,36 @@ public final class DefaultLoadControl implements LoadControl {
*/
public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs) {
+ this(allocator, minBufferMs, maxBufferMs, bufferForPlaybackMs, bufferForPlaybackAfterRebufferMs,
+ null);
+ }
+
+ /**
+ * Constructs a new instance.
+ *
+ * @param allocator The {@link DefaultAllocator} used by the loader.
+ * @param minBufferMs The minimum duration of media that the player will attempt to ensure is
+ * buffered at all times, in milliseconds.
+ * @param maxBufferMs The maximum duration of media that the player will attempt buffer, in
+ * milliseconds.
+ * @param bufferForPlaybackMs The duration of media that must be buffered for playback to start or
+ * resume following a user action such as a seek, in milliseconds.
+ * @param bufferForPlaybackAfterRebufferMs The default duration of media that must be buffered for
+ * playback to resume after a rebuffer, in milliseconds. A rebuffer is defined to be caused by
+ * buffer depletion rather than a user action.
+ * @param priorityTaskManager If not null, registers itself as a task with priority
+ * {@link C#PRIORITY_PLAYBACK} during loading periods, and unregisters itself during draining
+ * periods.
+ */
+ public DefaultLoadControl(DefaultAllocator allocator, int minBufferMs, int maxBufferMs,
+ long bufferForPlaybackMs, long bufferForPlaybackAfterRebufferMs,
+ PriorityTaskManager priorityTaskManager) {
this.allocator = allocator;
minBufferUs = minBufferMs * 1000L;
maxBufferUs = maxBufferMs * 1000L;
bufferForPlaybackUs = bufferForPlaybackMs * 1000L;
bufferForPlaybackAfterRebufferUs = bufferForPlaybackAfterRebufferMs * 1000L;
+ this.priorityTaskManager = priorityTaskManager;
}
@Override
@@ -146,8 +173,16 @@ public final class DefaultLoadControl implements LoadControl {
public boolean shouldContinueLoading(long bufferedDurationUs) {
int bufferTimeState = getBufferTimeState(bufferedDurationUs);
boolean targetBufferSizeReached = allocator.getTotalBytesAllocated() >= targetBufferSize;
+ boolean wasBuffering = isBuffering;
isBuffering = bufferTimeState == BELOW_LOW_WATERMARK
|| (bufferTimeState == BETWEEN_WATERMARKS && isBuffering && !targetBufferSizeReached);
+ if (priorityTaskManager != null && isBuffering != wasBuffering) {
+ if (isBuffering) {
+ priorityTaskManager.add(C.PRIORITY_PLAYBACK);
+ } else {
+ priorityTaskManager.remove(C.PRIORITY_PLAYBACK);
+ }
+ }
return isBuffering;
}
@@ -158,6 +193,9 @@ public final class DefaultLoadControl implements LoadControl {
private void reset(boolean resetAllocator) {
targetBufferSize = 0;
+ if (priorityTaskManager != null && isBuffering) {
+ priorityTaskManager.remove(C.PRIORITY_PLAYBACK);
+ }
isBuffering = false;
if (resetAllocator) {
allocator.reset();
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultRenderersFactory.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultRenderersFactory.java
new file mode 100755
index 000000000..0c09499e0
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/DefaultRenderersFactory.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2;
+
+import android.content.Context;
+import android.os.Handler;
+import android.os.Looper;
+import android.support.annotation.IntDef;
+import android.util.Log;
+import org.telegram.messenger.exoplayer2.audio.AudioCapabilities;
+import org.telegram.messenger.exoplayer2.audio.AudioProcessor;
+import org.telegram.messenger.exoplayer2.audio.AudioRendererEventListener;
+import org.telegram.messenger.exoplayer2.audio.MediaCodecAudioRenderer;
+import org.telegram.messenger.exoplayer2.drm.DrmSessionManager;
+import org.telegram.messenger.exoplayer2.drm.FrameworkMediaCrypto;
+import org.telegram.messenger.exoplayer2.mediacodec.MediaCodecSelector;
+import org.telegram.messenger.exoplayer2.metadata.MetadataRenderer;
+import org.telegram.messenger.exoplayer2.text.TextRenderer;
+import org.telegram.messenger.exoplayer2.trackselection.TrackSelector;
+import org.telegram.messenger.exoplayer2.video.MediaCodecVideoRenderer;
+import org.telegram.messenger.exoplayer2.video.VideoRendererEventListener;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+
+/**
+ * Default {@link RenderersFactory} implementation.
+ */
+public class DefaultRenderersFactory implements RenderersFactory {
+
+ /**
+ * The default maximum duration for which a video renderer can attempt to seamlessly join an
+ * ongoing playback.
+ */
+ public static final long DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS = 5000;
+
+ /**
+ * Modes for using extension renderers.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON,
+ EXTENSION_RENDERER_MODE_PREFER})
+ public @interface ExtensionRendererMode {}
+ /**
+ * Do not allow use of extension renderers.
+ */
+ public static final int EXTENSION_RENDERER_MODE_OFF = 0;
+ /**
+ * Allow use of extension renderers. Extension renderers are indexed after core renderers of the
+ * same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
+ * prefer to use a core renderer to an extension renderer in the case that both are able to play
+ * a given track.
+ */
+ public static final int EXTENSION_RENDERER_MODE_ON = 1;
+ /**
+ * Allow use of extension renderers. Extension renderers are indexed before core renderers of the
+ * same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
+ * prefer to use an extension renderer to a core renderer in the case that both are able to play
+ * a given track.
+ */
+ public static final int EXTENSION_RENDERER_MODE_PREFER = 2;
+
+ private static final String TAG = "DefaultRenderersFactory";
+
+ protected static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50;
+
+ private final Context context;
+ private final DrmSessionManager<FrameworkMediaCrypto> drmSessionManager;
+ private final @ExtensionRendererMode int extensionRendererMode;
+ private final long allowedVideoJoiningTimeMs;
+
+ /**
+ * @param context A {@link Context}.
+ */
+ public DefaultRenderersFactory(Context context) {
+ this(context, null);
+ }
+
+ /**
+ * @param context A {@link Context}.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
+ * playbacks are not required.
+ */
+ public DefaultRenderersFactory(Context context,
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager) {
+ this(context, drmSessionManager, EXTENSION_RENDERER_MODE_OFF);
+ }
+
+ /**
+ * @param context A {@link Context}.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
+ * playbacks are not required.
+ * @param extensionRendererMode The extension renderer mode, which determines if and how
+ * available extension renderers are used. Note that extensions must be included in the
+ * application build for them to be considered available.
+ */
+ public DefaultRenderersFactory(Context context,
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ @ExtensionRendererMode int extensionRendererMode) {
+ this(context, drmSessionManager, extensionRendererMode,
+ DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
+ }
+
+ /**
+ * @param context A {@link Context}.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if DRM protected
+ * playbacks are not required.
+ * @param extensionRendererMode The extension renderer mode, which determines if and how
+ * available extension renderers are used. Note that extensions must be included in the
+ * application build for them to be considered available.
+ * @param allowedVideoJoiningTimeMs The maximum duration for which video renderers can attempt
+ * to seamlessly join an ongoing playback.
+ */
+ public DefaultRenderersFactory(Context context,
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ @ExtensionRendererMode int extensionRendererMode, long allowedVideoJoiningTimeMs) {
+ this.context = context;
+ this.drmSessionManager = drmSessionManager;
+ this.extensionRendererMode = extensionRendererMode;
+ this.allowedVideoJoiningTimeMs = allowedVideoJoiningTimeMs;
+ }
+
+ @Override
+ public Renderer[] createRenderers(Handler eventHandler,
+ VideoRendererEventListener videoRendererEventListener,
+ AudioRendererEventListener audioRendererEventListener,
+ TextRenderer.Output textRendererOutput, MetadataRenderer.Output metadataRendererOutput) {
+ ArrayList<Renderer> renderersList = new ArrayList<>();
+ buildVideoRenderers(context, drmSessionManager, allowedVideoJoiningTimeMs,
+ eventHandler, videoRendererEventListener, extensionRendererMode, renderersList);
+ buildAudioRenderers(context, drmSessionManager, buildAudioProcessors(),
+ eventHandler, audioRendererEventListener, extensionRendererMode, renderersList);
+ buildTextRenderers(context, textRendererOutput, eventHandler.getLooper(),
+ extensionRendererMode, renderersList);
+ buildMetadataRenderers(context, metadataRendererOutput, eventHandler.getLooper(),
+ extensionRendererMode, renderersList);
+ buildMiscellaneousRenderers(context, eventHandler, extensionRendererMode, renderersList);
+ return renderersList.toArray(new Renderer[renderersList.size()]);
+ }
+
+ /**
+ * Builds video renderers for use by the player.
+ *
+ * @param context The {@link Context} associated with the player.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player
+ * will not be used for DRM protected playbacks.
+ * @param allowedVideoJoiningTimeMs The maximum duration in milliseconds for which video
+ * renderers can attempt to seamlessly join an ongoing playback.
+ * @param eventHandler A handler associated with the main thread's looper.
+ * @param eventListener An event listener.
+ * @param extensionRendererMode The extension renderer mode.
+ * @param out An array to which the built renderers should be appended.
+ */
+ protected void buildVideoRenderers(Context context,
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, long allowedVideoJoiningTimeMs,
+ Handler eventHandler, VideoRendererEventListener eventListener,
+ @ExtensionRendererMode int extensionRendererMode, ArrayList<Renderer> out) {
+ out.add(new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT,
+ allowedVideoJoiningTimeMs, drmSessionManager, false, eventHandler, eventListener,
+ MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
+
+ if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
+ return;
+ }
+ int extensionRendererIndex = out.size();
+ if (extensionRendererMode == EXTENSION_RENDERER_MODE_PREFER) {
+ extensionRendererIndex--;
+ }
+
+ try {
+ Class<?> clazz =
+ Class.forName("org.telegram.messenger.exoplayer2.ext.vp9.LibvpxVideoRenderer");
+ Constructor<?> constructor = clazz.getConstructor(boolean.class, long.class, Handler.class,
+ VideoRendererEventListener.class, int.class);
+ Renderer renderer = (Renderer) constructor.newInstance(true, allowedVideoJoiningTimeMs,
+ eventHandler, eventListener, MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
+ out.add(extensionRendererIndex++, renderer);
+ Log.i(TAG, "Loaded LibvpxVideoRenderer.");
+ } catch (ClassNotFoundException e) {
+ // Expected if the app was built without the extension.
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Builds audio renderers for use by the player.
+ *
+ * @param context The {@link Context} associated with the player.
+ * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player
+ * will not be used for DRM protected playbacks.
+ * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio
+ * buffers before output. May be empty.
+ * @param eventHandler A handler to use when invoking event listeners and outputs.
+ * @param eventListener An event listener.
+ * @param extensionRendererMode The extension renderer mode.
+ * @param out An array to which the built renderers should be appended.
+ */
+ protected void buildAudioRenderers(Context context,
+ DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
+ AudioProcessor[] audioProcessors, Handler eventHandler,
+ AudioRendererEventListener eventListener, @ExtensionRendererMode int extensionRendererMode,
+ ArrayList<Renderer> out) {
+ out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
+ eventHandler, eventListener, AudioCapabilities.getCapabilities(context), audioProcessors));
+
+ if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
+ return;
+ }
+ int extensionRendererIndex = out.size();
+ if (extensionRendererMode == EXTENSION_RENDERER_MODE_PREFER) {
+ extensionRendererIndex--;
+ }
+
+ try {
+ Class<?> clazz =
+ Class.forName("org.telegram.messenger.exoplayer2.ext.opus.LibopusAudioRenderer");
+ Constructor<?> constructor = clazz.getConstructor(Handler.class,
+ AudioRendererEventListener.class, AudioProcessor[].class);
+ Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
+ audioProcessors);
+ out.add(extensionRendererIndex++, renderer);
+ Log.i(TAG, "Loaded LibopusAudioRenderer.");
+ } catch (ClassNotFoundException e) {
+ // Expected if the app was built without the extension.
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+
+ try {
+ Class<?> clazz =
+ Class.forName("org.telegram.messenger.exoplayer2.ext.flac.LibflacAudioRenderer");
+ Constructor<?> constructor = clazz.getConstructor(Handler.class,
+ AudioRendererEventListener.class, AudioProcessor[].class);
+ Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
+ audioProcessors);
+ out.add(extensionRendererIndex++, renderer);
+ Log.i(TAG, "Loaded LibflacAudioRenderer.");
+ } catch (ClassNotFoundException e) {
+ // Expected if the app was built without the extension.
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+
+ try {
+ Class<?> clazz =
+ Class.forName("org.telegram.messenger.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
+ Constructor<?> constructor = clazz.getConstructor(Handler.class,
+ AudioRendererEventListener.class, AudioProcessor[].class);
+ Renderer renderer = (Renderer) constructor.newInstance(eventHandler, eventListener,
+ audioProcessors);
+ out.add(extensionRendererIndex++, renderer);
+ Log.i(TAG, "Loaded FfmpegAudioRenderer.");
+ } catch (ClassNotFoundException e) {
+ // Expected if the app was built without the extension.
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Builds text renderers for use by the player.
+ *
+ * @param context The {@link Context} associated with the player.
+ * @param output An output for the renderers.
+ * @param outputLooper The looper associated with the thread on which the output should be
+ * called.
+ * @param extensionRendererMode The extension renderer mode.
+ * @param out An array to which the built renderers should be appended.
+ */
+ protected void buildTextRenderers(Context context, TextRenderer.Output output,
+ Looper outputLooper, @ExtensionRendererMode int extensionRendererMode,
+ ArrayList<Renderer> out) {
+ out.add(new TextRenderer(output, outputLooper));
+ }
+
+ /**
+ * Builds metadata renderers for use by the player.
+ *
+ * @param context The {@link Context} associated with the player.
+ * @param output An output for the renderers.
+ * @param outputLooper The looper associated with the thread on which the output should be
+ * called.
+ * @param extensionRendererMode The extension renderer mode.
+ * @param out An array to which the built renderers should be appended.
+ */
+ protected void buildMetadataRenderers(Context context, MetadataRenderer.Output output,
+ Looper outputLooper, @ExtensionRendererMode int extensionRendererMode,
+ ArrayList<Renderer> out) {
+ out.add(new MetadataRenderer(output, outputLooper));
+ }
+
+ /**
+ * Builds any miscellaneous renderers used by the player.
+ *
+ * @param context The {@link Context} associated with the player.
+ * @param eventHandler A handler to use when invoking event listeners and outputs.
+ * @param extensionRendererMode The extension renderer mode.
+ * @param out An array to which the built renderers should be appended.
+ */
+ protected void buildMiscellaneousRenderers(Context context, Handler eventHandler,
+ @ExtensionRendererMode int extensionRendererMode, ArrayList<Renderer> out) {
+ // Do nothing.
+ }
+
+ /**
+ * Builds an array of {@link AudioProcessor}s that will process PCM audio before output.
+ */
+ protected AudioProcessor[] buildAudioProcessors() {
+ return new AudioProcessor[0];
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlaybackException.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlaybackException.java
index 391b6c415..4084984e4 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlaybackException.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlaybackException.java
@@ -56,8 +56,7 @@ public final class ExoPlaybackException extends Exception {
* The type of the playback failure. One of {@link #TYPE_SOURCE}, {@link #TYPE_RENDERER} and
* {@link #TYPE_UNEXPECTED}.
*/
- @Type
- public final int type;
+ @Type public final int type;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the index of the renderer.
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayer.java
index d85ff79a2..2b3795e21 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayer.java
@@ -15,6 +15,7 @@
*/
package org.telegram.messenger.exoplayer2;
+import android.support.annotation.Nullable;
import org.telegram.messenger.exoplayer2.audio.MediaCodecAudioRenderer;
import org.telegram.messenger.exoplayer2.metadata.MetadataRenderer;
import org.telegram.messenger.exoplayer2.source.ConcatenatingMediaSource;
@@ -23,9 +24,6 @@ import org.telegram.messenger.exoplayer2.source.MediaSource;
import org.telegram.messenger.exoplayer2.source.MergingMediaSource;
import org.telegram.messenger.exoplayer2.source.SingleSampleMediaSource;
import org.telegram.messenger.exoplayer2.source.TrackGroupArray;
-import org.telegram.messenger.exoplayer2.source.dash.DashMediaSource;
-import org.telegram.messenger.exoplayer2.source.hls.HlsMediaSource;
-import org.telegram.messenger.exoplayer2.source.smoothstreaming.SsMediaSource;
import org.telegram.messenger.exoplayer2.text.TextRenderer;
import org.telegram.messenger.exoplayer2.trackselection.DefaultTrackSelector;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelectionArray;
@@ -47,12 +45,11 @@ import org.telegram.messenger.exoplayer2.video.MediaCodecVideoRenderer;
*
* - A {@link MediaSource} that defines the media to be played, loads the media, and from
* which the loaded media can be read. A MediaSource is injected via {@link #prepare} at the start
- * of playback. The library provides default implementations for regular media files
- * ({@link ExtractorMediaSource}), DASH ({@link DashMediaSource}), SmoothStreaming
- * ({@link SsMediaSource}) and HLS ({@link HlsMediaSource}), implementations for merging
- * ({@link MergingMediaSource}) and concatenating ({@link ConcatenatingMediaSource}) other
- * MediaSources, and an implementation for loading single samples
- * ({@link SingleSampleMediaSource}) most often used for side-loaded subtitle and closed
+ * of playback. The library modules provide default implementations for regular media files
+ * ({@link ExtractorMediaSource}), DASH (DashMediaSource), SmoothStreaming (SsMediaSource) and HLS
+ * (HlsMediaSource), implementations for merging ({@link MergingMediaSource}) and concatenating
+ * ({@link ConcatenatingMediaSource}) other MediaSources, and an implementation for loading single
+ * samples ({@link SingleSampleMediaSource}) most often used for side-loaded subtitle and closed
* caption files.
* - {@link Renderer}s that render individual components of the media. The library
* provides default implementations for common media types ({@link MediaCodecVideoRenderer},
@@ -120,8 +117,8 @@ public interface ExoPlayer {
* removed from the timeline. The will not be reported via a separate call to
* {@link #onPositionDiscontinuity()}.
*
- * @param timeline The latest timeline, or null if the timeline is being cleared.
- * @param manifest The latest manifest, or null if the manifest is being cleared.
+ * @param timeline The latest timeline. Never null, but may be empty.
+ * @param manifest The latest manifest. May be null.
*/
void onTimelineChanged(Timeline timeline, Object manifest);
@@ -172,6 +169,16 @@ public interface ExoPlayer {
*/
void onPositionDiscontinuity();
+ /**
+ * Called when the current playback parameters change. The playback parameters may change due to
+ * a call to {@link ExoPlayer#setPlaybackParameters(PlaybackParameters)}, or the player itself
+ * may change them (for example, if audio playback switches to passthrough mode, where speed
+ * adjustment is no longer possible).
+ *
+ * @param playbackParameters The playback parameters.
+ */
+ void onPlaybackParametersChanged(PlaybackParameters playbackParameters);
+
}
/**
@@ -330,17 +337,41 @@ public interface ExoPlayer {
/**
* Seeks to a position specified in milliseconds in the current window.
*
- * @param windowPositionMs The seek position in the current window.
+ * @param positionMs The seek position in the current window, or {@link C#TIME_UNSET} to seek to
+ * the window's default position.
*/
- void seekTo(long windowPositionMs);
+ void seekTo(long positionMs);
/**
* Seeks to a position specified in milliseconds in the specified window.
*
* @param windowIndex The index of the window.
- * @param windowPositionMs The seek position in the specified window.
+ * @param positionMs The seek position in the specified window, or {@link C#TIME_UNSET} to seek to
+ * the window's default position.
*/
- void seekTo(int windowIndex, long windowPositionMs);
+ void seekTo(int windowIndex, long positionMs);
+
+ /**
+ * Attempts to set the playback parameters. Passing {@code null} sets the parameters to the
+ * default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment.
+ *
+ * Playback parameters changes may cause the player to buffer.
+ * {@link EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever
+ * the currently active playback parameters change. When that listener is called, the parameters
+ * passed to it may not match {@code playbackParameters}. For example, the chosen speed or pitch
+ * may be out of range, in which case they are constrained to a set of permitted values. If it is
+ * not possible to change the playback parameters, the listener will not be invoked.
+ *
+ * @param playbackParameters The playback parameters, or {@code null} to use the defaults.
+ */
+ void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters);
+
+ /**
+ * Returns the currently active playback parameters.
+ *
+ * @see EventListener#onPlaybackParametersChanged(PlaybackParameters)
+ */
+ PlaybackParameters getPlaybackParameters();
/**
* Stops playback. Use {@code setPlayWhenReady(false)} rather than this method if the intention
@@ -445,4 +476,20 @@ public interface ExoPlayer {
*/
int getBufferedPercentage();
+ /**
+ * Returns whether the current window is dynamic, or {@code false} if the {@link Timeline} is
+ * empty.
+ *
+ * @see Timeline.Window#isDynamic
+ */
+ boolean isCurrentWindowDynamic();
+
+ /**
+ * Returns whether the current window is seekable, or {@code false} if the {@link Timeline} is
+ * empty.
+ *
+ * @see Timeline.Window#isSeekable
+ */
+ boolean isCurrentWindowSeekable();
+
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerFactory.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerFactory.java
index 1817fbb69..7d4c1de19 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerFactory.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerFactory.java
@@ -26,12 +26,6 @@ import org.telegram.messenger.exoplayer2.trackselection.TrackSelector;
*/
public final class ExoPlayerFactory {
- /**
- * The default maximum duration for which a video renderer can attempt to seamlessly join an
- * ongoing playback.
- */
- public static final long DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS = 5000;
-
private ExoPlayerFactory() {}
/**
@@ -41,10 +35,13 @@ public final class ExoPlayerFactory {
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
+ * @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
+ @Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl) {
- return newSimpleInstance(context, trackSelector, loadControl, null);
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl);
}
/**
@@ -56,11 +53,13 @@ public final class ExoPlayerFactory {
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
+ * @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
+ @Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager drmSessionManager) {
- return newSimpleInstance(context, trackSelector, loadControl,
- drmSessionManager, SimpleExoPlayer.EXTENSION_RENDERER_MODE_OFF);
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl);
}
/**
@@ -75,12 +74,15 @@ public final class ExoPlayerFactory {
* @param extensionRendererMode The extension renderer mode, which determines if and how available
* extension renderers are used. Note that extensions must be included in the application
* build for them to be considered available.
+ * @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
+ @Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager drmSessionManager,
- @SimpleExoPlayer.ExtensionRendererMode int extensionRendererMode) {
- return newSimpleInstance(context, trackSelector, loadControl, drmSessionManager,
- extensionRendererMode, DEFAULT_ALLOWED_VIDEO_JOINING_TIME_MS);
+ @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode) {
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
+ extensionRendererMode);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl);
}
/**
@@ -97,13 +99,52 @@ public final class ExoPlayerFactory {
* build for them to be considered available.
* @param allowedVideoJoiningTimeMs The maximum duration for which a video renderer can attempt to
* seamlessly join an ongoing playback.
+ * @deprecated Use {@link #newSimpleInstance(RenderersFactory, TrackSelector, LoadControl)}.
*/
+ @Deprecated
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector,
LoadControl loadControl, DrmSessionManager drmSessionManager,
- @SimpleExoPlayer.ExtensionRendererMode int extensionRendererMode,
+ @DefaultRenderersFactory.ExtensionRendererMode int extensionRendererMode,
long allowedVideoJoiningTimeMs) {
- return new SimpleExoPlayer(context, trackSelector, loadControl, drmSessionManager,
+ RenderersFactory renderersFactory = new DefaultRenderersFactory(context, drmSessionManager,
extensionRendererMode, allowedVideoJoiningTimeMs);
+ return newSimpleInstance(renderersFactory, trackSelector, loadControl);
+ }
+
+ /**
+ * Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
+ * {@link Looper}.
+ *
+ * @param context A {@link Context}.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ */
+ public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) {
+ return newSimpleInstance(new DefaultRenderersFactory(context), trackSelector);
+ }
+
+ /**
+ * Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
+ * {@link Looper}.
+ *
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ */
+ public static SimpleExoPlayer newSimpleInstance(RenderersFactory renderersFactory,
+ TrackSelector trackSelector) {
+ return newSimpleInstance(renderersFactory, trackSelector, new DefaultLoadControl());
+ }
+
+ /**
+ * Creates a {@link SimpleExoPlayer} instance. Must be called from a thread that has an associated
+ * {@link Looper}.
+ *
+ * @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
+ * @param trackSelector The {@link TrackSelector} that will be used by the instance.
+ * @param loadControl The {@link LoadControl} that will be used by the instance.
+ */
+ public static SimpleExoPlayer newSimpleInstance(RenderersFactory renderersFactory,
+ TrackSelector trackSelector, LoadControl loadControl) {
+ return new SimpleExoPlayer(renderersFactory, trackSelector, loadControl);
}
/**
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImpl.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImpl.java
index 2a1ebd7f4..ed7afd4c7 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImpl.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImpl.java
@@ -19,15 +19,16 @@ import android.annotation.SuppressLint;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
+import android.support.annotation.Nullable;
import android.util.Log;
import org.telegram.messenger.exoplayer2.ExoPlayerImplInternal.PlaybackInfo;
import org.telegram.messenger.exoplayer2.ExoPlayerImplInternal.SourceInfo;
-import org.telegram.messenger.exoplayer2.ExoPlayerImplInternal.TrackInfo;
import org.telegram.messenger.exoplayer2.source.MediaSource;
import org.telegram.messenger.exoplayer2.source.TrackGroupArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelection;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelectionArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelector;
+import org.telegram.messenger.exoplayer2.trackselection.TrackSelectorResult;
import org.telegram.messenger.exoplayer2.util.Assertions;
import org.telegram.messenger.exoplayer2.util.Util;
import java.util.concurrent.CopyOnWriteArraySet;
@@ -52,17 +53,20 @@ import java.util.concurrent.CopyOnWriteArraySet;
private boolean playWhenReady;
private int playbackState;
private int pendingSeekAcks;
+ private int pendingPrepareAcks;
private boolean isLoading;
private Timeline timeline;
private Object manifest;
private TrackGroupArray trackGroups;
private TrackSelectionArray trackSelections;
+ private PlaybackParameters playbackParameters;
// Playback information when there is no pending seek/set source operation.
private PlaybackInfo playbackInfo;
// Playback information when there is a pending seek/set source operation.
private int maskingWindowIndex;
+ private int maskingPeriodIndex;
private long maskingWindowPositionMs;
/**
@@ -74,7 +78,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
*/
@SuppressLint("HandlerLeak")
public ExoPlayerImpl(Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) {
- Log.i(TAG, "Init " + ExoPlayerLibraryInfo.VERSION + " [" + Util.DEVICE_DEBUG_INFO + "]");
+ Log.i(TAG, "Init " + ExoPlayerLibraryInfo.VERSION_SLASHY + " [" + Util.DEVICE_DEBUG_INFO + "]");
Assertions.checkState(renderers.length > 0);
this.renderers = Assertions.checkNotNull(renderers);
this.trackSelector = Assertions.checkNotNull(trackSelector);
@@ -87,6 +91,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
period = new Timeline.Period();
trackGroups = TrackGroupArray.EMPTY;
trackSelections = emptyTrackSelections;
+ playbackParameters = PlaybackParameters.DEFAULT;
eventHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
@@ -95,7 +100,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
};
playbackInfo = new ExoPlayerImplInternal.PlaybackInfo(0, 0);
internalPlayer = new ExoPlayerImplInternal(renderers, trackSelector, loadControl, playWhenReady,
- eventHandler, playbackInfo);
+ eventHandler, playbackInfo, this);
}
@Override
@@ -125,7 +130,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
timeline = Timeline.EMPTY;
manifest = null;
for (EventListener listener : listeners) {
- listener.onTimelineChanged(null, null);
+ listener.onTimelineChanged(timeline, manifest);
}
}
if (tracksSelected) {
@@ -138,6 +143,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
}
}
+ pendingPrepareAcks++;
internalPlayer.prepare(mediaSource, resetPosition);
}
@@ -180,10 +186,26 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public void seekTo(int windowIndex, long positionMs) {
if (windowIndex < 0 || (!timeline.isEmpty() && windowIndex >= timeline.getWindowCount())) {
- throw new IndexOutOfBoundsException();
+ throw new IllegalSeekPositionException(timeline, windowIndex, positionMs);
}
pendingSeekAcks++;
maskingWindowIndex = windowIndex;
+ if (timeline.isEmpty()) {
+ maskingPeriodIndex = 0;
+ } else {
+ timeline.getWindow(windowIndex, window);
+ long resolvedPositionMs =
+ positionMs == C.TIME_UNSET ? window.getDefaultPositionUs() : positionMs;
+ int periodIndex = window.firstPeriodIndex;
+ long periodPositionUs = window.getPositionInFirstPeriodUs() + C.msToUs(resolvedPositionMs);
+ long periodDurationUs = timeline.getPeriod(periodIndex, period).getDurationUs();
+ while (periodDurationUs != C.TIME_UNSET && periodPositionUs >= periodDurationUs
+ && periodIndex < window.lastPeriodIndex) {
+ periodPositionUs -= periodDurationUs;
+ periodDurationUs = timeline.getPeriod(++periodIndex, period).getDurationUs();
+ }
+ maskingPeriodIndex = periodIndex;
+ }
if (positionMs == C.TIME_UNSET) {
maskingWindowPositionMs = 0;
internalPlayer.seekTo(timeline, windowIndex, C.TIME_UNSET);
@@ -196,6 +218,19 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
}
+ @Override
+ public void setPlaybackParameters(@Nullable PlaybackParameters playbackParameters) {
+ if (playbackParameters == null) {
+ playbackParameters = PlaybackParameters.DEFAULT;
+ }
+ internalPlayer.setPlaybackParameters(playbackParameters);
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return playbackParameters;
+ }
+
@Override
public void stop() {
internalPlayer.stop();
@@ -219,7 +254,11 @@ import java.util.concurrent.CopyOnWriteArraySet;
@Override
public int getCurrentPeriodIndex() {
- return playbackInfo.periodIndex;
+ if (timeline.isEmpty() || pendingSeekAcks > 0) {
+ return maskingPeriodIndex;
+ } else {
+ return playbackInfo.periodIndex;
+ }
}
@Override
@@ -271,6 +310,16 @@ import java.util.concurrent.CopyOnWriteArraySet;
: (int) (duration == 0 ? 100 : (bufferedPosition * 100) / duration);
}
+ @Override
+ public boolean isCurrentWindowDynamic() {
+ return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic;
+ }
+
+ @Override
+ public boolean isCurrentWindowSeekable() {
+ return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isSeekable;
+ }
+
@Override
public int getRendererCount() {
return renderers.length;
@@ -304,6 +353,10 @@ import java.util.concurrent.CopyOnWriteArraySet;
// Not private so it can be called from an inner class without going through a thunk method.
/* package */ void handleEvent(Message msg) {
switch (msg.what) {
+ case ExoPlayerImplInternal.MSG_PREPARE_ACK: {
+ pendingPrepareAcks--;
+ break;
+ }
case ExoPlayerImplInternal.MSG_STATE_CHANGED: {
playbackState = msg.arg1;
for (EventListener listener : listeners) {
@@ -319,21 +372,25 @@ import java.util.concurrent.CopyOnWriteArraySet;
break;
}
case ExoPlayerImplInternal.MSG_TRACKS_CHANGED: {
- TrackInfo trackInfo = (TrackInfo) msg.obj;
- tracksSelected = true;
- trackGroups = trackInfo.groups;
- trackSelections = trackInfo.selections;
- trackSelector.onSelectionActivated(trackInfo.info);
- for (EventListener listener : listeners) {
- listener.onTracksChanged(trackGroups, trackSelections);
+ if (pendingPrepareAcks == 0) {
+ TrackSelectorResult trackSelectorResult = (TrackSelectorResult) msg.obj;
+ tracksSelected = true;
+ trackGroups = trackSelectorResult.groups;
+ trackSelections = trackSelectorResult.selections;
+ trackSelector.onSelectionActivated(trackSelectorResult.info);
+ for (EventListener listener : listeners) {
+ listener.onTracksChanged(trackGroups, trackSelections);
+ }
}
break;
}
case ExoPlayerImplInternal.MSG_SEEK_ACK: {
if (--pendingSeekAcks == 0) {
playbackInfo = (ExoPlayerImplInternal.PlaybackInfo) msg.obj;
- for (EventListener listener : listeners) {
- listener.onPositionDiscontinuity();
+ if (msg.arg1 != 0) {
+ for (EventListener listener : listeners) {
+ listener.onPositionDiscontinuity();
+ }
}
}
break;
@@ -349,12 +406,24 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
case ExoPlayerImplInternal.MSG_SOURCE_INFO_REFRESHED: {
SourceInfo sourceInfo = (SourceInfo) msg.obj;
- timeline = sourceInfo.timeline;
- manifest = sourceInfo.manifest;
- playbackInfo = sourceInfo.playbackInfo;
pendingSeekAcks -= sourceInfo.seekAcks;
- for (EventListener listener : listeners) {
- listener.onTimelineChanged(timeline, manifest);
+ if (pendingPrepareAcks == 0) {
+ timeline = sourceInfo.timeline;
+ manifest = sourceInfo.manifest;
+ playbackInfo = sourceInfo.playbackInfo;
+ for (EventListener listener : listeners) {
+ listener.onTimelineChanged(timeline, manifest);
+ }
+ }
+ break;
+ }
+ case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED: {
+ PlaybackParameters playbackParameters = (PlaybackParameters) msg.obj;
+ if (!this.playbackParameters.equals(playbackParameters)) {
+ this.playbackParameters = playbackParameters;
+ for (EventListener listener : listeners) {
+ listener.onPlaybackParametersChanged(playbackParameters);
+ }
}
break;
}
@@ -365,6 +434,8 @@ import java.util.concurrent.CopyOnWriteArraySet;
}
break;
}
+ default:
+ throw new IllegalStateException();
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImplInternal.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImplInternal.java
index 3cc2c5a7e..db0c13557 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImplInternal.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerImplInternal.java
@@ -26,16 +26,14 @@ import org.telegram.messenger.exoplayer2.ExoPlayer.ExoPlayerMessage;
import org.telegram.messenger.exoplayer2.source.MediaPeriod;
import org.telegram.messenger.exoplayer2.source.MediaSource;
import org.telegram.messenger.exoplayer2.source.SampleStream;
-import org.telegram.messenger.exoplayer2.source.TrackGroupArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelection;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelectionArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelector;
+import org.telegram.messenger.exoplayer2.trackselection.TrackSelectorResult;
import org.telegram.messenger.exoplayer2.util.Assertions;
import org.telegram.messenger.exoplayer2.util.MediaClock;
-import org.telegram.messenger.exoplayer2.util.PriorityHandlerThread;
import org.telegram.messenger.exoplayer2.util.StandaloneMediaClock;
import org.telegram.messenger.exoplayer2.util.TraceUtil;
-import org.telegram.messenger.exoplayer2.util.Util;
import java.io.IOException;
/**
@@ -72,20 +70,6 @@ import java.io.IOException;
}
- public static final class TrackInfo {
-
- public final TrackGroupArray groups;
- public final TrackSelectionArray selections;
- public final Object info;
-
- public TrackInfo(TrackGroupArray groups, TrackSelectionArray selections, Object info) {
- this.groups = groups;
- this.selections = selections;
- this.info = info;
- }
-
- }
-
public static final class SourceInfo {
public final Timeline timeline;
@@ -105,26 +89,29 @@ import java.io.IOException;
private static final String TAG = "ExoPlayerImplInternal";
// External messages
+ public static final int MSG_PREPARE_ACK = 0;
public static final int MSG_STATE_CHANGED = 1;
public static final int MSG_LOADING_CHANGED = 2;
public static final int MSG_TRACKS_CHANGED = 3;
public static final int MSG_SEEK_ACK = 4;
public static final int MSG_POSITION_DISCONTINUITY = 5;
public static final int MSG_SOURCE_INFO_REFRESHED = 6;
- public static final int MSG_ERROR = 7;
+ public static final int MSG_PLAYBACK_PARAMETERS_CHANGED = 7;
+ public static final int MSG_ERROR = 8;
// Internal messages
private static final int MSG_PREPARE = 0;
private static final int MSG_SET_PLAY_WHEN_READY = 1;
private static final int MSG_DO_SOME_WORK = 2;
private static final int MSG_SEEK_TO = 3;
- private static final int MSG_STOP = 4;
- private static final int MSG_RELEASE = 5;
- private static final int MSG_REFRESH_SOURCE_INFO = 6;
- private static final int MSG_PERIOD_PREPARED = 7;
- private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 8;
- private static final int MSG_TRACK_SELECTION_INVALIDATED = 9;
- private static final int MSG_CUSTOM = 10;
+ private static final int MSG_SET_PLAYBACK_PARAMETERS = 4;
+ private static final int MSG_STOP = 5;
+ private static final int MSG_RELEASE = 6;
+ private static final int MSG_REFRESH_SOURCE_INFO = 7;
+ private static final int MSG_PERIOD_PREPARED = 8;
+ private static final int MSG_SOURCE_CONTINUE_LOADING_REQUESTED = 9;
+ private static final int MSG_TRACK_SELECTION_INVALIDATED = 10;
+ private static final int MSG_CUSTOM = 11;
private static final int PREPARING_SOURCE_INTERVAL_MS = 10;
private static final int RENDERING_INTERVAL_MS = 10;
@@ -137,6 +124,14 @@ import java.io.IOException;
*/
private static final int MAXIMUM_BUFFER_AHEAD_PERIODS = 100;
+ /**
+ * Offset added to all sample timestamps read by renderers to make them non-negative. This is
+ * provided for convenience of sources that may return negative timestamps due to prerolling
+ * samples from a keyframe before their first sample with timestamp zero, so it must be set to a
+ * value greater than or equal to the maximum key-frame interval in seekable periods.
+ */
+ private static final int RENDERER_TIMESTAMP_OFFSET_US = 60000000;
+
private final Renderer[] renderers;
private final RendererCapabilities[] rendererCapabilities;
private final TrackSelector trackSelector;
@@ -145,10 +140,12 @@ import java.io.IOException;
private final Handler handler;
private final HandlerThread internalPlaybackThread;
private final Handler eventHandler;
+ private final ExoPlayer player;
private final Timeline.Window window;
private final Timeline.Period period;
private PlaybackInfo playbackInfo;
+ private PlaybackParameters playbackParameters;
private Renderer rendererMediaClockSource;
private MediaClock rendererMediaClock;
private MediaSource mediaSource;
@@ -174,7 +171,7 @@ import java.io.IOException;
public ExoPlayerImplInternal(Renderer[] renderers, TrackSelector trackSelector,
LoadControl loadControl, boolean playWhenReady, Handler eventHandler,
- PlaybackInfo playbackInfo) {
+ PlaybackInfo playbackInfo, ExoPlayer player) {
this.renderers = renderers;
this.trackSelector = trackSelector;
this.loadControl = loadControl;
@@ -182,6 +179,7 @@ import java.io.IOException;
this.eventHandler = eventHandler;
this.state = ExoPlayer.STATE_IDLE;
this.playbackInfo = playbackInfo;
+ this.player = player;
rendererCapabilities = new RendererCapabilities[renderers.length];
for (int i = 0; i < renderers.length; i++) {
@@ -193,10 +191,11 @@ import java.io.IOException;
window = new Timeline.Window();
period = new Timeline.Period();
trackSelector.init(this);
+ playbackParameters = PlaybackParameters.DEFAULT;
// Note: The documentation for Process.THREAD_PRIORITY_AUDIO that states "Applications can
// not normally change to this priority" is incorrect.
- internalPlaybackThread = new PriorityHandlerThread("ExoPlayerImplInternal:Handler",
+ internalPlaybackThread = new HandlerThread("ExoPlayerImplInternal:Handler",
Process.THREAD_PRIORITY_AUDIO);
internalPlaybackThread.start();
handler = new Handler(internalPlaybackThread.getLooper(), this);
@@ -216,6 +215,10 @@ import java.io.IOException;
.sendToTarget();
}
+ public void setPlaybackParameters(PlaybackParameters playbackParameters) {
+ handler.obtainMessage(MSG_SET_PLAYBACK_PARAMETERS, playbackParameters).sendToTarget();
+ }
+
public void stop() {
handler.sendEmptyMessage(MSG_STOP);
}
@@ -309,6 +312,10 @@ import java.io.IOException;
seekToInternal((SeekPosition) msg.obj);
return true;
}
+ case MSG_SET_PLAYBACK_PARAMETERS: {
+ setPlaybackParametersInternal((PlaybackParameters) msg.obj);
+ return true;
+ }
case MSG_STOP: {
stopInternal();
return true;
@@ -376,13 +383,14 @@ import java.io.IOException;
}
private void prepareInternal(MediaSource mediaSource, boolean resetPosition) {
- resetInternal();
+ eventHandler.sendEmptyMessage(MSG_PREPARE_ACK);
+ resetInternal(true);
loadControl.onPrepared();
if (resetPosition) {
playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
}
this.mediaSource = mediaSource;
- mediaSource.prepareSource(this);
+ mediaSource.prepareSource(player, true, this);
setState(ExoPlayer.STATE_BUFFERING);
handler.sendEmptyMessage(MSG_DO_SOME_WORK);
}
@@ -460,6 +468,8 @@ import java.io.IOException;
TraceUtil.beginSection("doSomeWork");
updatePlaybackPositions();
+ playingPeriodHolder.mediaPeriod.discardBuffer(playbackInfo.positionUs);
+
boolean allRenderersEnded = true;
boolean allRenderersReadyOrEnded = true;
for (Renderer renderer : enabledRenderers) {
@@ -481,6 +491,19 @@ import java.io.IOException;
maybeThrowPeriodPrepareError();
}
+ // The standalone media clock never changes playback parameters, so just check the renderer.
+ if (rendererMediaClock != null) {
+ PlaybackParameters playbackParameters = rendererMediaClock.getPlaybackParameters();
+ if (!playbackParameters.equals(this.playbackParameters)) {
+ // TODO: Make LoadControl, period transition position projection, adaptive track selection
+ // and potentially any time-related code in renderers take into account the playback speed.
+ this.playbackParameters = playbackParameters;
+ standaloneMediaClock.synchronize(rendererMediaClock);
+ eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters)
+ .sendToTarget();
+ }
+ }
+
long playingPeriodDurationUs = timeline.getPeriod(playingPeriodHolder.index, period)
.getDurationUs();
if (allRenderersEnded
@@ -546,12 +569,20 @@ import java.io.IOException;
Pair periodPosition = resolveSeekPosition(seekPosition);
if (periodPosition == null) {
- // TODO: We should probably propagate an error here.
- // We failed to resolve the seek position. Stop the player.
- stopInternal();
+ // The seek position was valid for the timeline that it was performed into, but the
+ // timeline has changed and a suitable seek position could not be resolved in the new one.
+ playbackInfo = new PlaybackInfo(0, 0);
+ eventHandler.obtainMessage(MSG_SEEK_ACK, 1, 0, playbackInfo).sendToTarget();
+ // Set the internal position to (0,TIME_UNSET) so that a subsequent seek to (0,0) isn't
+ // ignored.
+ playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
+ setState(ExoPlayer.STATE_ENDED);
+ // Reset, but retain the source so that it can still be used should a seek occur.
+ resetInternal(false);
return;
}
+ boolean seekPositionAdjusted = seekPosition.windowPositionUs == C.TIME_UNSET;
int periodIndex = periodPosition.first;
long periodPositionUs = periodPosition.second;
@@ -561,10 +592,13 @@ import java.io.IOException;
// Seek position equals the current position. Do nothing.
return;
}
- periodPositionUs = seekToPeriodPosition(periodIndex, periodPositionUs);
+ long newPeriodPositionUs = seekToPeriodPosition(periodIndex, periodPositionUs);
+ seekPositionAdjusted |= periodPositionUs != newPeriodPositionUs;
+ periodPositionUs = newPeriodPositionUs;
} finally {
playbackInfo = new PlaybackInfo(periodIndex, periodPositionUs);
- eventHandler.obtainMessage(MSG_SEEK_ACK, playbackInfo).sendToTarget();
+ eventHandler.obtainMessage(MSG_SEEK_ACK, seekPositionAdjusted ? 1 : 0, 0, playbackInfo)
+ .sendToTarget();
}
}
@@ -603,6 +637,7 @@ import java.io.IOException;
enabledRenderers = new Renderer[0];
rendererMediaClock = null;
rendererMediaClockSource = null;
+ playingPeriodHolder = null;
}
// Update the holders.
@@ -628,7 +663,8 @@ import java.io.IOException;
}
private void resetRendererPosition(long periodPositionUs) throws ExoPlaybackException {
- rendererPositionUs = playingPeriodHolder == null ? periodPositionUs
+ rendererPositionUs = playingPeriodHolder == null
+ ? periodPositionUs + RENDERER_TIMESTAMP_OFFSET_US
: playingPeriodHolder.toRendererTime(periodPositionUs);
standaloneMediaClock.setPositionUs(rendererPositionUs);
for (Renderer renderer : enabledRenderers) {
@@ -636,14 +672,22 @@ import java.io.IOException;
}
}
+ private void setPlaybackParametersInternal(PlaybackParameters playbackParameters) {
+ playbackParameters = rendererMediaClock != null
+ ? rendererMediaClock.setPlaybackParameters(playbackParameters)
+ : standaloneMediaClock.setPlaybackParameters(playbackParameters);
+ this.playbackParameters = playbackParameters;
+ eventHandler.obtainMessage(MSG_PLAYBACK_PARAMETERS_CHANGED, playbackParameters).sendToTarget();
+ }
+
private void stopInternal() {
- resetInternal();
+ resetInternal(true);
loadControl.onStopped();
setState(ExoPlayer.STATE_IDLE);
}
private void releaseInternal() {
- resetInternal();
+ resetInternal(true);
loadControl.onReleased();
setState(ExoPlayer.STATE_IDLE);
synchronized (this) {
@@ -652,12 +696,13 @@ import java.io.IOException;
}
}
- private void resetInternal() {
+ private void resetInternal(boolean releaseMediaSource) {
handler.removeMessages(MSG_DO_SOME_WORK);
rebuffering = false;
standaloneMediaClock.stop();
rendererMediaClock = null;
rendererMediaClockSource = null;
+ rendererPositionUs = RENDERER_TIMESTAMP_OFFSET_US;
for (Renderer renderer : enabledRenderers) {
try {
ensureStopped(renderer);
@@ -670,15 +715,17 @@ import java.io.IOException;
enabledRenderers = new Renderer[0];
releasePeriodHoldersFrom(playingPeriodHolder != null ? playingPeriodHolder
: loadingPeriodHolder);
- if (mediaSource != null) {
- mediaSource.releaseSource();
- mediaSource = null;
- }
loadingPeriodHolder = null;
readingPeriodHolder = null;
playingPeriodHolder = null;
- timeline = null;
setIsLoading(false);
+ if (releaseMediaSource) {
+ if (mediaSource != null) {
+ mediaSource.releaseSource();
+ mediaSource = null;
+ }
+ timeline = null;
+ }
}
private void sendMessagesInternal(ExoPlayerMessage[] messages) throws ExoPlaybackException {
@@ -761,7 +808,7 @@ import java.io.IOException;
if (sampleStream == null) {
// The renderer won't be re-enabled. Sync standaloneMediaClock so that it can take
// over timing responsibilities.
- standaloneMediaClock.setPositionUs(rendererMediaClock.getPositionUs());
+ standaloneMediaClock.synchronize(rendererMediaClock);
}
rendererMediaClock = null;
rendererMediaClockSource = null;
@@ -774,7 +821,8 @@ import java.io.IOException;
}
}
}
- eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.getTrackInfo()).sendToTarget();
+ eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.trackSelectorResult)
+ .sendToTarget();
enableRenderers(rendererWasEnabledFlags, enabledRendererCount);
} else {
// Release and re-prepare/buffer periods after the one whose selection changed.
@@ -803,9 +851,6 @@ import java.io.IOException;
}
private boolean haveSufficientBuffer(boolean rebuffering) {
- if (loadingPeriodHolder == null) {
- return false;
- }
long loadingPeriodBufferedPositionUs = !loadingPeriodHolder.prepared
? loadingPeriodHolder.startPositionUs
: loadingPeriodHolder.mediaPeriod.getBufferedPositionUs();
@@ -843,18 +888,21 @@ import java.io.IOException;
if (oldTimeline == null) {
if (pendingInitialSeekCount > 0) {
Pair periodPosition = resolveSeekPosition(pendingSeekPosition);
- if (periodPosition == null) {
- // We failed to resolve the seek position. Stop the player.
- notifySourceInfoRefresh(manifest, 0);
- // TODO: We should probably propagate an error here.
- stopInternal();
- return;
- }
- playbackInfo = new PlaybackInfo(periodPosition.first, periodPosition.second);
processedInitialSeekCount = pendingInitialSeekCount;
pendingInitialSeekCount = 0;
pendingSeekPosition = null;
+ if (periodPosition == null) {
+ // The seek position was valid for the timeline that it was performed into, but the
+ // timeline has changed and a suitable seek position could not be resolved in the new one.
+ handleSourceInfoRefreshEndedPlayback(manifest, processedInitialSeekCount);
+ return;
+ }
+ playbackInfo = new PlaybackInfo(periodPosition.first, periodPosition.second);
} else if (playbackInfo.startPositionUs == C.TIME_UNSET) {
+ if (timeline.isEmpty()) {
+ handleSourceInfoRefreshEndedPlayback(manifest, processedInitialSeekCount);
+ return;
+ }
Pair defaultPosition = getPeriodPosition(0, C.TIME_UNSET);
playbackInfo = new PlaybackInfo(defaultPosition.first, defaultPosition.second);
}
@@ -874,10 +922,8 @@ import java.io.IOException;
// period whose window we can restart from.
int newPeriodIndex = resolveSubsequentPeriod(periodHolder.index, oldTimeline, timeline);
if (newPeriodIndex == C.INDEX_UNSET) {
- // We failed to resolve a subsequent period. Stop the player.
- notifySourceInfoRefresh(manifest, processedInitialSeekCount);
- // TODO: We should probably propagate an error here.
- stopInternal();
+ // We failed to resolve a suitable restart position.
+ handleSourceInfoRefreshEndedPlayback(manifest, processedInitialSeekCount);
return;
}
// We resolved a subsequent period. Seek to the default position in the corresponding window.
@@ -947,6 +993,18 @@ import java.io.IOException;
notifySourceInfoRefresh(manifest, processedInitialSeekCount);
}
+ private void handleSourceInfoRefreshEndedPlayback(Object manifest,
+ int processedInitialSeekCount) {
+ // Set the playback position to (0,0) for notifying the eventHandler.
+ playbackInfo = new PlaybackInfo(0, 0);
+ notifySourceInfoRefresh(manifest, processedInitialSeekCount);
+ // Set the internal position to (0,TIME_UNSET) so that a subsequent seek to (0,0) isn't ignored.
+ playbackInfo = new PlaybackInfo(0, C.TIME_UNSET);
+ setState(ExoPlayer.STATE_ENDED);
+ // Reset, but retain the source so that it can still be used should a seek occur.
+ resetInternal(false);
+ }
+
private void notifySourceInfoRefresh(Object manifest, int processedInitialSeekCount) {
eventHandler.obtainMessage(MSG_SOURCE_INFO_REFRESHED,
new SourceInfo(timeline, manifest, playbackInfo, processedInitialSeekCount)).sendToTarget();
@@ -978,6 +1036,8 @@ import java.io.IOException;
*
* @param seekPosition The position to resolve.
* @return The resolved position, or null if resolution was not successful.
+ * @throws IllegalSeekPositionException If the window index of the seek position is outside the
+ * bounds of the timeline.
*/
private Pair resolveSeekPosition(SeekPosition seekPosition) {
Timeline seekTimeline = seekPosition.timeline;
@@ -985,11 +1045,17 @@ import java.io.IOException;
// The application performed a blind seek without a non-empty timeline (most likely based on
// knowledge of what the future timeline will be). Use the internal timeline.
seekTimeline = timeline;
- Assertions.checkIndex(seekPosition.windowIndex, 0, timeline.getWindowCount());
}
// Map the SeekPosition to a position in the corresponding timeline.
- Pair periodPosition = getPeriodPosition(seekTimeline, seekPosition.windowIndex,
- seekPosition.windowPositionUs);
+ Pair periodPosition;
+ try {
+ periodPosition = getPeriodPosition(seekTimeline, seekPosition.windowIndex,
+ seekPosition.windowPositionUs);
+ } catch (IndexOutOfBoundsException e) {
+ // The window index of the seek position was outside the bounds of the timeline.
+ throw new IllegalSeekPositionException(timeline, seekPosition.windowIndex,
+ seekPosition.windowPositionUs);
+ }
if (timeline == seekTimeline) {
// Our internal timeline is the seek timeline, so the mapped position is correct.
return periodPosition;
@@ -1042,6 +1108,7 @@ import java.io.IOException;
*/
private Pair getPeriodPosition(Timeline timeline, int windowIndex,
long windowPositionUs, long defaultPositionProjectionUs) {
+ Assertions.checkIndex(windowIndex, 0, timeline.getWindowCount());
timeline.getWindow(windowIndex, window, false, defaultPositionProjectionUs);
if (windowPositionUs == C.TIME_UNSET) {
windowPositionUs = window.getDefaultPositionUs();
@@ -1067,66 +1134,8 @@ import java.io.IOException;
return;
}
- if (loadingPeriodHolder == null
- || (loadingPeriodHolder.isFullyBuffered() && !loadingPeriodHolder.isLast
- && (playingPeriodHolder == null
- || loadingPeriodHolder.index - playingPeriodHolder.index < MAXIMUM_BUFFER_AHEAD_PERIODS))) {
- // We don't have a loading period or it's fully loaded, so try and create the next one.
- int newLoadingPeriodIndex = loadingPeriodHolder == null ? playbackInfo.periodIndex
- : loadingPeriodHolder.index + 1;
- if (newLoadingPeriodIndex >= timeline.getPeriodCount()) {
- // The period is not available yet.
- mediaSource.maybeThrowSourceInfoRefreshError();
- } else {
- int windowIndex = timeline.getPeriod(newLoadingPeriodIndex, period).windowIndex;
- boolean isFirstPeriodInWindow = newLoadingPeriodIndex
- == timeline.getWindow(windowIndex, window).firstPeriodIndex;
- long periodStartPositionUs;
- if (loadingPeriodHolder == null) {
- periodStartPositionUs = playbackInfo.startPositionUs;
- } else if (!isFirstPeriodInWindow) {
- // We're starting to buffer a new period in the current window. Always start from the
- // beginning of the period.
- periodStartPositionUs = 0;
- } else {
- // We're starting to buffer a new window. When playback transitions to this window we'll
- // want it to be from its default start position. The expected delay until playback
- // transitions is equal the duration of media that's currently buffered (assuming no
- // interruptions). Hence we project the default start position forward by the duration of
- // the buffer, and start buffering from this point.
- long defaultPositionProjectionUs = loadingPeriodHolder.getRendererOffset()
- + timeline.getPeriod(loadingPeriodHolder.index, period).getDurationUs()
- - rendererPositionUs;
- Pair defaultPosition = getPeriodPosition(timeline, windowIndex,
- C.TIME_UNSET, Math.max(0, defaultPositionProjectionUs));
- if (defaultPosition == null) {
- newLoadingPeriodIndex = C.INDEX_UNSET;
- periodStartPositionUs = C.TIME_UNSET;
- } else {
- newLoadingPeriodIndex = defaultPosition.first;
- periodStartPositionUs = defaultPosition.second;
- }
- }
- if (newLoadingPeriodIndex != C.INDEX_UNSET) {
- long rendererPositionOffsetUs = loadingPeriodHolder == null ? periodStartPositionUs
- : (loadingPeriodHolder.getRendererOffset()
- + timeline.getPeriod(loadingPeriodHolder.index, period).getDurationUs());
- timeline.getPeriod(newLoadingPeriodIndex, period, true);
- boolean isLastPeriod = newLoadingPeriodIndex == timeline.getPeriodCount() - 1
- && !timeline.getWindow(period.windowIndex, window).isDynamic;
- MediaPeriodHolder newPeriodHolder = new MediaPeriodHolder(renderers, rendererCapabilities,
- rendererPositionOffsetUs, trackSelector, loadControl, mediaSource, period.uid,
- newLoadingPeriodIndex, isLastPeriod, periodStartPositionUs);
- if (loadingPeriodHolder != null) {
- loadingPeriodHolder.next = newPeriodHolder;
- }
- loadingPeriodHolder = newPeriodHolder;
- loadingPeriodHolder.mediaPeriod.prepare(this);
- setIsLoading(true);
- }
- }
- }
-
+ // Update the loading period if required.
+ maybeUpdateLoadingPeriod();
if (loadingPeriodHolder == null || loadingPeriodHolder.isFullyBuffered()) {
setIsLoading(false);
} else if (loadingPeriodHolder != null && loadingPeriodHolder.needsContinueLoading) {
@@ -1152,28 +1161,49 @@ import java.io.IOException;
}
if (readingPeriodHolder.isLast) {
- // The renderers have their final SampleStreams.
- for (Renderer renderer : enabledRenderers) {
- renderer.setCurrentStreamIsFinal();
+ for (int i = 0; i < renderers.length; i++) {
+ Renderer renderer = renderers[i];
+ SampleStream sampleStream = readingPeriodHolder.sampleStreams[i];
+ // Defer setting the stream as final until the renderer has actually consumed the whole
+ // stream in case of playlist changes that cause the stream to be no longer final.
+ if (sampleStream != null && renderer.getStream() == sampleStream
+ && renderer.hasReadStreamToEnd()) {
+ renderer.setCurrentStreamFinal();
+ }
}
return;
}
- for (Renderer renderer : enabledRenderers) {
- if (!renderer.hasReadStreamToEnd()) {
+ for (int i = 0; i < renderers.length; i++) {
+ Renderer renderer = renderers[i];
+ SampleStream sampleStream = readingPeriodHolder.sampleStreams[i];
+ if (renderer.getStream() != sampleStream
+ || (sampleStream != null && !renderer.hasReadStreamToEnd())) {
return;
}
}
+
if (readingPeriodHolder.next != null && readingPeriodHolder.next.prepared) {
- TrackSelectionArray oldTrackSelections = readingPeriodHolder.trackSelections;
+ TrackSelectorResult oldTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
readingPeriodHolder = readingPeriodHolder.next;
- TrackSelectionArray newTrackSelections = readingPeriodHolder.trackSelections;
+ TrackSelectorResult newTrackSelectorResult = readingPeriodHolder.trackSelectorResult;
+
+ boolean initialDiscontinuity =
+ readingPeriodHolder.mediaPeriod.readDiscontinuity() != C.TIME_UNSET;
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
- TrackSelection oldSelection = oldTrackSelections.get(i);
- TrackSelection newSelection = newTrackSelections.get(i);
- if (oldSelection != null) {
- if (newSelection != null) {
+ TrackSelection oldSelection = oldTrackSelectorResult.selections.get(i);
+ if (oldSelection == null) {
+ // The renderer has no current stream and will be enabled when we play the next period.
+ } else if (initialDiscontinuity) {
+ // The new period starts with a discontinuity, so the renderer will play out all data then
+ // be disabled and re-enabled when it starts playing the next period.
+ renderer.setCurrentStreamFinal();
+ } else if (!renderer.isCurrentStreamFinal()) {
+ TrackSelection newSelection = newTrackSelectorResult.selections.get(i);
+ RendererConfiguration oldConfig = oldTrackSelectorResult.rendererConfigurations[i];
+ RendererConfiguration newConfig = newTrackSelectorResult.rendererConfigurations[i];
+ if (newSelection != null && newConfig.equals(oldConfig)) {
// Replace the renderer's SampleStream so the transition to playing the next period can
// be seamless.
Format[] formats = new Format[newSelection.length()];
@@ -1183,15 +1213,90 @@ import java.io.IOException;
renderer.replaceStream(formats, readingPeriodHolder.sampleStreams[i],
readingPeriodHolder.getRendererOffset());
} else {
- // The renderer will be disabled when transitioning to playing the next period. Mark the
- // SampleStream as final to play out any remaining data.
- renderer.setCurrentStreamIsFinal();
+ // The renderer will be disabled when transitioning to playing the next period, either
+ // because there's no new selection or because a configuration change is required. Mark
+ // the SampleStream as final to play out any remaining data.
+ renderer.setCurrentStreamFinal();
}
}
}
}
}
+ private void maybeUpdateLoadingPeriod() throws IOException {
+ int newLoadingPeriodIndex;
+ if (loadingPeriodHolder == null) {
+ newLoadingPeriodIndex = playbackInfo.periodIndex;
+ } else {
+ int loadingPeriodIndex = loadingPeriodHolder.index;
+ if (loadingPeriodHolder.isLast || !loadingPeriodHolder.isFullyBuffered()
+ || timeline.getPeriod(loadingPeriodIndex, period).getDurationUs() == C.TIME_UNSET) {
+ // Either the existing loading period is the last period, or we are not ready to advance to
+ // loading the next period because it hasn't been fully buffered or its duration is unknown.
+ return;
+ }
+ if (playingPeriodHolder != null
+ && loadingPeriodIndex - playingPeriodHolder.index == MAXIMUM_BUFFER_AHEAD_PERIODS) {
+ // We are already buffering the maximum number of periods ahead.
+ return;
+ }
+ newLoadingPeriodIndex = loadingPeriodHolder.index + 1;
+ }
+
+ if (newLoadingPeriodIndex >= timeline.getPeriodCount()) {
+ // The next period is not available yet.
+ mediaSource.maybeThrowSourceInfoRefreshError();
+ return;
+ }
+
+ long newLoadingPeriodStartPositionUs;
+ if (loadingPeriodHolder == null) {
+ newLoadingPeriodStartPositionUs = playbackInfo.positionUs;
+ } else {
+ int newLoadingWindowIndex = timeline.getPeriod(newLoadingPeriodIndex, period).windowIndex;
+ if (newLoadingPeriodIndex
+ != timeline.getWindow(newLoadingWindowIndex, window).firstPeriodIndex) {
+ // We're starting to buffer a new period in the current window. Always start from the
+ // beginning of the period.
+ newLoadingPeriodStartPositionUs = 0;
+ } else {
+ // We're starting to buffer a new window. When playback transitions to this window we'll
+ // want it to be from its default start position. The expected delay until playback
+ // transitions is equal the duration of media that's currently buffered (assuming no
+ // interruptions). Hence we project the default start position forward by the duration of
+ // the buffer, and start buffering from this point.
+ long defaultPositionProjectionUs = loadingPeriodHolder.getRendererOffset()
+ + timeline.getPeriod(loadingPeriodHolder.index, period).getDurationUs()
+ - rendererPositionUs;
+ Pair defaultPosition = getPeriodPosition(timeline, newLoadingWindowIndex,
+ C.TIME_UNSET, Math.max(0, defaultPositionProjectionUs));
+ if (defaultPosition == null) {
+ return;
+ }
+
+ newLoadingPeriodIndex = defaultPosition.first;
+ newLoadingPeriodStartPositionUs = defaultPosition.second;
+ }
+ }
+
+ long rendererPositionOffsetUs = loadingPeriodHolder == null
+ ? newLoadingPeriodStartPositionUs + RENDERER_TIMESTAMP_OFFSET_US
+ : (loadingPeriodHolder.getRendererOffset()
+ + timeline.getPeriod(loadingPeriodHolder.index, period).getDurationUs());
+ timeline.getPeriod(newLoadingPeriodIndex, period, true);
+ boolean isLastPeriod = newLoadingPeriodIndex == timeline.getPeriodCount() - 1
+ && !timeline.getWindow(period.windowIndex, window).isDynamic;
+ MediaPeriodHolder newPeriodHolder = new MediaPeriodHolder(renderers, rendererCapabilities,
+ rendererPositionOffsetUs, trackSelector, loadControl, mediaSource, period.uid,
+ newLoadingPeriodIndex, isLastPeriod, newLoadingPeriodStartPositionUs);
+ if (loadingPeriodHolder != null) {
+ loadingPeriodHolder.next = newPeriodHolder;
+ }
+ loadingPeriodHolder = newPeriodHolder;
+ loadingPeriodHolder.mediaPeriod.prepare(this);
+ setIsLoading(true);
+ }
+
private void handlePeriodPrepared(MediaPeriod period) throws ExoPlaybackException {
if (loadingPeriodHolder == null || loadingPeriodHolder.mediaPeriod != period) {
// Stale event.
@@ -1216,7 +1321,8 @@ import java.io.IOException;
}
private void maybeContinueLoading() {
- long nextLoadPositionUs = loadingPeriodHolder.mediaPeriod.getNextLoadPositionUs();
+ long nextLoadPositionUs = !loadingPeriodHolder.prepared ? 0
+ : loadingPeriodHolder.mediaPeriod.getNextLoadPositionUs();
if (nextLoadPositionUs == C.TIME_END_OF_SOURCE) {
setIsLoading(false);
} else {
@@ -1241,21 +1347,28 @@ import java.io.IOException;
}
private void setPlayingPeriodHolder(MediaPeriodHolder periodHolder) throws ExoPlaybackException {
- playingPeriodHolder = periodHolder;
+ if (playingPeriodHolder == periodHolder) {
+ return;
+ }
+
int enabledRendererCount = 0;
boolean[] rendererWasEnabledFlags = new boolean[renderers.length];
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
rendererWasEnabledFlags[i] = renderer.getState() != Renderer.STATE_DISABLED;
- TrackSelection newSelection = periodHolder.trackSelections.get(i);
+ TrackSelection newSelection = periodHolder.trackSelectorResult.selections.get(i);
if (newSelection != null) {
- // The renderer should be enabled when playing the new period.
enabledRendererCount++;
- } else if (rendererWasEnabledFlags[i]) {
- // The renderer should be disabled when playing the new period.
+ }
+ if (rendererWasEnabledFlags[i] && (newSelection == null
+ || (renderer.isCurrentStreamFinal()
+ && renderer.getStream() == playingPeriodHolder.sampleStreams[i]))) {
+ // The renderer should be disabled before playing the next period, either because it's not
+ // needed to play the next period, or because we need to re-enable it as its current stream
+ // is final and it's not reading ahead.
if (renderer == rendererMediaClockSource) {
// Sync standaloneMediaClock so that it can take over timing responsibilities.
- standaloneMediaClock.setPositionUs(rendererMediaClock.getPositionUs());
+ standaloneMediaClock.synchronize(rendererMediaClock);
rendererMediaClock = null;
rendererMediaClockSource = null;
}
@@ -1264,7 +1377,8 @@ import java.io.IOException;
}
}
- eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.getTrackInfo()).sendToTarget();
+ playingPeriodHolder = periodHolder;
+ eventHandler.obtainMessage(MSG_TRACKS_CHANGED, periodHolder.trackSelectorResult).sendToTarget();
enableRenderers(rendererWasEnabledFlags, enabledRendererCount);
}
@@ -1274,10 +1388,12 @@ import java.io.IOException;
enabledRendererCount = 0;
for (int i = 0; i < renderers.length; i++) {
Renderer renderer = renderers[i];
- TrackSelection newSelection = playingPeriodHolder.trackSelections.get(i);
+ TrackSelection newSelection = playingPeriodHolder.trackSelectorResult.selections.get(i);
if (newSelection != null) {
enabledRenderers[enabledRendererCount++] = renderer;
if (renderer.getState() == Renderer.STATE_DISABLED) {
+ RendererConfiguration rendererConfiguration =
+ playingPeriodHolder.trackSelectorResult.rendererConfigurations[i];
// The renderer needs enabling with its new track selection.
boolean playing = playWhenReady && state == ExoPlayer.STATE_READY;
// Consider as joining only if the renderer was previously disabled.
@@ -1288,8 +1404,8 @@ import java.io.IOException;
formats[j] = newSelection.getFormat(j);
}
// Enable the renderer.
- renderer.enable(formats, playingPeriodHolder.sampleStreams[i], rendererPositionUs,
- joining, playingPeriodHolder.getRendererOffset());
+ renderer.enable(rendererConfiguration, formats, playingPeriodHolder.sampleStreams[i],
+ rendererPositionUs, joining, playingPeriodHolder.getRendererOffset());
MediaClock mediaClock = renderer.getMediaClock();
if (mediaClock != null) {
if (rendererMediaClock != null) {
@@ -1298,6 +1414,7 @@ import java.io.IOException;
}
rendererMediaClock = mediaClock;
rendererMediaClockSource = renderer;
+ rendererMediaClock.setPlaybackParameters(playbackParameters);
}
// Start the renderer if playing.
if (playing) {
@@ -1326,6 +1443,7 @@ import java.io.IOException;
public boolean hasEnabledTracks;
public MediaPeriodHolder next;
public boolean needsContinueLoading;
+ public TrackSelectorResult trackSelectorResult;
private final Renderer[] renderers;
private final RendererCapabilities[] rendererCapabilities;
@@ -1333,10 +1451,7 @@ import java.io.IOException;
private final LoadControl loadControl;
private final MediaSource mediaSource;
- private Object trackSelectionsInfo;
- private TrackGroupArray trackGroups;
- private TrackSelectionArray trackSelections;
- private TrackSelectionArray periodTrackSelections;
+ private TrackSelectorResult periodTrackSelectorResult;
public MediaPeriodHolder(Renderer[] renderers, RendererCapabilities[] rendererCapabilities,
long rendererPositionOffsetUs, TrackSelector trackSelector, LoadControl loadControl,
@@ -1382,20 +1497,17 @@ import java.io.IOException;
public void handlePrepared() throws ExoPlaybackException {
prepared = true;
- trackGroups = mediaPeriod.getTrackGroups();
selectTracks();
startPositionUs = updatePeriodTrackSelection(startPositionUs, false);
}
public boolean selectTracks() throws ExoPlaybackException {
- Pair selectorResult = trackSelector.selectTracks(
- rendererCapabilities, trackGroups);
- TrackSelectionArray newTrackSelections = selectorResult.first;
- if (newTrackSelections.equals(periodTrackSelections)) {
+ TrackSelectorResult selectorResult = trackSelector.selectTracks(rendererCapabilities,
+ mediaPeriod.getTrackGroups());
+ if (selectorResult.isEquivalent(periodTrackSelectorResult)) {
return false;
}
- trackSelections = newTrackSelections;
- trackSelectionsInfo = selectorResult.second;
+ trackSelectorResult = selectorResult;
return true;
}
@@ -1406,16 +1518,16 @@ import java.io.IOException;
public long updatePeriodTrackSelection(long positionUs, boolean forceRecreateStreams,
boolean[] streamResetFlags) {
+ TrackSelectionArray trackSelections = trackSelectorResult.selections;
for (int i = 0; i < trackSelections.length; i++) {
mayRetainStreamFlags[i] = !forceRecreateStreams
- && Util.areEqual(periodTrackSelections == null ? null : periodTrackSelections.get(i),
- trackSelections.get(i));
+ && trackSelectorResult.isEquivalent(periodTrackSelectorResult, i);
}
// Disable streams on the period and get new streams for updated/newly-enabled tracks.
positionUs = mediaPeriod.selectTracks(trackSelections.getAll(), mayRetainStreamFlags,
sampleStreams, streamResetFlags, positionUs);
- periodTrackSelections = trackSelections;
+ periodTrackSelectorResult = trackSelectorResult;
// Update whether we have enabled tracks and sanity check the expected streams are non-null.
hasEnabledTracks = false;
@@ -1429,14 +1541,10 @@ import java.io.IOException;
}
// The track selection has changed.
- loadControl.onTracksSelected(renderers, trackGroups, trackSelections);
+ loadControl.onTracksSelected(renderers, trackSelectorResult.groups, trackSelections);
return positionUs;
}
- public TrackInfo getTrackInfo() {
- return new TrackInfo(trackGroups, trackSelections, trackSelectionsInfo);
- }
-
public void release() {
try {
mediaSource.releasePeriod(mediaPeriod);
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerLibraryInfo.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerLibraryInfo.java
index 302c7562a..0e25366cb 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerLibraryInfo.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/ExoPlayerLibraryInfo.java
@@ -21,18 +21,26 @@ package org.telegram.messenger.exoplayer2;
public interface ExoPlayerLibraryInfo {
/**
- * The version of the library, expressed as a string.
+ * The version of the library expressed as a string, for example "1.2.3".
*/
- String VERSION = "2.0.4";
+ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
+ String VERSION = "2.4.0";
/**
- * The version of the library, expressed as an integer.
+ * The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}.
+ */
+ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
+ String VERSION_SLASHY = "ExoPlayerLib/2.4.0";
+
+ /**
+ * The version of the library expressed as an integer, for example 1002003.
*
* Three digits are used for each component of {@link #VERSION}. For example "1.2.3" has the
* corresponding integer version 1002003 (001-002-003), and "123.45.6" has the corresponding
* integer version 123045006 (123-045-006).
*/
- int VERSION_INT = 2000004;
+ // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
+ int VERSION_INT = 2004000;
/**
* Whether the library was compiled with {@link org.telegram.messenger.exoplayer2.util.Assertions}
@@ -45,5 +53,5 @@ public interface ExoPlayerLibraryInfo {
* trace enabled.
*/
boolean TRACE_ENABLED = true;
-
+
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Format.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Format.java
index 2868758d9..dda02f3d9 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Format.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Format.java
@@ -24,6 +24,7 @@ import org.telegram.messenger.exoplayer2.drm.DrmInitData;
import org.telegram.messenger.exoplayer2.metadata.Metadata;
import org.telegram.messenger.exoplayer2.util.MimeTypes;
import org.telegram.messenger.exoplayer2.util.Util;
+import org.telegram.messenger.exoplayer2.video.ColorInfo;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
@@ -120,7 +121,7 @@ public final class Format implements Parcelable {
/**
* The stereo layout for 360/3D/VR video, or {@link #NO_VALUE} if not applicable. Valid stereo
* modes are {@link C#STEREO_MODE_MONO}, {@link C#STEREO_MODE_TOP_BOTTOM}, {@link
- * C#STEREO_MODE_LEFT_RIGHT}.
+ * C#STEREO_MODE_LEFT_RIGHT}, {@link C#STEREO_MODE_STEREO_MESH}.
*/
@C.StereoMode
public final int stereoMode;
@@ -128,6 +129,10 @@ public final class Format implements Parcelable {
* The projection data for 360/VR video, or null if not applicable.
*/
public final byte[] projectionData;
+ /**
+ * The color metadata associated with the video, which helps with accurate color reproduction.
+ */
+ public final ColorInfo colorInfo;
// Audio specific.
@@ -183,20 +188,18 @@ public final class Format implements Parcelable {
*/
public final int accessibilityChannel;
- // Lazily initialized hashcode and framework media format.
-
+ // Lazily initialized hashcode.
private int hashCode;
- private MediaFormat frameworkMediaFormat;
// Video.
public static Format createVideoContainerFormat(String id, String containerMimeType,
String sampleMimeType, String codecs, int bitrate, int width, int height,
- float frameRate, List initializationData) {
+ float frameRate, List initializationData, @C.SelectionFlags int selectionFlags) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, width,
- height, frameRate, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, 0, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE, initializationData, null,
- null);
+ height, frameRate, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, selectionFlags, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
+ initializationData, null, null);
}
public static Format createVideoSampleFormat(String id, String sampleMimeType, String codecs,
@@ -212,17 +215,18 @@ public final class Format implements Parcelable {
DrmInitData drmInitData) {
return createVideoSampleFormat(id, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, initializationData, rotationDegrees, pixelWidthHeightRatio, null,
- NO_VALUE, drmInitData);
+ NO_VALUE, null, drmInitData);
}
public static Format createVideoSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, int maxInputSize, int width, int height, float frameRate,
List initializationData, int rotationDegrees, float pixelWidthHeightRatio,
- byte[] projectionData, @C.StereoMode int stereoMode, DrmInitData drmInitData) {
+ byte[] projectionData, @C.StereoMode int stereoMode, ColorInfo colorInfo,
+ DrmInitData drmInitData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, maxInputSize, width, height,
- frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, 0, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
- initializationData, drmInitData, null);
+ frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
+ colorInfo, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, 0, null, NO_VALUE,
+ OFFSET_SAMPLE_RELATIVE, initializationData, drmInitData, null);
}
// Audio.
@@ -231,8 +235,8 @@ public final class Format implements Parcelable {
String sampleMimeType, String codecs, int bitrate, int channelCount, int sampleRate,
List initializationData, @C.SelectionFlags int selectionFlags, String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, channelCount, sampleRate, NO_VALUE,
- NO_VALUE, NO_VALUE, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
+ NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, channelCount, sampleRate,
+ NO_VALUE, NO_VALUE, NO_VALUE, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
initializationData, null, null);
}
@@ -259,7 +263,7 @@ public final class Format implements Parcelable {
List initializationData, DrmInitData drmInitData,
@C.SelectionFlags int selectionFlags, String language, Metadata metadata) {
return new Format(id, null, sampleMimeType, codecs, bitrate, maxInputSize, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, channelCount, sampleRate, pcmEncoding,
+ NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, channelCount, sampleRate, pcmEncoding,
encoderDelay, encoderPadding, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE,
initializationData, drmInitData, metadata);
}
@@ -277,38 +281,39 @@ public final class Format implements Parcelable {
String sampleMimeType, String codecs, int bitrate, @C.SelectionFlags int selectionFlags,
String language, int accessibilityChannel) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, selectionFlags, language, accessibilityChannel,
+ NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, NO_VALUE, selectionFlags, language, accessibilityChannel,
OFFSET_SAMPLE_RELATIVE, null, null, null);
}
public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, @C.SelectionFlags int selectionFlags, String language, DrmInitData drmInitData) {
return createTextSampleFormat(id, sampleMimeType, codecs, bitrate, selectionFlags, language,
- NO_VALUE, drmInitData, OFFSET_SAMPLE_RELATIVE);
+ NO_VALUE, drmInitData, OFFSET_SAMPLE_RELATIVE, Collections.emptyList());
}
public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
- int bitrate, @C.SelectionFlags int selectionFlags, String language,
- int accessibilityChannel, DrmInitData drmInitData) {
+ int bitrate, @C.SelectionFlags int selectionFlags, String language, int accessibilityChannel,
+ DrmInitData drmInitData) {
return createTextSampleFormat(id, sampleMimeType, codecs, bitrate, selectionFlags, language,
- accessibilityChannel, drmInitData, OFFSET_SAMPLE_RELATIVE);
+ accessibilityChannel, drmInitData, OFFSET_SAMPLE_RELATIVE, Collections.emptyList());
}
public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, @C.SelectionFlags int selectionFlags, String language, DrmInitData drmInitData,
long subsampleOffsetUs) {
return createTextSampleFormat(id, sampleMimeType, codecs, bitrate, selectionFlags, language,
- NO_VALUE, drmInitData, subsampleOffsetUs);
+ NO_VALUE, drmInitData, subsampleOffsetUs, Collections.emptyList());
}
public static Format createTextSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, @C.SelectionFlags int selectionFlags, String language,
- int accessibilityChannel, DrmInitData drmInitData, long subsampleOffsetUs) {
+ int accessibilityChannel, DrmInitData drmInitData, long subsampleOffsetUs,
+ List initializationData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, selectionFlags, language, accessibilityChannel, subsampleOffsetUs, null,
- drmInitData, null);
+ NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, null);
}
// Image.
@@ -316,32 +321,41 @@ public final class Format implements Parcelable {
public static Format createImageSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, List initializationData, String language, DrmInitData drmInitData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, 0, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE, initializationData, drmInitData,
null);
}
// Generic.
- public static Format createContainerFormat(String id, String containerMimeType, String codecs,
- String sampleMimeType, int bitrate) {
+ public static Format createContainerFormat(String id, String containerMimeType,
+ String sampleMimeType, String codecs, int bitrate, @C.SelectionFlags int selectionFlags,
+ String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, 0, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE, null, null, null);
+ NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, selectionFlags, language, NO_VALUE, OFFSET_SAMPLE_RELATIVE, null, null,
+ null);
+ }
+
+ public static Format createSampleFormat(String id, String sampleMimeType,
+ long subsampleOffsetUs) {
+ return new Format(id, null, sampleMimeType, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, 0, null, NO_VALUE, subsampleOffsetUs, null, null, null);
}
public static Format createSampleFormat(String id, String sampleMimeType, String codecs,
int bitrate, DrmInitData drmInitData) {
return new Format(id, null, sampleMimeType, codecs, bitrate, NO_VALUE, NO_VALUE, NO_VALUE,
- NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
+ NO_VALUE, NO_VALUE, NO_VALUE, null, NO_VALUE, null, NO_VALUE, NO_VALUE, NO_VALUE, NO_VALUE,
NO_VALUE, 0, null, NO_VALUE, OFFSET_SAMPLE_RELATIVE, null, drmInitData, null);
}
/* package */ Format(String id, String containerMimeType, String sampleMimeType, String codecs,
int bitrate, int maxInputSize, int width, int height, float frameRate, int rotationDegrees,
float pixelWidthHeightRatio, byte[] projectionData, @C.StereoMode int stereoMode,
- int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding, int encoderDelay,
- int encoderPadding, @C.SelectionFlags int selectionFlags, String language,
+ ColorInfo colorInfo, int channelCount, int sampleRate, @C.PcmEncoding int pcmEncoding,
+ int encoderDelay, int encoderPadding, @C.SelectionFlags int selectionFlags, String language,
int accessibilityChannel, long subsampleOffsetUs, List initializationData,
DrmInitData drmInitData, Metadata metadata) {
this.id = id;
@@ -357,6 +371,7 @@ public final class Format implements Parcelable {
this.pixelWidthHeightRatio = pixelWidthHeightRatio;
this.projectionData = projectionData;
this.stereoMode = stereoMode;
+ this.colorInfo = colorInfo;
this.channelCount = channelCount;
this.sampleRate = sampleRate;
this.pcmEncoding = pcmEncoding;
@@ -388,6 +403,7 @@ public final class Format implements Parcelable {
boolean hasProjectionData = in.readInt() != 0;
projectionData = hasProjectionData ? in.createByteArray() : null;
stereoMode = in.readInt();
+ colorInfo = in.readParcelable(ColorInfo.class.getClassLoader());
channelCount = in.readInt();
sampleRate = in.readInt();
pcmEncoding = in.readInt();
@@ -409,67 +425,71 @@ public final class Format implements Parcelable {
public Format copyWithMaxInputSize(int maxInputSize) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
public Format copyWithSubsampleOffsetUs(long subsampleOffsetUs) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
public Format copyWithContainerInfo(String id, String codecs, int bitrate, int width, int height,
@C.SelectionFlags int selectionFlags, String language) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
- public Format copyWithManifestFormatInfo(Format manifestFormat,
- boolean preferManifestDrmInitData) {
+ @SuppressWarnings("ReferenceEquality")
+ public Format copyWithManifestFormatInfo(Format manifestFormat) {
+ if (this == manifestFormat) {
+ // No need to copy from ourselves.
+ return this;
+ }
String id = manifestFormat.id;
String codecs = this.codecs == null ? manifestFormat.codecs : this.codecs;
int bitrate = this.bitrate == NO_VALUE ? manifestFormat.bitrate : this.bitrate;
float frameRate = this.frameRate == NO_VALUE ? manifestFormat.frameRate : this.frameRate;
@C.SelectionFlags int selectionFlags = this.selectionFlags | manifestFormat.selectionFlags;
String language = this.language == null ? manifestFormat.language : this.language;
- DrmInitData drmInitData = (preferManifestDrmInitData && manifestFormat.drmInitData != null)
- || this.drmInitData == null ? manifestFormat.drmInitData : this.drmInitData;
+ DrmInitData drmInitData = manifestFormat.drmInitData != null ? manifestFormat.drmInitData
+ : this.drmInitData;
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize, width,
height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData, stereoMode,
- channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding, selectionFlags,
- language, accessibilityChannel, subsampleOffsetUs, initializationData, drmInitData,
- metadata);
+ colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
+ selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
+ drmInitData, metadata);
}
public Format copyWithGaplessInfo(int encoderDelay, int encoderPadding) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
public Format copyWithDrmInitData(DrmInitData drmInitData) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
public Format copyWithMetadata(Metadata metadata) {
return new Format(id, containerMimeType, sampleMimeType, codecs, bitrate, maxInputSize,
width, height, frameRate, rotationDegrees, pixelWidthHeightRatio, projectionData,
- stereoMode, channelCount, sampleRate, pcmEncoding, encoderDelay, encoderPadding,
- selectionFlags, language, accessibilityChannel, subsampleOffsetUs, initializationData,
- drmInitData, metadata);
+ stereoMode, colorInfo, channelCount, sampleRate, pcmEncoding, encoderDelay,
+ encoderPadding, selectionFlags, language, accessibilityChannel, subsampleOffsetUs,
+ initializationData, drmInitData, metadata);
}
/**
@@ -486,31 +506,29 @@ public final class Format implements Parcelable {
@SuppressLint("InlinedApi")
@TargetApi(16)
public final MediaFormat getFrameworkMediaFormatV16() {
- if (frameworkMediaFormat == null) {
- MediaFormat format = new MediaFormat();
- format.setString(MediaFormat.KEY_MIME, sampleMimeType);
- maybeSetStringV16(format, MediaFormat.KEY_LANGUAGE, language);
- maybeSetIntegerV16(format, MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
- maybeSetIntegerV16(format, MediaFormat.KEY_WIDTH, width);
- maybeSetIntegerV16(format, MediaFormat.KEY_HEIGHT, height);
- maybeSetFloatV16(format, MediaFormat.KEY_FRAME_RATE, frameRate);
- maybeSetIntegerV16(format, "rotation-degrees", rotationDegrees);
- maybeSetIntegerV16(format, MediaFormat.KEY_CHANNEL_COUNT, channelCount);
- maybeSetIntegerV16(format, MediaFormat.KEY_SAMPLE_RATE, sampleRate);
- maybeSetIntegerV16(format, "encoder-delay", encoderDelay);
- maybeSetIntegerV16(format, "encoder-padding", encoderPadding);
- for (int i = 0; i < initializationData.size(); i++) {
- format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
- }
- frameworkMediaFormat = format;
+ MediaFormat format = new MediaFormat();
+ format.setString(MediaFormat.KEY_MIME, sampleMimeType);
+ maybeSetStringV16(format, MediaFormat.KEY_LANGUAGE, language);
+ maybeSetIntegerV16(format, MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
+ maybeSetIntegerV16(format, MediaFormat.KEY_WIDTH, width);
+ maybeSetIntegerV16(format, MediaFormat.KEY_HEIGHT, height);
+ maybeSetFloatV16(format, MediaFormat.KEY_FRAME_RATE, frameRate);
+ maybeSetIntegerV16(format, "rotation-degrees", rotationDegrees);
+ maybeSetIntegerV16(format, MediaFormat.KEY_CHANNEL_COUNT, channelCount);
+ maybeSetIntegerV16(format, MediaFormat.KEY_SAMPLE_RATE, sampleRate);
+ maybeSetIntegerV16(format, "encoder-delay", encoderDelay);
+ maybeSetIntegerV16(format, "encoder-padding", encoderPadding);
+ for (int i = 0; i < initializationData.size(); i++) {
+ format.setByteBuffer("csd-" + i, ByteBuffer.wrap(initializationData.get(i)));
}
- return frameworkMediaFormat;
+ maybeSetColorInfoV24(format, colorInfo);
+ return format;
}
@Override
public String toString() {
return "Format(" + id + ", " + containerMimeType + ", " + sampleMimeType + ", " + bitrate + ", "
- + ", " + language + ", [" + width + ", " + height + ", " + frameRate + "]"
+ + language + ", [" + width + ", " + height + ", " + frameRate + "]"
+ ", [" + channelCount + ", " + sampleRate + "])";
}
@@ -560,6 +578,7 @@ public final class Format implements Parcelable {
|| !Util.areEqual(codecs, other.codecs)
|| !Util.areEqual(drmInitData, other.drmInitData)
|| !Util.areEqual(metadata, other.metadata)
+ || !Util.areEqual(colorInfo, other.colorInfo)
|| !Arrays.equals(projectionData, other.projectionData)
|| initializationData.size() != other.initializationData.size()) {
return false;
@@ -572,6 +591,17 @@ public final class Format implements Parcelable {
return true;
}
+ @TargetApi(24)
+ private static void maybeSetColorInfoV24(MediaFormat format, ColorInfo colorInfo) {
+ if (colorInfo == null) {
+ return;
+ }
+ maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_TRANSFER, colorInfo.colorTransfer);
+ maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_STANDARD, colorInfo.colorSpace);
+ maybeSetIntegerV16(format, MediaFormat.KEY_COLOR_RANGE, colorInfo.colorRange);
+ maybeSetByteBufferV16(format, MediaFormat.KEY_HDR_STATIC_INFO, colorInfo.hdrStaticInfo);
+ }
+
@TargetApi(16)
private static void maybeSetStringV16(MediaFormat format, String key, String value) {
if (value != null) {
@@ -593,6 +623,45 @@ public final class Format implements Parcelable {
}
}
+ @TargetApi(16)
+ private static void maybeSetByteBufferV16(MediaFormat format, String key, byte[] value) {
+ if (value != null) {
+ format.setByteBuffer(key, ByteBuffer.wrap(value));
+ }
+ }
+
+ // Utility methods
+
+ /**
+ * Returns a prettier {@link String} than {@link #toString()}, intended for logging.
+ */
+ public static String toLogString(Format format) {
+ if (format == null) {
+ return "null";
+ }
+ StringBuilder builder = new StringBuilder();
+ builder.append("id=").append(format.id).append(", mimeType=").append(format.sampleMimeType);
+ if (format.bitrate != Format.NO_VALUE) {
+ builder.append(", bitrate=").append(format.bitrate);
+ }
+ if (format.width != Format.NO_VALUE && format.height != Format.NO_VALUE) {
+ builder.append(", res=").append(format.width).append("x").append(format.height);
+ }
+ if (format.frameRate != Format.NO_VALUE) {
+ builder.append(", fps=").append(format.frameRate);
+ }
+ if (format.channelCount != Format.NO_VALUE) {
+ builder.append(", channels=").append(format.channelCount);
+ }
+ if (format.sampleRate != Format.NO_VALUE) {
+ builder.append(", sample_rate=").append(format.sampleRate);
+ }
+ if (format.language != null) {
+ builder.append(", language=").append(format.language);
+ }
+ return builder.toString();
+ }
+
// Parcelable implementation.
@Override
@@ -618,6 +687,7 @@ public final class Format implements Parcelable {
dest.writeByteArray(projectionData);
}
dest.writeInt(stereoMode);
+ dest.writeParcelable(colorInfo, flags);
dest.writeInt(channelCount);
dest.writeInt(sampleRate);
dest.writeInt(pcmEncoding);
@@ -636,9 +706,6 @@ public final class Format implements Parcelable {
dest.writeParcelable(metadata, 0);
}
- /**
- * {@link Creator} implementation.
- */
public static final Creator CREATOR = new Creator() {
@Override
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/IllegalSeekPositionException.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/IllegalSeekPositionException.java
new file mode 100755
index 000000000..b6de1d66b
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/IllegalSeekPositionException.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2;
+
+/**
+ * Thrown when an attempt is made to seek to a position that does not exist in the player's
+ * {@link Timeline}.
+ */
+public final class IllegalSeekPositionException extends IllegalStateException {
+
+ /**
+ * The {@link Timeline} in which the seek was attempted.
+ */
+ public final Timeline timeline;
+ /**
+ * The index of the window being seeked to.
+ */
+ public final int windowIndex;
+ /**
+ * The seek position in the specified window.
+ */
+ public final long positionMs;
+
+ /**
+ * @param timeline The {@link Timeline} in which the seek was attempted.
+ * @param windowIndex The index of the window being seeked to.
+ * @param positionMs The seek position in the specified window.
+ */
+ public IllegalSeekPositionException(Timeline timeline, int windowIndex, long positionMs) {
+ this.timeline = timeline;
+ this.windowIndex = windowIndex;
+ this.positionMs = positionMs;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/PlaybackParameters.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/PlaybackParameters.java
new file mode 100755
index 000000000..511820a97
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/PlaybackParameters.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2;
+
+/**
+ * The parameters that apply to playback.
+ */
+public final class PlaybackParameters {
+
+ /**
+ * The default playback parameters: real-time playback with no pitch modification.
+ */
+ public static final PlaybackParameters DEFAULT = new PlaybackParameters(1f, 1f);
+
+ /**
+ * The factor by which playback will be sped up.
+ */
+ public final float speed;
+
+ /**
+ * The factor by which the audio pitch will be scaled.
+ */
+ public final float pitch;
+
+ private final int scaledUsPerMs;
+
+ /**
+ * Creates new playback parameters.
+ *
+ * @param speed The factor by which playback will be sped up.
+ * @param pitch The factor by which the audio pitch will be scaled.
+ */
+ public PlaybackParameters(float speed, float pitch) {
+ this.speed = speed;
+ this.pitch = pitch;
+ scaledUsPerMs = Math.round(speed * 1000f);
+ }
+
+ /**
+ * Scales the millisecond duration {@code timeMs} by the playback speed, returning the result in
+ * microseconds.
+ *
+ * @param timeMs The time to scale, in milliseconds.
+ * @return The scaled time, in microseconds.
+ */
+ public long getSpeedAdjustedDurationUs(long timeMs) {
+ return timeMs * scaledUsPerMs;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ PlaybackParameters other = (PlaybackParameters) obj;
+ return this.speed == other.speed && this.pitch == other.pitch;
+ }
+
+ @Override
+ public int hashCode() {
+ int result = 17;
+ result = 31 * result + Float.floatToRawIntBits(speed);
+ result = 31 * result + Float.floatToRawIntBits(pitch);
+ return result;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Renderer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Renderer.java
index 2c244451b..6b1fc261c 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Renderer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Renderer.java
@@ -92,6 +92,7 @@ public interface Renderer extends ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_DISABLED}.
*
+ * @param configuration The renderer configuration.
* @param formats The enabled formats.
* @param stream The {@link SampleStream} from which the renderer should consume.
* @param positionUs The player's current position.
@@ -100,8 +101,8 @@ public interface Renderer extends ExoPlayerComponent {
* before they are rendered.
* @throws ExoPlaybackException If an error occurs.
*/
- void enable(Format[] formats, SampleStream stream, long positionUs, boolean joining,
- long offsetUs) throws ExoPlaybackException;
+ void enable(RendererConfiguration configuration, Format[] formats, SampleStream stream,
+ long positionUs, boolean joining, long offsetUs) throws ExoPlaybackException;
/**
* Starts the renderer, meaning that calls to {@link #render(long, long)} will cause media to be
@@ -149,7 +150,13 @@ public interface Renderer extends ExoPlayerComponent {
* This method may be called when the renderer is in the following states:
* {@link #STATE_ENABLED}, {@link #STATE_STARTED}.
*/
- void setCurrentStreamIsFinal();
+ void setCurrentStreamFinal();
+
+ /**
+ * Returns whether the current {@link SampleStream} will be the final one supplied before the
+ * renderer is next disabled or reset.
+ */
+ boolean isCurrentStreamFinal();
/**
* Throws an error that's preventing the renderer from reading from its {@link SampleStream}. Does
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererCapabilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererCapabilities.java
index 572968c2f..b4f25651d 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererCapabilities.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererCapabilities.java
@@ -79,6 +79,20 @@ public interface RendererCapabilities {
*/
int ADAPTIVE_NOT_SUPPORTED = 0b0000;
+ /**
+ * A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of
+ * {@link #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}.
+ */
+ int TUNNELING_SUPPORT_MASK = 0b10000;
+ /**
+ * The {@link Renderer} supports tunneled output.
+ */
+ int TUNNELING_SUPPORTED = 0b10000;
+ /**
+ * The {@link Renderer} does not support tunneled output.
+ */
+ int TUNNELING_NOT_SUPPORTED = 0b00000;
+
/**
* Returns the track type that the {@link Renderer} handles. For example, a video renderer will
* return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will return {@link C#TRACK_TYPE_AUDIO}, a
@@ -91,7 +105,7 @@ public interface RendererCapabilities {
/**
* Returns the extent to which the {@link Renderer} supports a given format. The returned value is
- * the bitwise OR of two properties:
+ * the bitwise OR of three properties:
*
* - The level of support for the format itself. One of {@link #FORMAT_HANDLED},
* {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_SUBTYPE} and
@@ -99,9 +113,12 @@ public interface RendererCapabilities {
*
- The level of support for adapting from the format to another format of the same mime type.
* One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and
* {@link #ADAPTIVE_NOT_SUPPORTED}.
+ * - The level of support for tunneling. One of {@link #TUNNELING_SUPPORTED} and
+ * {@link #TUNNELING_NOT_SUPPORTED}.
*
* The individual properties can be retrieved by performing a bitwise AND with
- * {@link #FORMAT_SUPPORT_MASK} and {@link #ADAPTIVE_SUPPORT_MASK} respectively.
+ * {@link #FORMAT_SUPPORT_MASK}, {@link #ADAPTIVE_SUPPORT_MASK} and
+ * {@link #TUNNELING_SUPPORT_MASK} respectively.
*
* @param format The format.
* @return The extent to which the renderer is capable of supporting the given format.
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererConfiguration.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererConfiguration.java
new file mode 100755
index 000000000..2c46a8031
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RendererConfiguration.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2;
+
+/**
+ * The configuration of a {@link Renderer}.
+ */
+public final class RendererConfiguration {
+
+ /**
+ * The default configuration.
+ */
+ public static final RendererConfiguration DEFAULT =
+ new RendererConfiguration(C.AUDIO_SESSION_ID_UNSET);
+
+ /**
+ * The audio session id to use for tunneling, or {@link C#AUDIO_SESSION_ID_UNSET} if tunneling
+ * should not be enabled.
+ */
+ public final int tunnelingAudioSessionId;
+
+ /**
+ * @param tunnelingAudioSessionId The audio session id to use for tunneling, or
+ * {@link C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
+ */
+ public RendererConfiguration(int tunnelingAudioSessionId) {
+ this.tunnelingAudioSessionId = tunnelingAudioSessionId;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ RendererConfiguration other = (RendererConfiguration) obj;
+ return tunnelingAudioSessionId == other.tunnelingAudioSessionId;
+ }
+
+ @Override
+ public int hashCode() {
+ return tunnelingAudioSessionId;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RenderersFactory.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RenderersFactory.java
new file mode 100755
index 000000000..b134d7eab
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/RenderersFactory.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2;
+
+import android.os.Handler;
+import org.telegram.messenger.exoplayer2.audio.AudioRendererEventListener;
+import org.telegram.messenger.exoplayer2.metadata.MetadataRenderer;
+import org.telegram.messenger.exoplayer2.text.TextRenderer;
+import org.telegram.messenger.exoplayer2.video.VideoRendererEventListener;
+
+/**
+ * Builds {@link Renderer} instances for use by a {@link SimpleExoPlayer}.
+ */
+public interface RenderersFactory {
+
+ /**
+ * Builds the {@link Renderer} instances for a {@link SimpleExoPlayer}.
+ *
+ * @param eventHandler A handler to use when invoking event listeners and outputs.
+ * @param videoRendererEventListener An event listener for video renderers.
+ * @param audioRendererEventListener An event listener for audio renderers.
+ * @param textRendererOutput An output for text renderers.
+ * @param metadataRendererOutput An output for metadata renderers.
+ * @return The {@link Renderer} instances.
+ */
+ Renderer[] createRenderers(Handler eventHandler,
+ VideoRendererEventListener videoRendererEventListener,
+ AudioRendererEventListener audioRendererEventListener,
+ TextRenderer.Output textRendererOutput, MetadataRenderer.Output metadataRendererOutput);
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/SimpleExoPlayer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/SimpleExoPlayer.java
index e8d9163b4..7755a54c3 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/SimpleExoPlayer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/SimpleExoPlayer.java
@@ -16,40 +16,27 @@
package org.telegram.messenger.exoplayer2;
import android.annotation.TargetApi;
-import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler;
-import android.support.annotation.IntDef;
+import android.support.annotation.Nullable;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
-import org.telegram.messenger.exoplayer2.audio.AudioCapabilities;
import org.telegram.messenger.exoplayer2.audio.AudioRendererEventListener;
-import org.telegram.messenger.exoplayer2.audio.AudioTrack;
-import org.telegram.messenger.exoplayer2.audio.MediaCodecAudioRenderer;
import org.telegram.messenger.exoplayer2.decoder.DecoderCounters;
-import org.telegram.messenger.exoplayer2.drm.DrmSessionManager;
-import org.telegram.messenger.exoplayer2.drm.FrameworkMediaCrypto;
-import org.telegram.messenger.exoplayer2.mediacodec.MediaCodecSelector;
import org.telegram.messenger.exoplayer2.metadata.Metadata;
import org.telegram.messenger.exoplayer2.metadata.MetadataRenderer;
-import org.telegram.messenger.exoplayer2.metadata.id3.Id3Decoder;
import org.telegram.messenger.exoplayer2.source.MediaSource;
import org.telegram.messenger.exoplayer2.source.TrackGroupArray;
import org.telegram.messenger.exoplayer2.text.Cue;
import org.telegram.messenger.exoplayer2.text.TextRenderer;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelectionArray;
import org.telegram.messenger.exoplayer2.trackselection.TrackSelector;
-import org.telegram.messenger.exoplayer2.video.MediaCodecVideoRenderer;
import org.telegram.messenger.exoplayer2.video.VideoRendererEventListener;
-import java.lang.annotation.Retention;
-import java.lang.annotation.RetentionPolicy;
-import java.lang.reflect.Constructor;
-import java.util.ArrayList;
import java.util.List;
/**
@@ -93,38 +80,12 @@ public class SimpleExoPlayer implements ExoPlayer {
void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture);
}
- /**
- * Modes for using extension renderers.
- */
- @Retention(RetentionPolicy.SOURCE)
- @IntDef({EXTENSION_RENDERER_MODE_OFF, EXTENSION_RENDERER_MODE_ON, EXTENSION_RENDERER_MODE_PREFER})
- public @interface ExtensionRendererMode {}
- /**
- * Do not allow use of extension renderers.
- */
- public static final int EXTENSION_RENDERER_MODE_OFF = 0;
- /**
- * Allow use of extension renderers. Extension renderers are indexed after core renderers of the
- * same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
- * prefer to use a core renderer to an extension renderer in the case that both are able to play
- * a given track.
- */
- public static final int EXTENSION_RENDERER_MODE_ON = 1;
- /**
- * Allow use of extension renderers. Extension renderers are indexed before core renderers of the
- * same type. A {@link TrackSelector} that prefers the first suitable renderer will therefore
- * prefer to use an extension renderer to a core renderer in the case that both are able to play
- * a given track.
- */
- public static final int EXTENSION_RENDERER_MODE_PREFER = 2;
-
private static final String TAG = "SimpleExoPlayer";
- protected static final int MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY = 50;
+
+ protected final Renderer[] renderers;
private final ExoPlayer player;
- private final Renderer[] renderers;
private final ComponentListener componentListener;
- private final Handler mainHandler;
private final int videoRendererCount;
private final int audioRendererCount;
@@ -150,19 +111,12 @@ public class SimpleExoPlayer implements ExoPlayer {
@C.StreamType
private int audioStreamType;
private float audioVolume;
- private PlaybackParamsHolder playbackParamsHolder;
- protected SimpleExoPlayer(Context context, TrackSelector trackSelector, LoadControl loadControl,
- DrmSessionManager drmSessionManager,
- @ExtensionRendererMode int extensionRendererMode, long allowedVideoJoiningTimeMs) {
- mainHandler = new Handler();
+ protected SimpleExoPlayer(RenderersFactory renderersFactory, TrackSelector trackSelector,
+ LoadControl loadControl) {
componentListener = new ComponentListener();
-
- // Build the renderers.
- ArrayList renderersList = new ArrayList<>();
- buildRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
- allowedVideoJoiningTimeMs, renderersList);
- renderers = renderersList.toArray(new Renderer[renderersList.size()]);
+ renderers = renderersFactory.createRenderers(new Handler(), componentListener,
+ componentListener, componentListener, componentListener);
// Obtain counts of video and audio renderers.
int videoRendererCount = 0;
@@ -182,7 +136,7 @@ public class SimpleExoPlayer implements ExoPlayer {
// Set initial values.
audioVolume = 1;
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
audioStreamType = C.STREAM_TYPE_DEFAULT;
videoScalingMode = C.VIDEO_SCALING_MODE_DEFAULT;
@@ -244,6 +198,18 @@ public class SimpleExoPlayer implements ExoPlayer {
setVideoSurfaceInternal(surface, false);
}
+ /**
+ * Clears the {@link Surface} onto which video is being rendered if it matches the one passed.
+ * Else does nothing.
+ *
+ * @param surface The surface to clear.
+ */
+ public void clearVideoSurface(Surface surface) {
+ if (surface != null && surface == this.surface) {
+ setVideoSurface(null);
+ }
+ }
+
/**
* Sets the {@link SurfaceHolder} that holds the {@link Surface} onto which video will be
* rendered. The player will track the lifecycle of the surface automatically.
@@ -261,6 +227,18 @@ public class SimpleExoPlayer implements ExoPlayer {
}
}
+ /**
+ * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
+ * rendered if it matches the one passed. Else does nothing.
+ *
+ * @param surfaceHolder The surface holder to clear.
+ */
+ public void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder) {
+ if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) {
+ setVideoSurfaceHolder(null);
+ }
+ }
+
/**
* Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
* lifecycle of the surface automatically.
@@ -268,7 +246,17 @@ public class SimpleExoPlayer implements ExoPlayer {
* @param surfaceView The surface view.
*/
public void setVideoSurfaceView(SurfaceView surfaceView) {
- setVideoSurfaceHolder(surfaceView.getHolder());
+ setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
+ }
+
+ /**
+ * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one passed.
+ * Else does nothing.
+ *
+ * @param surfaceView The surface view to clear.
+ */
+ public void clearVideoSurfaceView(SurfaceView surfaceView) {
+ clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
}
/**
@@ -277,9 +265,13 @@ public class SimpleExoPlayer implements ExoPlayer {
*
* @param textureView The texture view.
*/
- public ComponentListener setVideoTextureView(TextureView textureView) {
+ public void setVideoTextureView(TextureView textureView) {
+ if (this.textureView == textureView) {
+ return;
+ }
removeSurfaceCallbacks();
this.textureView = textureView;
+ needSetSurface = true;
if (textureView == null) {
setVideoSurfaceInternal(null, true);
} else {
@@ -288,13 +280,20 @@ public class SimpleExoPlayer implements ExoPlayer {
}
SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
setVideoSurfaceInternal(surfaceTexture == null ? null : new Surface(surfaceTexture), true);
- if (surfaceTexture != null) {
- needSetSurface = false;
- }
-
textureView.setSurfaceTextureListener(componentListener);
}
- return componentListener;
+ }
+
+ /**
+ * Clears the {@link TextureView} onto which video is being rendered if it matches the one passed.
+ * Else does nothing.
+ *
+ * @param textureView The texture view to clear.
+ */
+ public void clearVideoTextureView(TextureView textureView) {
+ if (textureView != null && textureView == this.textureView) {
+ setVideoTextureView(null);
+ }
}
/**
@@ -354,37 +353,20 @@ public class SimpleExoPlayer implements ExoPlayer {
/**
* Sets the {@link PlaybackParams} governing audio playback.
*
+ * @deprecated Use {@link #setPlaybackParameters(PlaybackParameters)}.
* @param params The {@link PlaybackParams}, or null to clear any previously set parameters.
*/
+ @Deprecated
@TargetApi(23)
- public void setPlaybackParams(PlaybackParams params) {
+ public void setPlaybackParams(@Nullable PlaybackParams params) {
+ PlaybackParameters playbackParameters;
if (params != null) {
- // The audio renderers will call this on the playback thread to ensure they can query
- // parameters without failure. We do the same up front, which is redundant except that it
- // ensures an immediate call to getPlaybackParams will retrieve the instance with defaults
- // allowed, rather than this change becoming visible sometime later once the audio renderers
- // receive the parameters.
params.allowDefaults();
- playbackParamsHolder = new PlaybackParamsHolder(params);
+ playbackParameters = new PlaybackParameters(params.getSpeed(), params.getPitch());
} else {
- playbackParamsHolder = null;
+ playbackParameters = null;
}
- ExoPlayerMessage[] messages = new ExoPlayerMessage[audioRendererCount];
- int count = 0;
- for (Renderer renderer : renderers) {
- if (renderer.getTrackType() == C.TRACK_TYPE_AUDIO) {
- messages[count++] = new ExoPlayerMessage(renderer, C.MSG_SET_PLAYBACK_PARAMS, params);
- }
- }
- player.sendMessages(messages);
- }
-
- /**
- * Returns the {@link PlaybackParams} governing audio playback, or null if not set.
- */
- @TargetApi(23)
- public PlaybackParams getPlaybackParams() {
- return playbackParamsHolder == null ? null : playbackParamsHolder.params;
+ setPlaybackParameters(playbackParameters);
}
/**
@@ -402,7 +384,7 @@ public class SimpleExoPlayer implements ExoPlayer {
}
/**
- * Returns the audio session identifier, or {@code AudioTrack.SESSION_ID_NOT_SET} if not set.
+ * Returns the audio session identifier, or {@link C#AUDIO_SESSION_ID_UNSET} if not set.
*/
public int getAudioSessionId() {
return audioSessionId;
@@ -431,6 +413,57 @@ public class SimpleExoPlayer implements ExoPlayer {
videoListener = listener;
}
+ /**
+ * Clears the listener receiving video events if it matches the one passed. Else does nothing.
+ *
+ * @param listener The listener to clear.
+ */
+ public void clearVideoListener(VideoListener listener) {
+ if (videoListener == listener) {
+ videoListener = null;
+ }
+ }
+
+ /**
+ * Sets an output to receive text events.
+ *
+ * @param output The output.
+ */
+ public void setTextOutput(TextRenderer.Output output) {
+ textOutput = output;
+ }
+
+ /**
+ * Clears the output receiving text events if it matches the one passed. Else does nothing.
+ *
+ * @param output The output to clear.
+ */
+ public void clearTextOutput(TextRenderer.Output output) {
+ if (textOutput == output) {
+ textOutput = null;
+ }
+ }
+
+ /**
+ * Sets a listener to receive metadata events.
+ *
+ * @param output The output.
+ */
+ public void setMetadataOutput(MetadataRenderer.Output output) {
+ metadataOutput = output;
+ }
+
+ /**
+ * Clears the output receiving metadata events if it matches the one passed. Else does nothing.
+ *
+ * @param output The output to clear.
+ */
+ public void clearMetadataOutput(MetadataRenderer.Output output) {
+ if (metadataOutput == output) {
+ metadataOutput = null;
+ }
+ }
+
/**
* Sets a listener to receive debug events from the video renderer.
*
@@ -449,33 +482,6 @@ public class SimpleExoPlayer implements ExoPlayer {
audioDebugListener = listener;
}
- /**
- * Sets an output to receive text events.
- *
- * @param output The output.
- */
- public void setTextOutput(TextRenderer.Output output) {
- textOutput = output;
- }
-
- /**
- * @deprecated Use {@link #setMetadataOutput(MetadataRenderer.Output)} instead.
- * @param output The output.
- */
- @Deprecated
- public void setId3Output(MetadataRenderer.Output output) {
- setMetadataOutput(output);
- }
-
- /**
- * Sets a listener to receive metadata events.
- *
- * @param output The output.
- */
- public void setMetadataOutput(MetadataRenderer.Output output) {
- metadataOutput = output;
- }
-
// ExoPlayer implementation
@Override
@@ -499,8 +505,8 @@ public class SimpleExoPlayer implements ExoPlayer {
}
@Override
- public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetTimeline) {
- player.prepare(mediaSource, resetPosition, resetTimeline);
+ public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
+ player.prepare(mediaSource, resetPosition, resetState);
}
@Override
@@ -538,6 +544,16 @@ public class SimpleExoPlayer implements ExoPlayer {
player.seekTo(windowIndex, positionMs);
}
+ @Override
+ public void setPlaybackParameters(PlaybackParameters playbackParameters) {
+ player.setPlaybackParameters(playbackParameters);
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return player.getPlaybackParameters();
+ }
+
@Override
public void stop() {
player.stop();
@@ -565,6 +581,36 @@ public class SimpleExoPlayer implements ExoPlayer {
player.blockingSendMessages(messages);
}
+ @Override
+ public int getRendererCount() {
+ return player.getRendererCount();
+ }
+
+ @Override
+ public int getRendererType(int index) {
+ return player.getRendererType(index);
+ }
+
+ @Override
+ public TrackGroupArray getCurrentTrackGroups() {
+ return player.getCurrentTrackGroups();
+ }
+
+ @Override
+ public TrackSelectionArray getCurrentTrackSelections() {
+ return player.getCurrentTrackSelections();
+ }
+
+ @Override
+ public Timeline getCurrentTimeline() {
+ return player.getCurrentTimeline();
+ }
+
+ @Override
+ public Object getCurrentManifest() {
+ return player.getCurrentManifest();
+ }
+
@Override
public int getCurrentPeriodIndex() {
return player.getCurrentPeriodIndex();
@@ -596,205 +642,13 @@ public class SimpleExoPlayer implements ExoPlayer {
}
@Override
- public int getRendererCount() {
- return player.getRendererCount();
+ public boolean isCurrentWindowDynamic() {
+ return player.isCurrentWindowDynamic();
}
@Override
- public int getRendererType(int index) {
- return player.getRendererType(index);
- }
-
- @Override
- public TrackGroupArray getCurrentTrackGroups() {
- return player.getCurrentTrackGroups();
- }
-
- @Override
- public TrackSelectionArray getCurrentTrackSelections() {
- return player.getCurrentTrackSelections();
- }
-
- @Override
- public Timeline getCurrentTimeline() {
- return player.getCurrentTimeline();
- }
-
- @Override
- public Object getCurrentManifest() {
- return player.getCurrentManifest();
- }
-
- // Renderer building.
-
- private void buildRenderers(Context context, Handler mainHandler,
- DrmSessionManager drmSessionManager,
- @ExtensionRendererMode int extensionRendererMode, long allowedVideoJoiningTimeMs,
- ArrayList out) {
- buildVideoRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
- componentListener, allowedVideoJoiningTimeMs, out);
- buildAudioRenderers(context, mainHandler, drmSessionManager, extensionRendererMode,
- componentListener, out);
- buildTextRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
- buildMetadataRenderers(context, mainHandler, extensionRendererMode, componentListener, out);
- buildMiscellaneousRenderers(context, mainHandler, extensionRendererMode, out);
- }
-
- /**
- * Builds video renderers for use by the player.
- *
- * @param context The {@link Context} associated with the player.
- * @param mainHandler A handler associated with the main thread's looper.
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
- * not be used for DRM protected playbacks.
- * @param extensionRendererMode The extension renderer mode.
- * @param eventListener An event listener.
- * @param allowedVideoJoiningTimeMs The maximum duration in milliseconds for which video renderers
- * can attempt to seamlessly join an ongoing playback.
- * @param out An array to which the built renderers should be appended.
- */
- protected void buildVideoRenderers(Context context, Handler mainHandler,
- DrmSessionManager drmSessionManager,
- @ExtensionRendererMode int extensionRendererMode, VideoRendererEventListener eventListener,
- long allowedVideoJoiningTimeMs, ArrayList out) {
- out.add(new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT,
- allowedVideoJoiningTimeMs, drmSessionManager, false, mainHandler, eventListener,
- MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
-
- if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
- return;
- }
- int extensionRendererIndex = out.size();
- if (extensionRendererMode == EXTENSION_RENDERER_MODE_PREFER) {
- extensionRendererIndex--;
- }
-
- try {
- Class> clazz =
- Class.forName("org.telegram.messenger.exoplayer2.ext.vp9.LibvpxVideoRenderer");
- Constructor> constructor = clazz.getConstructor(boolean.class, long.class, Handler.class,
- VideoRendererEventListener.class, int.class);
- Renderer renderer = (Renderer) constructor.newInstance(true, allowedVideoJoiningTimeMs,
- mainHandler, componentListener, MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
- out.add(extensionRendererIndex++, renderer);
- Log.i(TAG, "Loaded LibvpxVideoRenderer.");
- } catch (ClassNotFoundException e) {
- // Expected if the app was built without the extension.
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Builds audio renderers for use by the player.
- *
- * @param context The {@link Context} associated with the player.
- * @param mainHandler A handler associated with the main thread's looper.
- * @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the player will
- * not be used for DRM protected playbacks.
- * @param extensionRendererMode The extension renderer mode.
- * @param eventListener An event listener.
- * @param out An array to which the built renderers should be appended.
- */
- protected void buildAudioRenderers(Context context, Handler mainHandler,
- DrmSessionManager drmSessionManager,
- @ExtensionRendererMode int extensionRendererMode, AudioRendererEventListener eventListener,
- ArrayList out) {
- out.add(new MediaCodecAudioRenderer(MediaCodecSelector.DEFAULT, drmSessionManager, true,
- mainHandler, eventListener, AudioCapabilities.getCapabilities(context)));
-
- if (extensionRendererMode == EXTENSION_RENDERER_MODE_OFF) {
- return;
- }
- int extensionRendererIndex = out.size();
- if (extensionRendererMode == EXTENSION_RENDERER_MODE_PREFER) {
- extensionRendererIndex--;
- }
-
- try {
- Class> clazz =
- Class.forName("org.telegram.messenger.exoplayer2.ext.opus.LibopusAudioRenderer");
- Constructor> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class);
- Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener);
- out.add(extensionRendererIndex++, renderer);
- Log.i(TAG, "Loaded LibopusAudioRenderer.");
- } catch (ClassNotFoundException e) {
- // Expected if the app was built without the extension.
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- try {
- Class> clazz =
- Class.forName("org.telegram.messenger.exoplayer2.ext.flac.LibflacAudioRenderer");
- Constructor> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class);
- Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener);
- out.add(extensionRendererIndex++, renderer);
- Log.i(TAG, "Loaded LibflacAudioRenderer.");
- } catch (ClassNotFoundException e) {
- // Expected if the app was built without the extension.
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- try {
- Class> clazz =
- Class.forName("org.telegram.messenger.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer");
- Constructor> constructor = clazz.getConstructor(Handler.class,
- AudioRendererEventListener.class);
- Renderer renderer = (Renderer) constructor.newInstance(mainHandler, componentListener);
- out.add(extensionRendererIndex++, renderer);
- Log.i(TAG, "Loaded FfmpegAudioRenderer.");
- } catch (ClassNotFoundException e) {
- // Expected if the app was built without the extension.
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Builds text renderers for use by the player.
- *
- * @param context The {@link Context} associated with the player.
- * @param mainHandler A handler associated with the main thread's looper.
- * @param extensionRendererMode The extension renderer mode.
- * @param output An output for the renderers.
- * @param out An array to which the built renderers should be appended.
- */
- protected void buildTextRenderers(Context context, Handler mainHandler,
- @ExtensionRendererMode int extensionRendererMode, TextRenderer.Output output,
- ArrayList out) {
- out.add(new TextRenderer(output, mainHandler.getLooper()));
- }
-
- /**
- * Builds metadata renderers for use by the player.
- *
- * @param context The {@link Context} associated with the player.
- * @param mainHandler A handler associated with the main thread's looper.
- * @param extensionRendererMode The extension renderer mode.
- * @param output An output for the renderers.
- * @param out An array to which the built renderers should be appended.
- */
- protected void buildMetadataRenderers(Context context, Handler mainHandler,
- @ExtensionRendererMode int extensionRendererMode, MetadataRenderer.Output output,
- ArrayList out) {
- out.add(new MetadataRenderer(output, mainHandler.getLooper(), new Id3Decoder()));
- }
-
- /**
- * Builds any miscellaneous renderers used by the player.
- *
- * @param context The {@link Context} associated with the player.
- * @param mainHandler A handler associated with the main thread's looper.
- * @param extensionRendererMode The extension renderer mode.
- * @param out An array to which the built renderers should be appended.
- */
- protected void buildMiscellaneousRenderers(Context context, Handler mainHandler,
- @ExtensionRendererMode int extensionRendererMode, ArrayList out) {
- // Do nothing.
+ public boolean isCurrentWindowSeekable() {
+ return player.isCurrentWindowSeekable();
}
// Internal methods.
@@ -838,11 +692,7 @@ public class SimpleExoPlayer implements ExoPlayer {
this.ownsSurface = ownsSurface;
}
- public ComponentListener getComponentListener() {
- return componentListener;
- }
-
- public final class ComponentListener implements VideoRendererEventListener,
+ private final class ComponentListener implements VideoRendererEventListener,
AudioRendererEventListener, TextRenderer.Output, MetadataRenderer.Output,
SurfaceHolder.Callback, TextureView.SurfaceTextureListener {
@@ -962,7 +812,7 @@ public class SimpleExoPlayer implements ExoPlayer {
}
audioFormat = null;
audioDecoderCounters = null;
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
}
// TextRenderer.Output implementation
@@ -1032,15 +882,4 @@ public class SimpleExoPlayer implements ExoPlayer {
}
- @TargetApi(23)
- private static final class PlaybackParamsHolder {
-
- public final PlaybackParams params;
-
- public PlaybackParamsHolder(PlaybackParams params) {
- this.params = params;
- }
-
- }
-
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Timeline.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Timeline.java
index a99c92452..3adbaaf55 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Timeline.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/Timeline.java
@@ -262,9 +262,24 @@ public abstract class Timeline {
*/
public int lastPeriodIndex;
- private long defaultPositionUs;
- private long durationUs;
- private long positionInFirstPeriodUs;
+ /**
+ * The default position relative to the start of the window at which to begin playback, in
+ * microseconds. May be {@link C#TIME_UNSET} if and only if the window was populated with a
+ * non-zero default position projection, and if the specified projection cannot be performed
+ * whilst remaining within the bounds of the window.
+ */
+ public long defaultPositionUs;
+
+ /**
+ * The duration of this window in microseconds, or {@link C#TIME_UNSET} if unknown.
+ */
+ public long durationUs;
+
+ /**
+ * The position of the start of this window relative to the start of the first period belonging
+ * to it, in microseconds.
+ */
+ public long positionInFirstPeriodUs;
/**
* Sets the data held by this window.
@@ -363,19 +378,29 @@ public abstract class Timeline {
*/
public int windowIndex;
- private long durationUs;
+ /**
+ * The duration of this period in microseconds, or {@link C#TIME_UNSET} if unknown.
+ */
+ public long durationUs;
+
+ /**
+ * Whether this period contains an ad.
+ */
+ public boolean isAd;
+
private long positionInWindowUs;
/**
* Sets the data held by this period.
*/
public Period set(Object id, Object uid, int windowIndex, long durationUs,
- long positionInWindowUs) {
+ long positionInWindowUs, boolean isAd) {
this.id = id;
this.uid = uid;
this.windowIndex = windowIndex;
this.durationUs = durationUs;
this.positionInWindowUs = positionInWindowUs;
+ this.isAd = isAd;
return this;
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Ac3Util.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Ac3Util.java
index 73269614c..98ec9e081 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Ac3Util.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Ac3Util.java
@@ -28,6 +28,44 @@ import java.nio.ByteBuffer;
*/
public final class Ac3Util {
+ /**
+ * Holds sample format information as presented by a syncframe header.
+ */
+ public static final class Ac3SyncFrameInfo {
+
+ /**
+ * The sample mime type of the bitstream. One of {@link MimeTypes#AUDIO_AC3} and
+ * {@link MimeTypes#AUDIO_E_AC3}.
+ */
+ public final String mimeType;
+ /**
+ * The audio sampling rate in Hz.
+ */
+ public final int sampleRate;
+ /**
+ * The number of audio channels
+ */
+ public final int channelCount;
+ /**
+ * The size of the frame.
+ */
+ public final int frameSize;
+ /**
+ * Number of audio samples in the frame.
+ */
+ public final int sampleCount;
+
+ private Ac3SyncFrameInfo(String mimeType, int channelCount, int sampleRate, int frameSize,
+ int sampleCount) {
+ this.mimeType = mimeType;
+ this.channelCount = channelCount;
+ this.sampleRate = sampleRate;
+ this.frameSize = frameSize;
+ this.sampleCount = sampleCount;
+ }
+
+ }
+
/**
* The number of new samples per (E-)AC-3 audio block.
*/
@@ -114,62 +152,61 @@ public final class Ac3Util {
}
/**
- * Returns the AC-3 format given {@code data} containing a syncframe. The reading position of
- * {@code data} will be modified.
+ * Returns (E-)AC-3 format information given {@code data} containing a syncframe. The reading
+ * position of {@code data} will be modified.
*
* @param data The data to parse, positioned at the start of the syncframe.
- * @param trackId The track identifier to set on the format, or null.
- * @param language The language to set on the format.
- * @param drmInitData {@link DrmInitData} to be included in the format.
- * @return The AC-3 format parsed from data in the header.
+ * @return The (E-)AC-3 format data parsed from the header.
*/
- public static Format parseAc3SyncframeFormat(ParsableBitArray data, String trackId,
- String language, DrmInitData drmInitData) {
- data.skipBits(16 + 16); // syncword, crc1
- int fscod = data.readBits(2);
- data.skipBits(6 + 5 + 3); // frmsizecod, bsid, bsmod
- int acmod = data.readBits(3);
- if ((acmod & 0x01) != 0 && acmod != 1) {
- data.skipBits(2); // cmixlev
- }
- if ((acmod & 0x04) != 0) {
- data.skipBits(2); // surmixlev
- }
- if (acmod == 2) {
- data.skipBits(2); // dsurmod
- }
- boolean lfeon = data.readBit();
- return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_AC3, null, Format.NO_VALUE,
- Format.NO_VALUE, CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0),
- SAMPLE_RATE_BY_FSCOD[fscod], null, drmInitData, 0, language);
- }
-
- /**
- * Returns the E-AC-3 format given {@code data} containing a syncframe. The reading position of
- * {@code data} will be modified.
- *
- * @param data The data to parse, positioned at the start of the syncframe.
- * @param trackId The track identifier to set on the format, or null.
- * @param language The language to set on the format.
- * @param drmInitData {@link DrmInitData} to be included in the format.
- * @return The E-AC-3 format parsed from data in the header.
- */
- public static Format parseEac3SyncframeFormat(ParsableBitArray data, String trackId,
- String language, DrmInitData drmInitData) {
- data.skipBits(16 + 2 + 3 + 11); // syncword, strmtype, substreamid, frmsiz
+ public static Ac3SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
+ int initialPosition = data.getPosition();
+ data.skipBits(40);
+ boolean isEac3 = data.readBits(5) == 16;
+ data.setPosition(initialPosition);
+ String mimeType;
int sampleRate;
- int fscod = data.readBits(2);
- if (fscod == 3) {
- sampleRate = SAMPLE_RATE_BY_FSCOD2[data.readBits(2)];
- } else {
- data.skipBits(2); // numblkscod
+ int acmod;
+ int frameSize;
+ int sampleCount;
+ if (isEac3) {
+ mimeType = MimeTypes.AUDIO_E_AC3;
+ data.skipBits(16 + 2 + 3); // syncword, strmtype, substreamid
+ frameSize = (data.readBits(11) + 1) * 2;
+ int fscod = data.readBits(2);
+ int audioBlocks;
+ if (fscod == 3) {
+ sampleRate = SAMPLE_RATE_BY_FSCOD2[data.readBits(2)];
+ audioBlocks = 6;
+ } else {
+ int numblkscod = data.readBits(2);
+ audioBlocks = BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[numblkscod];
+ sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
+ }
+ sampleCount = AUDIO_SAMPLES_PER_AUDIO_BLOCK * audioBlocks;
+ acmod = data.readBits(3);
+ } else /* is AC-3 */ {
+ mimeType = MimeTypes.AUDIO_AC3;
+ data.skipBits(16 + 16); // syncword, crc1
+ int fscod = data.readBits(2);
+ int frmsizecod = data.readBits(6);
+ frameSize = getAc3SyncframeSize(fscod, frmsizecod);
+ data.skipBits(5 + 3); // bsid, bsmod
+ acmod = data.readBits(3);
+ if ((acmod & 0x01) != 0 && acmod != 1) {
+ data.skipBits(2); // cmixlev
+ }
+ if ((acmod & 0x04) != 0) {
+ data.skipBits(2); // surmixlev
+ }
+ if (acmod == 2) {
+ data.skipBits(2); // dsurmod
+ }
sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
+ sampleCount = AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
}
- int acmod = data.readBits(3);
boolean lfeon = data.readBit();
- return Format.createAudioSampleFormat(trackId, MimeTypes.AUDIO_E_AC3, null, Format.NO_VALUE,
- Format.NO_VALUE, CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0), sampleRate, null,
- drmInitData, 0, language);
+ int channelCount = CHANNEL_COUNT_BY_ACMOD[acmod] + (lfeon ? 1 : 0);
+ return new Ac3SyncFrameInfo(mimeType, channelCount, sampleRate, frameSize, sampleCount);
}
/**
@@ -187,16 +224,6 @@ public final class Ac3Util {
return getAc3SyncframeSize(fscod, frmsizecod);
}
- /**
- * Returns the size in bytes of the given E-AC-3 syncframe.
- *
- * @param data The syncframe to parse.
- * @return The syncframe size in bytes.
- */
- public static int parseEAc3SyncframeSize(byte[] data) {
- return 2 * (((data[2] & 0x07) << 8) + (data[3] & 0xFF) + 1); // frmsiz
- }
-
/**
* Returns the number of audio samples in an AC-3 syncframe.
*/
@@ -205,22 +232,10 @@ public final class Ac3Util {
}
/**
- * Returns the number of audio samples represented by the given E-AC-3 syncframe.
+ * Reads the number of audio samples represented by the given E-AC-3 syncframe. The buffer's
+ * position is not modified.
*
- * @param data The syncframe to parse.
- * @return The number of audio samples represented by the syncframe.
- */
- public static int parseEAc3SyncframeAudioSampleCount(byte[] data) {
- // See ETSI TS 102 366 subsection E.1.2.2.
- return AUDIO_SAMPLES_PER_AUDIO_BLOCK * (((data[4] & 0xC0) >> 6) == 0x03 ? 6 // fscod
- : BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[(data[4] & 0x30) >> 4]);
- }
-
- /**
- * Like {@link #parseEAc3SyncframeAudioSampleCount(byte[])} but reads from a {@link ByteBuffer}.
- * The buffer's position is not modified.
- *
- * @param buffer The {@link ByteBuffer} from which to read.
+ * @param buffer The {@link ByteBuffer} from which to read the syncframe.
* @return The number of audio samples represented by the syncframe.
*/
public static int parseEAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioProcessor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioProcessor.java
new file mode 100755
index 000000000..c1aa67522
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioProcessor.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.audio;
+
+import org.telegram.messenger.exoplayer2.C;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Interface for audio processors.
+ */
+public interface AudioProcessor {
+
+ /**
+ * Exception thrown when a processor can't be configured for a given input audio format.
+ */
+ final class UnhandledFormatException extends Exception {
+
+ public UnhandledFormatException(int sampleRateHz, int channelCount, @C.Encoding int encoding) {
+ super("Unhandled format: " + sampleRateHz + " Hz, " + channelCount + " channels in encoding "
+ + encoding);
+ }
+
+ }
+
+ /**
+ * An empty, direct {@link ByteBuffer}.
+ */
+ ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
+
+ /**
+ * Configures the processor to process input audio with the specified format. After calling this
+ * method, {@link #isActive()} returns whether the processor needs to handle buffers; if not, the
+ * processor will not accept any buffers until it is reconfigured. Returns {@code true} if the
+ * processor must be flushed, or if the value returned by {@link #isActive()} has changed as a
+ * result of the call. If it's active, {@link #getOutputChannelCount()} and
+ * {@link #getOutputEncoding()} return the processor's output format.
+ *
+ * @param sampleRateHz The sample rate of input audio in Hz.
+ * @param channelCount The number of interleaved channels in input audio.
+ * @param encoding The encoding of input audio.
+ * @return {@code true} if the processor must be flushed or the value returned by
+ * {@link #isActive()} has changed as a result of the call.
+ * @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
+ */
+ boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
+ throws UnhandledFormatException;
+
+ /**
+ * Returns whether the processor is configured and active.
+ */
+ boolean isActive();
+
+ /**
+ * Returns the number of audio channels in the data output by the processor.
+ */
+ int getOutputChannelCount();
+
+ /**
+ * Returns the audio encoding used in the data output by the processor.
+ */
+ @C.Encoding
+ int getOutputEncoding();
+
+ /**
+ * Queues audio data between the position and limit of the input {@code buffer} for processing.
+ * {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
+ * read-only. Its position will be advanced by the number of bytes consumed (which may be zero).
+ * The caller retains ownership of the provided buffer. Calling this method invalidates any
+ * previous buffer returned by {@link #getOutput()}.
+ *
+ * @param buffer The input buffer to process.
+ */
+ void queueInput(ByteBuffer buffer);
+
+ /**
+ * Queues an end of stream signal. After this method has been called,
+ * {@link #queueInput(ByteBuffer)} may not be called until after the next call to
+ * {@link #flush()}. Calling {@link #getOutput()} will return any remaining output data. Multiple
+ * calls may be required to read all of the remaining output data. {@link #isEnded()} will return
+ * {@code true} once all remaining output data has been read.
+ */
+ void queueEndOfStream();
+
+ /**
+ * Returns a buffer containing processed output data between its position and limit. The buffer
+ * will always be a direct byte buffer with native byte order. Calling this method invalidates any
+ * previously returned buffer. The buffer will be empty if no output is available.
+ *
+ * @return A buffer containing processed output data between its position and limit.
+ */
+ ByteBuffer getOutput();
+
+ /**
+ * Returns whether this processor will return no more output from {@link #getOutput()} until it
+ * has been {@link #flush()}ed and more input has been queued.
+ */
+ boolean isEnded();
+
+ /**
+ * Clears any state in preparation for receiving a new stream of input buffers.
+ */
+ void flush();
+
+ /**
+ * Resets the processor to its initial state.
+ */
+ void reset();
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioTrack.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioTrack.java
index 4b54ddef4..b08aa001e 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioTrack.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/AudioTrack.java
@@ -17,41 +17,47 @@ package org.telegram.messenger.exoplayer2.audio;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
+import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioTimestamp;
-import android.media.PlaybackParams;
import android.os.ConditionVariable;
import android.os.SystemClock;
import android.util.Log;
import org.telegram.messenger.exoplayer2.C;
-import org.telegram.messenger.exoplayer2.Format;
+import org.telegram.messenger.exoplayer2.PlaybackParameters;
import org.telegram.messenger.exoplayer2.util.Assertions;
import org.telegram.messenger.exoplayer2.util.MimeTypes;
import org.telegram.messenger.exoplayer2.util.Util;
import java.lang.reflect.Method;
import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.LinkedList;
/**
* Plays audio data. The implementation delegates to an {@link android.media.AudioTrack} and handles
* playback position smoothing, non-blocking writes and reconfiguration.
*
* Before starting playback, specify the input format by calling
- * {@link #configure(String, int, int, int, int)}. Next call {@link #initialize(int)}, optionally
- * specifying an audio session.
+ * {@link #configure(String, int, int, int, int)}. Optionally call {@link #setAudioSessionId(int)},
+ * {@link #setStreamType(int)}, {@link #enableTunnelingV21(int)} and {@link #disableTunneling()}
+ * to configure audio playback. These methods may be called after writing data to the track, in
+ * which case it will be reinitialized as required.
*
* Call {@link #handleBuffer(ByteBuffer, long)} to write data, and {@link #handleDiscontinuity()}
* when the data being fed is discontinuous. Call {@link #play()} to start playing the written data.
*
- * Call {@link #configure(String, int, int, int, int)} whenever the input format changes. If
- * {@link #isInitialized()} returns {@code false} after the call, it is necessary to call
- * {@link #initialize(int)} before writing more data.
+ * Call {@link #configure(String, int, int, int, int)} whenever the input format changes. The track
+ * will be reinitialized on the next call to {@link #handleBuffer(ByteBuffer, long)}.
*
- * The underlying {@link android.media.AudioTrack} is created by {@link #initialize(int)} and
- * released by {@link #reset()} (and {@link #configure(String, int, int, int, int)} unless the input
- * format is unchanged). It is safe to call {@link #initialize(int)} after calling {@link #reset()}
- * without reconfiguration.
+ * Calling {@link #reset()} releases the underlying {@link android.media.AudioTrack} (and so does
+ * calling {@link #configure(String, int, int, int, int)} unless the format is unchanged). It is
+ * safe to call {@link #handleBuffer(ByteBuffer, long)} after {@link #reset()} without calling
+ * {@link #configure(String, int, int, int, int)}.
*
- * Call {@link #release()} when the instance is no longer required.
+ * Call {@link #playToEndOfStream()} repeatedly to play out all data when no more input buffers will
+ * be provided via {@link #handleBuffer(ByteBuffer, long)} until the next {@link #reset}. Call
+ * {@link #release()} when the instance is no longer required.
*/
public final class AudioTrack {
@@ -60,6 +66,19 @@ public final class AudioTrack {
*/
public interface Listener {
+ /**
+ * Called when the audio track has been initialized with a newly generated audio session id.
+ *
+ * @param audioSessionId The newly generated audio session id.
+ */
+ void onAudioSessionId(int audioSessionId);
+
+ /**
+ * Called when the audio track handles a buffer whose timestamp is discontinuous with the last
+ * buffer handled since it was reset.
+ */
+ void onPositionDiscontinuity();
+
/**
* Called when the audio track underruns.
*
@@ -73,6 +92,21 @@ public final class AudioTrack {
}
+ /**
+ * Thrown when a failure occurs configuring the track.
+ */
+ public static final class ConfigurationException extends Exception {
+
+ public ConfigurationException(Throwable cause) {
+ super(cause);
+ }
+
+ public ConfigurationException(String message) {
+ super(message);
+ }
+
+ }
+
/**
* Thrown when a failure occurs initializing an {@link android.media.AudioTrack}.
*/
@@ -104,13 +138,15 @@ public final class AudioTrack {
public static final class WriteException extends Exception {
/**
- * An error value returned from {@link android.media.AudioTrack#write(byte[], int, int)}.
+ * The error value returned from {@link android.media.AudioTrack#write(byte[], int, int)} or
+ * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}.
*/
public final int errorCode;
/**
- * @param errorCode An error value returned from
- * {@link android.media.AudioTrack#write(byte[], int, int)}.
+ * @param errorCode The error value returned from
+ * {@link android.media.AudioTrack#write(byte[], int, int)} or
+ * {@link android.media.AudioTrack#write(ByteBuffer, int, int)}.
*/
public WriteException(int errorCode) {
super("AudioTrack write failed: " + errorCode);
@@ -135,21 +171,7 @@ public final class AudioTrack {
}
/**
- * Returned in the result of {@link #handleBuffer} if the buffer was discontinuous.
- */
- public static final int RESULT_POSITION_DISCONTINUITY = 1;
- /**
- * Returned in the result of {@link #handleBuffer} if the buffer can be released.
- */
- public static final int RESULT_BUFFER_CONSUMED = 2;
-
- /**
- * Represents an unset {@link android.media.AudioTrack} session identifier.
- */
- public static final int SESSION_ID_NOT_SET = 0;
-
- /**
- * Returned by {@link #getCurrentPositionUs} when the position is not set.
+ * Returned by {@link #getCurrentPositionUs(boolean)} when the position is not set.
*/
public static final long CURRENT_POSITION_NOT_SET = Long.MIN_VALUE;
@@ -210,15 +232,15 @@ public final class AudioTrack {
/**
* AudioTrack timestamps are deemed spurious if they are offset from the system clock by more
* than this amount.
- *
- *
This is a fail safe that should not be required on correctly functioning devices.
+ *
+ * This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_AUDIO_TIMESTAMP_OFFSET_US = 5 * C.MICROS_PER_SECOND;
/**
* AudioTrack latencies are deemed impossibly large if they are greater than this amount.
- *
- *
This is a fail safe that should not be required on correctly functioning devices.
+ *
+ * This is a fail safe that should not be required on correctly functioning devices.
*/
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
@@ -230,6 +252,13 @@ public final class AudioTrack {
private static final int MIN_PLAYHEAD_OFFSET_SAMPLE_INTERVAL_US = 30000;
private static final int MIN_TIMESTAMP_SAMPLE_INTERVAL_US = 500000;
+ /**
+ * The minimum number of output bytes from {@link #sonicAudioProcessor} at which the speedup is
+ * calculated using the input/output byte counts from the processor, rather than using the
+ * current playback parameters speed.
+ */
+ private static final int SONIC_MIN_BYTES_FOR_SPEEDUP = 1024;
+
/**
* Whether to enable a workaround for an issue where an audio effect does not keep its session
* active across releasing/initializing a new audio track, on platform builds where
@@ -249,30 +278,41 @@ public final class AudioTrack {
public static boolean failOnSpuriousAudioTimestamp = false;
private final AudioCapabilities audioCapabilities;
+ private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
+ private final SonicAudioProcessor sonicAudioProcessor;
+ private final AudioProcessor[] availableAudioProcessors;
private final Listener listener;
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
private final AudioTrackUtil audioTrackUtil;
+ private final LinkedList playbackParametersCheckpoints;
/**
- * Used to keep the audio session active on pre-V21 builds (see {@link #initialize(int)}).
+ * Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}).
*/
private android.media.AudioTrack keepSessionIdAudioTrack;
private android.media.AudioTrack audioTrack;
private int sampleRate;
private int channelConfig;
+ @C.Encoding
+ private int encoding;
+ @C.Encoding
+ private int outputEncoding;
@C.StreamType
private int streamType;
- @C.Encoding
- private int sourceEncoding;
- @C.Encoding
- private int targetEncoding;
private boolean passthrough;
- private int pcmFrameSize;
private int bufferSize;
private long bufferSizeUs;
+ private PlaybackParameters drainingPlaybackParameters;
+ private PlaybackParameters playbackParameters;
+ private long playbackParametersOffsetUs;
+ private long playbackParametersPositionUs;
+
+ private ByteBuffer avSyncHeader;
+ private int bytesUntilNextAvSync;
+
private int nextPlayheadOffsetIndex;
private int playheadOffsetCount;
private long smoothedPlayheadOffsetUs;
@@ -281,8 +321,12 @@ public final class AudioTrack {
private long lastTimestampSampleTimeUs;
private Method getLatencyMethod;
+ private int pcmFrameSize;
private long submittedPcmBytes;
private long submittedEncodedFrames;
+ private int outputPcmFrameSize;
+ private long writtenPcmBytes;
+ private long writtenEncodedFrames;
private int framesPerEncodedSample;
private int startMediaTimeState;
private long startMediaTimeUs;
@@ -290,21 +334,30 @@ public final class AudioTrack {
private long latencyUs;
private float volume;
- private byte[] temporaryBuffer;
- private int temporaryBufferOffset;
- private ByteBuffer currentSourceBuffer;
-
- private ByteBuffer resampledBuffer;
- private boolean useResampledBuffer;
+ private AudioProcessor[] audioProcessors;
+ private ByteBuffer[] outputBuffers;
+ private ByteBuffer inputBuffer;
+ private ByteBuffer outputBuffer;
+ private byte[] preV21OutputBuffer;
+ private int preV21OutputBufferOffset;
+ private int drainingAudioProcessorIndex;
+ private boolean handledEndOfStream;
+ private boolean playing;
+ private int audioSessionId;
+ private boolean tunneling;
private boolean hasData;
private long lastFeedElapsedRealtimeMs;
/**
- * @param audioCapabilities The current audio capabilities.
+ * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
+ * default capabilities (no encoded audio passthrough support) should be assumed.
+ * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
+ * output. May be empty.
* @param listener Listener for audio track events.
*/
- public AudioTrack(AudioCapabilities audioCapabilities, Listener listener) {
+ public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
+ Listener listener) {
this.audioCapabilities = audioCapabilities;
this.listener = listener;
releasingConditionVariable = new ConditionVariable(true);
@@ -316,17 +369,28 @@ public final class AudioTrack {
// There's no guarantee this method exists. Do nothing.
}
}
- if (Util.SDK_INT >= 23) {
- audioTrackUtil = new AudioTrackUtilV23();
- } else if (Util.SDK_INT >= 19) {
+ if (Util.SDK_INT >= 19) {
audioTrackUtil = new AudioTrackUtilV19();
} else {
audioTrackUtil = new AudioTrackUtil();
}
+ channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
+ sonicAudioProcessor = new SonicAudioProcessor();
+ availableAudioProcessors = new AudioProcessor[3 + audioProcessors.length];
+ availableAudioProcessors[0] = new ResamplingAudioProcessor();
+ availableAudioProcessors[1] = channelMappingAudioProcessor;
+ System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
+ availableAudioProcessors[2 + audioProcessors.length] = sonicAudioProcessor;
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
volume = 1.0f;
startMediaTimeState = START_NOT_SET;
streamType = C.STREAM_TYPE_DEFAULT;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ playbackParameters = PlaybackParameters.DEFAULT;
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ this.audioProcessors = new AudioProcessor[0];
+ outputBuffers = new ByteBuffer[0];
+ playbackParametersCheckpoints = new LinkedList<>();
}
/**
@@ -340,14 +404,6 @@ public final class AudioTrack {
&& audioCapabilities.supportsEncoding(getEncodingForMimeType(mimeType));
}
- /**
- * Returns whether the audio track has been successfully initialized via {@link #initialize} and
- * not yet {@link #reset}.
- */
- public boolean isInitialized() {
- return audioTrack != null;
- }
-
/**
* Returns the playback position in the stream starting at zero, in microseconds, or
* {@link #CURRENT_POSITION_NOT_SET} if it is not yet available.
@@ -369,33 +425,29 @@ public final class AudioTrack {
}
long systemClockUs = System.nanoTime() / 1000;
- long currentPositionUs;
+ long positionUs;
if (audioTimestampSet) {
- // How long ago in the past the audio timestamp is (negative if it's in the future).
- long presentationDiff = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
- // Fixes such difference if the playback speed is not real time speed.
- long actualSpeedPresentationDiff = (long) (presentationDiff
- * audioTrackUtil.getPlaybackSpeed());
- long framesDiff = durationUsToFrames(actualSpeedPresentationDiff);
- // The position of the frame that's currently being presented.
- long currentFramePosition = audioTrackUtil.getTimestampFramePosition() + framesDiff;
- currentPositionUs = framesToDurationUs(currentFramePosition) + startMediaTimeUs;
+ // Calculate the speed-adjusted position using the timestamp (which may be in the future).
+ long elapsedSinceTimestampUs = systemClockUs - (audioTrackUtil.getTimestampNanoTime() / 1000);
+ long elapsedSinceTimestampFrames = durationUsToFrames(elapsedSinceTimestampUs);
+ long elapsedFrames = audioTrackUtil.getTimestampFramePosition() + elapsedSinceTimestampFrames;
+ positionUs = framesToDurationUs(elapsedFrames);
} else {
if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position.
- currentPositionUs = audioTrackUtil.getPlaybackHeadPositionUs() + startMediaTimeUs;
+ positionUs = audioTrackUtil.getPositionUs();
} else {
- // getPlayheadPositionUs() only has a granularity of ~20ms, so we base the position off the
+ // getPlayheadPositionUs() only has a granularity of ~20 ms, so we base the position off the
// system clock (and a smoothed offset between it and the playhead position) so as to
// prevent jitter in the reported positions.
- currentPositionUs = systemClockUs + smoothedPlayheadOffsetUs + startMediaTimeUs;
+ positionUs = systemClockUs + smoothedPlayheadOffsetUs;
}
if (!sourceEnded) {
- currentPositionUs -= latencyUs;
+ positionUs -= latencyUs;
}
}
- return currentPositionUs;
+ return startMediaTimeUs + applySpeedup(positionUs);
}
/**
@@ -409,9 +461,56 @@ public final class AudioTrack {
* {@link C#ENCODING_PCM_32BIT}.
* @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
* suitable buffer size automatically.
+ * @throws ConfigurationException If an error occurs configuring the track.
*/
public void configure(String mimeType, int channelCount, int sampleRate,
- @C.PcmEncoding int pcmEncoding, int specifiedBufferSize) {
+ @C.PcmEncoding int pcmEncoding, int specifiedBufferSize) throws ConfigurationException {
+ configure(mimeType, channelCount, sampleRate, pcmEncoding, specifiedBufferSize, null);
+ }
+
+ /**
+ * Configures (or reconfigures) the audio track.
+ *
+ * @param mimeType The mime type.
+ * @param channelCount The number of channels.
+ * @param sampleRate The sample rate in Hz.
+ * @param pcmEncoding For PCM formats, the encoding used. One of {@link C#ENCODING_PCM_16BIT},
+ * {@link C#ENCODING_PCM_16BIT}, {@link C#ENCODING_PCM_24BIT} and
+ * {@link C#ENCODING_PCM_32BIT}.
+ * @param specifiedBufferSize A specific size for the playback buffer in bytes, or 0 to infer a
+ * suitable buffer size automatically.
+ * @param outputChannels A mapping from input to output channels that is applied to this track's
+ * input as a preprocessing step, if handling PCM input. Specify {@code null} to leave the
+ * input unchanged. Otherwise, the element at index {@code i} specifies index of the input
+ * channel to map to output channel {@code i} when preprocessing input buffers. After the
+ * map is applied the audio data will have {@code outputChannels.length} channels.
+ * @throws ConfigurationException If an error occurs configuring the track.
+ */
+ public void configure(String mimeType, int channelCount, int sampleRate,
+ @C.PcmEncoding int pcmEncoding, int specifiedBufferSize, int[] outputChannels)
+ throws ConfigurationException {
+ boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
+ @C.Encoding int encoding = passthrough ? getEncodingForMimeType(mimeType) : pcmEncoding;
+ boolean flush = false;
+ if (!passthrough) {
+ pcmFrameSize = Util.getPcmFrameSize(pcmEncoding, channelCount);
+ channelMappingAudioProcessor.setChannelMap(outputChannels);
+ for (AudioProcessor audioProcessor : availableAudioProcessors) {
+ try {
+ flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
+ } catch (AudioProcessor.UnhandledFormatException e) {
+ throw new ConfigurationException(e);
+ }
+ if (audioProcessor.isActive()) {
+ channelCount = audioProcessor.getOutputChannelCount();
+ encoding = audioProcessor.getOutputEncoding();
+ }
+ }
+ if (flush) {
+ resetAudioProcessors();
+ }
+ }
+
int channelConfig;
switch (channelCount) {
case 1:
@@ -439,21 +538,31 @@ public final class AudioTrack {
channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
break;
default:
- throw new IllegalArgumentException("Unsupported channel count: " + channelCount);
+ throw new ConfigurationException("Unsupported channel count: " + channelCount);
}
- boolean passthrough = !MimeTypes.AUDIO_RAW.equals(mimeType);
- @C.Encoding int sourceEncoding;
- if (passthrough) {
- sourceEncoding = getEncodingForMimeType(mimeType);
- } else if (pcmEncoding == C.ENCODING_PCM_8BIT || pcmEncoding == C.ENCODING_PCM_16BIT
- || pcmEncoding == C.ENCODING_PCM_24BIT || pcmEncoding == C.ENCODING_PCM_32BIT) {
- sourceEncoding = pcmEncoding;
- } else {
- throw new IllegalArgumentException("Unsupported PCM encoding: " + pcmEncoding);
+ // Workaround for overly strict channel configuration checks on nVidia Shield.
+ if (Util.SDK_INT <= 23 && "foster".equals(Util.DEVICE) && "NVIDIA".equals(Util.MANUFACTURER)) {
+ switch (channelCount) {
+ case 7:
+ channelConfig = C.CHANNEL_OUT_7POINT1_SURROUND;
+ break;
+ case 3:
+ case 5:
+ channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
+ break;
+ default:
+ break;
+ }
}
- if (isInitialized() && this.sourceEncoding == sourceEncoding && this.sampleRate == sampleRate
+ // Workaround for Nexus Player not reporting support for mono passthrough.
+ // (See [Internal: b/34268671].)
+ if (Util.SDK_INT <= 25 && "fugu".equals(Util.DEVICE) && passthrough && channelCount == 1) {
+ channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
+ }
+
+ if (!flush && isInitialized() && this.encoding == encoding && this.sampleRate == sampleRate
&& this.channelConfig == channelConfig) {
// We already have an audio track with the correct sample rate, channel config and encoding.
return;
@@ -461,48 +570,63 @@ public final class AudioTrack {
reset();
- this.sourceEncoding = sourceEncoding;
+ this.encoding = encoding;
this.passthrough = passthrough;
this.sampleRate = sampleRate;
this.channelConfig = channelConfig;
- targetEncoding = passthrough ? sourceEncoding : C.ENCODING_PCM_16BIT;
- pcmFrameSize = 2 * channelCount; // 2 bytes per 16-bit sample * number of channels.
+ outputEncoding = passthrough ? encoding : C.ENCODING_PCM_16BIT;
+ outputPcmFrameSize = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, channelCount);
if (specifiedBufferSize != 0) {
bufferSize = specifiedBufferSize;
} else if (passthrough) {
// TODO: Set the minimum buffer size using getMinBufferSize when it takes the encoding into
// account. [Internal: b/25181305]
- if (targetEncoding == C.ENCODING_AC3 || targetEncoding == C.ENCODING_E_AC3) {
+ if (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3) {
// AC-3 allows bitrates up to 640 kbit/s.
bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 80 * 1024 / C.MICROS_PER_SECOND);
- } else /* (targetEncoding == C.ENCODING_DTS || targetEncoding == C.ENCODING_DTS_HD */ {
+ } else /* (outputEncoding == C.ENCODING_DTS || outputEncoding == C.ENCODING_DTS_HD) */ {
// DTS allows an 'open' bitrate, but we assume the maximum listed value: 1536 kbit/s.
bufferSize = (int) (PASSTHROUGH_BUFFER_DURATION_US * 192 * 1024 / C.MICROS_PER_SECOND);
}
} else {
int minBufferSize =
- android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, targetEncoding);
+ android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, outputEncoding);
Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
- int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * pcmFrameSize;
+ int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * outputPcmFrameSize;
int maxAppBufferSize = (int) Math.max(minBufferSize,
- durationUsToFrames(MAX_BUFFER_DURATION_US) * pcmFrameSize);
+ durationUsToFrames(MAX_BUFFER_DURATION_US) * outputPcmFrameSize);
bufferSize = multipliedBufferSize < minAppBufferSize ? minAppBufferSize
: multipliedBufferSize > maxAppBufferSize ? maxAppBufferSize
: multipliedBufferSize;
}
- bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(pcmBytesToFrames(bufferSize));
+ bufferSizeUs = passthrough ? C.TIME_UNSET : framesToDurationUs(bufferSize / outputPcmFrameSize);
+
+ // The old playback parameters may no longer be applicable so try to reset them now.
+ setPlaybackParameters(playbackParameters);
}
- /**
- * Initializes the audio track for writing new buffers using {@link #handleBuffer}.
- *
- * @param sessionId Audio track session identifier to re-use, or {@link #SESSION_ID_NOT_SET} to
- * create a new one.
- * @return The new (or re-used) session identifier.
- */
- public int initialize(int sessionId) throws InitializationException {
+ private void resetAudioProcessors() {
+ ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
+ for (AudioProcessor audioProcessor : availableAudioProcessors) {
+ if (audioProcessor.isActive()) {
+ newAudioProcessors.add(audioProcessor);
+ } else {
+ audioProcessor.flush();
+ }
+ }
+ int count = newAudioProcessors.size();
+ audioProcessors = newAudioProcessors.toArray(new AudioProcessor[count]);
+ outputBuffers = new ByteBuffer[count];
+ for (int i = 0; i < count; i++) {
+ AudioProcessor audioProcessor = audioProcessors[i];
+ audioProcessor.flush();
+ outputBuffers[i] = audioProcessor.getOutput();
+ }
+ }
+
+ private void initialize() throws InitializationException {
// If we're asynchronously releasing a previous audio track then we block until it has been
// released. This guarantees that we cannot end up in a state where we have multiple audio
// track instances. Without this guarantee it would be possible, in extreme cases, to exhaust
@@ -510,23 +634,26 @@ public final class AudioTrack {
// initialization of the audio track to fail.
releasingConditionVariable.block();
- if (sessionId == SESSION_ID_NOT_SET) {
+ if (tunneling) {
+ audioTrack = createHwAvSyncAudioTrackV21(sampleRate, channelConfig, outputEncoding,
+ bufferSize, audioSessionId);
+ } else if (audioSessionId == C.AUDIO_SESSION_ID_UNSET) {
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
- targetEncoding, bufferSize, MODE_STREAM);
+ outputEncoding, bufferSize, MODE_STREAM);
} else {
// Re-attach to the same audio session.
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
- targetEncoding, bufferSize, MODE_STREAM, sessionId);
+ outputEncoding, bufferSize, MODE_STREAM, audioSessionId);
}
checkAudioTrackInitialized();
- sessionId = audioTrack.getAudioSessionId();
+ int audioSessionId = audioTrack.getAudioSessionId();
if (enablePreV21AudioSessionWorkaround) {
if (Util.SDK_INT < 21) {
// The workaround creates an audio track with a two byte buffer on the same session, and
// does not release it until this object is released, which keeps the session active.
if (keepSessionIdAudioTrack != null
- && sessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
+ && audioSessionId != keepSessionIdAudioTrack.getAudioSessionId()) {
releaseKeepSessionIdAudioTrack();
}
if (keepSessionIdAudioTrack == null) {
@@ -535,21 +662,25 @@ public final class AudioTrack {
@C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
keepSessionIdAudioTrack = new android.media.AudioTrack(streamType, sampleRate,
- channelConfig, encoding, bufferSize, MODE_STATIC, sessionId);
+ channelConfig, encoding, bufferSize, MODE_STATIC, audioSessionId);
}
}
}
+ if (this.audioSessionId != audioSessionId) {
+ this.audioSessionId = audioSessionId;
+ listener.onAudioSessionId(audioSessionId);
+ }
audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
- setAudioTrackVolume();
+ setVolumeInternal();
hasData = false;
- return sessionId;
}
/**
* Starts or resumes playing audio if the audio track has been initialized.
*/
public void play() {
+ playing = true;
if (isInitialized()) {
resumeSystemTimeUs = System.nanoTime() / 1000;
audioTrack.play();
@@ -567,47 +698,40 @@ public final class AudioTrack {
}
/**
- * Attempts to write data from a {@link ByteBuffer} to the audio track, starting from its current
- * position and ending at its limit (exclusive). The position of the {@link ByteBuffer} is
- * advanced by the number of bytes that were successfully written.
+ * Attempts to process data from a {@link ByteBuffer}, starting from its current position and
+ * ending at its limit (exclusive). The position of the {@link ByteBuffer} is advanced by the
+ * number of bytes that were handled. {@link Listener#onPositionDiscontinuity()} will be called if
+ * {@code presentationTimeUs} is discontinuous with the last buffer handled since the last reset.
*
- * Returns a bit field containing {@link #RESULT_BUFFER_CONSUMED} if the data was written in full,
- * and {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was discontinuous with previously
- * written data.
- *
- * If the data was not written in full then the same {@link ByteBuffer} must be provided to
- * subsequent calls until it has been fully consumed, except in the case of an interleaving call
- * to {@link #configure} or {@link #reset}.
+ * Returns whether the data was handled in full. If the data was not handled in full then the same
+ * {@link ByteBuffer} must be provided to subsequent calls until it has been fully consumed,
+ * except in the case of an interleaving call to {@link #reset()} (or an interleaving call to
+ * {@link #configure(String, int, int, int, int, int[])} that caused the track to be reset).
*
- * @param buffer The buffer containing audio data to play back.
- * @param presentationTimeUs Presentation timestamp of the next buffer in microseconds.
- * @return A bit field with {@link #RESULT_BUFFER_CONSUMED} if the buffer can be released, and
- * {@link #RESULT_POSITION_DISCONTINUITY} if the buffer was not contiguous with previously
- * written data.
+ * @param buffer The buffer containing audio data.
+ * @param presentationTimeUs The presentation timestamp of the buffer in microseconds.
+ * @return Whether the buffer was handled fully.
+ * @throws InitializationException If an error occurs initializing the track.
* @throws WriteException If an error occurs writing the audio data.
*/
- public int handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
- boolean hadData = hasData;
- hasData = hasPendingData();
- if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) {
- long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
- listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs), elapsedSinceLastFeedMs);
+ @SuppressWarnings("ReferenceEquality")
+ public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
+ throws InitializationException, WriteException {
+ Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer);
+ if (!isInitialized()) {
+ initialize();
+ if (playing) {
+ play();
+ }
}
- int result = writeBuffer(buffer, presentationTimeUs);
- lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
- return result;
- }
-
- private int writeBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
- boolean isNewSourceBuffer = currentSourceBuffer == null;
- Assertions.checkState(isNewSourceBuffer || currentSourceBuffer == buffer);
- currentSourceBuffer = buffer;
if (needsPassthroughWorkarounds()) {
// An AC-3 audio track continues to play data written while it is paused. Stop writing so its
// buffer empties. See [Internal: b/18899620].
if (audioTrack.getPlayState() == PLAYSTATE_PAUSED) {
- return 0;
+ // We force an underrun to pause the track, so don't notify the listener in this case.
+ hasData = false;
+ return false;
}
// A new AC-3 audio track's playback position continues to increase from the old track's
@@ -615,32 +739,44 @@ public final class AudioTrack {
// head position actually returns to zero.
if (audioTrack.getPlayState() == PLAYSTATE_STOPPED
&& audioTrackUtil.getPlaybackHeadPosition() != 0) {
- return 0;
+ return false;
}
}
- int result = 0;
- if (isNewSourceBuffer) {
- // We're seeing this buffer for the first time.
+ boolean hadData = hasData;
+ hasData = hasPendingData();
+ if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) {
+ long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
+ listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs), elapsedSinceLastFeedMs);
+ }
- if (!currentSourceBuffer.hasRemaining()) {
+ if (inputBuffer == null) {
+ // We are seeing this buffer for the first time.
+ if (!buffer.hasRemaining()) {
// The buffer is empty.
- currentSourceBuffer = null;
- return RESULT_BUFFER_CONSUMED;
- }
-
- useResampledBuffer = targetEncoding != sourceEncoding;
- if (useResampledBuffer) {
- Assertions.checkState(targetEncoding == C.ENCODING_PCM_16BIT);
- // Resample the buffer to get the data in the target encoding.
- resampledBuffer = resampleTo16BitPcm(currentSourceBuffer, sourceEncoding, resampledBuffer);
- buffer = resampledBuffer;
+ return true;
}
if (passthrough && framesPerEncodedSample == 0) {
// If this is the first encoded sample, calculate the sample size in frames.
- framesPerEncodedSample = getFramesPerEncodedSample(targetEncoding, buffer);
+ framesPerEncodedSample = getFramesPerEncodedSample(outputEncoding, buffer);
}
+
+ if (drainingPlaybackParameters != null) {
+ if (!drainAudioProcessorsToEndOfStream()) {
+ // Don't process any more input until draining completes.
+ return false;
+ }
+ // Store the position and corresponding media time from which the parameters will apply.
+ playbackParametersCheckpoints.add(new PlaybackParametersCheckpoint(
+ drainingPlaybackParameters, Math.max(0, presentationTimeUs),
+ framesToDurationUs(getWrittenFrames())));
+ drainingPlaybackParameters = null;
+ // The audio processors have drained, so flush them. This will cause any active speed
+ // adjustment audio processor to start producing audio with the new parameters.
+ resetAudioProcessors();
+ }
+
if (startMediaTimeState == START_NOT_SET) {
startMediaTimeUs = Math.max(0, presentationTimeUs);
startMediaTimeState = START_IN_SYNC;
@@ -659,66 +795,180 @@ public final class AudioTrack {
// number of bytes submitted.
startMediaTimeUs += (presentationTimeUs - expectedPresentationTimeUs);
startMediaTimeState = START_IN_SYNC;
- result |= RESULT_POSITION_DISCONTINUITY;
+ listener.onPositionDiscontinuity();
}
}
- if (Util.SDK_INT < 21) {
- // Copy {@code buffer} into {@code temporaryBuffer}.
- int bytesRemaining = buffer.remaining();
- if (temporaryBuffer == null || temporaryBuffer.length < bytesRemaining) {
- temporaryBuffer = new byte[bytesRemaining];
- }
- int originalPosition = buffer.position();
- buffer.get(temporaryBuffer, 0, bytesRemaining);
- buffer.position(originalPosition);
- temporaryBufferOffset = 0;
+
+ if (passthrough) {
+ submittedEncodedFrames += framesPerEncodedSample;
+ } else {
+ submittedPcmBytes += buffer.remaining();
}
+
+ inputBuffer = buffer;
}
- buffer = useResampledBuffer ? resampledBuffer : buffer;
+ if (passthrough) {
+ // Passthrough buffers are not processed.
+ writeBuffer(inputBuffer, presentationTimeUs);
+ } else {
+ processBuffers(presentationTimeUs);
+ }
+
+ if (!inputBuffer.hasRemaining()) {
+ inputBuffer = null;
+ return true;
+ }
+ return false;
+ }
+
+ private void processBuffers(long avSyncPresentationTimeUs) throws WriteException {
+ int count = audioProcessors.length;
+ int index = count;
+ while (index >= 0) {
+ ByteBuffer input = index > 0 ? outputBuffers[index - 1]
+ : (inputBuffer != null ? inputBuffer : AudioProcessor.EMPTY_BUFFER);
+ if (index == count) {
+ writeBuffer(input, avSyncPresentationTimeUs);
+ } else {
+ AudioProcessor audioProcessor = audioProcessors[index];
+ audioProcessor.queueInput(input);
+ ByteBuffer output = audioProcessor.getOutput();
+ outputBuffers[index] = output;
+ if (output.hasRemaining()) {
+ // Handle the output as input to the next audio processor or the AudioTrack.
+ index++;
+ continue;
+ }
+ }
+
+ if (input.hasRemaining()) {
+ // The input wasn't consumed and no output was produced, so give up for now.
+ return;
+ }
+
+ // Get more input from upstream.
+ index--;
+ }
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ private boolean writeBuffer(ByteBuffer buffer, long avSyncPresentationTimeUs)
+ throws WriteException {
+ if (!buffer.hasRemaining()) {
+ return true;
+ }
+ if (outputBuffer != null) {
+ Assertions.checkArgument(outputBuffer == buffer);
+ } else {
+ outputBuffer = buffer;
+ if (Util.SDK_INT < 21) {
+ int bytesRemaining = buffer.remaining();
+ if (preV21OutputBuffer == null || preV21OutputBuffer.length < bytesRemaining) {
+ preV21OutputBuffer = new byte[bytesRemaining];
+ }
+ int originalPosition = buffer.position();
+ buffer.get(preV21OutputBuffer, 0, bytesRemaining);
+ buffer.position(originalPosition);
+ preV21OutputBufferOffset = 0;
+ }
+ }
int bytesRemaining = buffer.remaining();
int bytesWritten = 0;
if (Util.SDK_INT < 21) { // passthrough == false
// Work out how many bytes we can write without the risk of blocking.
int bytesPending =
- (int) (submittedPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * pcmFrameSize));
+ (int) (writtenPcmBytes - (audioTrackUtil.getPlaybackHeadPosition() * outputPcmFrameSize));
int bytesToWrite = bufferSize - bytesPending;
if (bytesToWrite > 0) {
bytesToWrite = Math.min(bytesRemaining, bytesToWrite);
- bytesWritten = audioTrack.write(temporaryBuffer, temporaryBufferOffset, bytesToWrite);
- if (bytesWritten >= 0) {
- temporaryBufferOffset += bytesWritten;
+ bytesWritten = audioTrack.write(preV21OutputBuffer, preV21OutputBufferOffset, bytesToWrite);
+ if (bytesWritten > 0) {
+ preV21OutputBufferOffset += bytesWritten;
+ buffer.position(buffer.position() + bytesWritten);
}
- buffer.position(buffer.position() + bytesWritten);
}
+ } else if (tunneling) {
+ Assertions.checkState(avSyncPresentationTimeUs != C.TIME_UNSET);
+ bytesWritten = writeNonBlockingWithAvSyncV21(audioTrack, buffer, bytesRemaining,
+ avSyncPresentationTimeUs);
} else {
bytesWritten = writeNonBlockingV21(audioTrack, buffer, bytesRemaining);
}
+ lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
+
if (bytesWritten < 0) {
throw new WriteException(bytesWritten);
}
if (!passthrough) {
- submittedPcmBytes += bytesWritten;
+ writtenPcmBytes += bytesWritten;
}
if (bytesWritten == bytesRemaining) {
if (passthrough) {
- submittedEncodedFrames += framesPerEncodedSample;
+ writtenEncodedFrames += framesPerEncodedSample;
}
- currentSourceBuffer = null;
- result |= RESULT_BUFFER_CONSUMED;
+ outputBuffer = null;
+ return true;
}
- return result;
+ return false;
}
/**
- * Ensures that the last data passed to {@link #handleBuffer(ByteBuffer, long)} is played in full.
+ * Plays out remaining audio. {@link #isEnded()} will return {@code true} when playback has ended.
+ *
+ * @throws WriteException If an error occurs draining data to the track.
*/
- public void handleEndOfStream() {
- if (isInitialized()) {
- audioTrackUtil.handleEndOfStream(getSubmittedFrames());
+ public void playToEndOfStream() throws WriteException {
+ if (handledEndOfStream || !isInitialized()) {
+ return;
}
+
+ if (drainAudioProcessorsToEndOfStream()) {
+ // The audio processors have drained, so drain the underlying audio track.
+ audioTrackUtil.handleEndOfStream(getWrittenFrames());
+ bytesUntilNextAvSync = 0;
+ handledEndOfStream = true;
+ }
+ }
+
+ private boolean drainAudioProcessorsToEndOfStream() throws WriteException {
+ boolean audioProcessorNeedsEndOfStream = false;
+ if (drainingAudioProcessorIndex == C.INDEX_UNSET) {
+ drainingAudioProcessorIndex = passthrough ? audioProcessors.length : 0;
+ audioProcessorNeedsEndOfStream = true;
+ }
+ while (drainingAudioProcessorIndex < audioProcessors.length) {
+ AudioProcessor audioProcessor = audioProcessors[drainingAudioProcessorIndex];
+ if (audioProcessorNeedsEndOfStream) {
+ audioProcessor.queueEndOfStream();
+ }
+ processBuffers(C.TIME_UNSET);
+ if (!audioProcessor.isEnded()) {
+ return false;
+ }
+ audioProcessorNeedsEndOfStream = true;
+ drainingAudioProcessorIndex++;
+ }
+
+ // Finish writing any remaining output to the track.
+ if (outputBuffer != null) {
+ writeBuffer(outputBuffer, C.TIME_UNSET);
+ if (outputBuffer != null) {
+ return false;
+ }
+ }
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ return true;
+ }
+
+ /**
+ * Returns whether all buffers passed to {@link #handleBuffer(ByteBuffer, long)} have been
+ * completely processed and played.
+ */
+ public boolean isEnded() {
+ return !isInitialized() || (handledEndOfStream && !hasPendingData());
}
/**
@@ -726,38 +976,115 @@ public final class AudioTrack {
*/
public boolean hasPendingData() {
return isInitialized()
- && (getSubmittedFrames() > audioTrackUtil.getPlaybackHeadPosition()
+ && (getWrittenFrames() > audioTrackUtil.getPlaybackHeadPosition()
|| overrideHasPendingData());
}
/**
- * Sets the playback parameters. Only available for {@link Util#SDK_INT} >= 23
+ * Attempts to set the playback parameters and returns the active playback parameters, which may
+ * differ from those passed in.
*
- * @param playbackParams The playback parameters to be used by the
- * {@link android.media.AudioTrack}.
- * @throws UnsupportedOperationException if the Playback Parameters are not supported. That is,
- * {@link Util#SDK_INT} < 23.
+ * @param playbackParameters The new playback parameters to attempt to set.
+ * @return The active playback parameters.
*/
- public void setPlaybackParams(PlaybackParams playbackParams) {
- audioTrackUtil.setPlaybackParams(playbackParams);
+ public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
+ if (passthrough) {
+ // The playback parameters are always the default in passthrough mode.
+ this.playbackParameters = PlaybackParameters.DEFAULT;
+ return this.playbackParameters;
+ }
+ playbackParameters = new PlaybackParameters(
+ sonicAudioProcessor.setSpeed(playbackParameters.speed),
+ sonicAudioProcessor.setPitch(playbackParameters.pitch));
+ PlaybackParameters lastSetPlaybackParameters =
+ drainingPlaybackParameters != null ? drainingPlaybackParameters
+ : !playbackParametersCheckpoints.isEmpty()
+ ? playbackParametersCheckpoints.getLast().playbackParameters
+ : this.playbackParameters;
+ if (!playbackParameters.equals(lastSetPlaybackParameters)) {
+ if (isInitialized()) {
+ // Drain the audio processors so we can determine the frame position at which the new
+ // parameters apply.
+ drainingPlaybackParameters = playbackParameters;
+ } else {
+ this.playbackParameters = playbackParameters;
+ }
+ }
+ return this.playbackParameters;
}
/**
- * Sets the stream type for audio track. If the stream type has changed, {@link #isInitialized()}
- * will return {@code false} and the caller must re-{@link #initialize(int)} the audio track
- * before writing more data. The caller must not reuse the audio session identifier when
- * re-initializing with a new stream type.
+ * Gets the {@link PlaybackParameters}.
+ */
+ public PlaybackParameters getPlaybackParameters() {
+ return playbackParameters;
+ }
+
+ /**
+ * Sets the stream type for audio track. If the stream type has changed and if the audio track
+ * is not configured for use with tunneling, then the audio track is reset and the audio session
+ * id is cleared.
+ *
+ * If the audio track is configured for use with tunneling then the stream type is ignored, the
+ * audio track is not reset and the audio session id is not cleared. The passed stream type will
+ * be used if the audio track is later re-configured into non-tunneled mode.
*
* @param streamType The {@link C.StreamType} to use for audio output.
- * @return Whether the stream type changed.
*/
- public boolean setStreamType(@C.StreamType int streamType) {
+ public void setStreamType(@C.StreamType int streamType) {
if (this.streamType == streamType) {
- return false;
+ return;
}
this.streamType = streamType;
+ if (tunneling) {
+ // The stream type is ignored in tunneling mode, so no need to reset.
+ return;
+ }
reset();
- return true;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ }
+
+ /**
+ * Sets the audio session id. The audio track is reset if the audio session id has changed.
+ */
+ public void setAudioSessionId(int audioSessionId) {
+ if (this.audioSessionId != audioSessionId) {
+ this.audioSessionId = audioSessionId;
+ reset();
+ }
+ }
+
+ /**
+ * Enables tunneling. The audio track is reset if tunneling was previously disabled or if the
+ * audio session id has changed. Enabling tunneling requires platform API version 21 onwards.
+ *
+ * If this instance has {@link AudioProcessor}s and tunneling is enabled, care must be taken that
+ * audio processors do not output buffers with a different duration than their input, and buffer
+ * processors must produce output corresponding to their last input immediately after that input
+ * is queued.
+ *
+ * @param tunnelingAudioSessionId The audio session id to use.
+ * @throws IllegalStateException Thrown if enabling tunneling on platform API version < 21.
+ */
+ public void enableTunnelingV21(int tunnelingAudioSessionId) {
+ Assertions.checkState(Util.SDK_INT >= 21);
+ if (!tunneling || audioSessionId != tunnelingAudioSessionId) {
+ tunneling = true;
+ audioSessionId = tunnelingAudioSessionId;
+ reset();
+ }
+ }
+
+ /**
+ * Disables tunneling. If tunneling was previously enabled then the audio track is reset and the
+ * audio session id is cleared.
+ */
+ public void disableTunneling() {
+ if (tunneling) {
+ tunneling = false;
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ reset();
+ }
}
/**
@@ -768,17 +1095,17 @@ public final class AudioTrack {
public void setVolume(float volume) {
if (this.volume != volume) {
this.volume = volume;
- setAudioTrackVolume();
+ setVolumeInternal();
}
}
- private void setAudioTrackVolume() {
+ private void setVolumeInternal() {
if (!isInitialized()) {
// Do nothing.
} else if (Util.SDK_INT >= 21) {
- setAudioTrackVolumeV21(audioTrack, volume);
+ setVolumeInternalV21(audioTrack, volume);
} else {
- setAudioTrackVolumeV3(audioTrack, volume);
+ setVolumeInternalV3(audioTrack, volume);
}
}
@@ -786,6 +1113,7 @@ public final class AudioTrack {
* Pauses playback.
*/
public void pause() {
+ playing = false;
if (isInitialized()) {
resetSyncParams();
audioTrackUtil.pause();
@@ -795,16 +1123,37 @@ public final class AudioTrack {
/**
* Releases the underlying audio track asynchronously.
*
- * Calling {@link #initialize(int)} will block until the audio track has been released, so it is
- * safe to initialize immediately after a reset. The audio session may remain active until
- * {@link #release()} is called.
+ * Calling {@link #handleBuffer(ByteBuffer, long)} will block until the audio track has been
+ * released, so it is safe to use the audio track immediately after a reset. The audio session may
+ * remain active until {@link #release()} is called.
*/
public void reset() {
if (isInitialized()) {
submittedPcmBytes = 0;
submittedEncodedFrames = 0;
+ writtenPcmBytes = 0;
+ writtenEncodedFrames = 0;
framesPerEncodedSample = 0;
- currentSourceBuffer = null;
+ if (drainingPlaybackParameters != null) {
+ playbackParameters = drainingPlaybackParameters;
+ drainingPlaybackParameters = null;
+ } else if (!playbackParametersCheckpoints.isEmpty()) {
+ playbackParameters = playbackParametersCheckpoints.getLast().playbackParameters;
+ }
+ playbackParametersCheckpoints.clear();
+ playbackParametersOffsetUs = 0;
+ playbackParametersPositionUs = 0;
+ inputBuffer = null;
+ outputBuffer = null;
+ for (int i = 0; i < audioProcessors.length; i++) {
+ AudioProcessor audioProcessor = audioProcessors[i];
+ audioProcessor.flush();
+ outputBuffers[i] = audioProcessor.getOutput();
+ }
+ handledEndOfStream = false;
+ drainingAudioProcessorIndex = C.INDEX_UNSET;
+ avSyncHeader = null;
+ bytesUntilNextAvSync = 0;
startMediaTimeState = START_NOT_SET;
latencyUs = 0;
resetSyncParams();
@@ -837,6 +1186,11 @@ public final class AudioTrack {
public void release() {
reset();
releaseKeepSessionIdAudioTrack();
+ for (AudioProcessor audioProcessor : availableAudioProcessors) {
+ audioProcessor.reset();
+ }
+ audioSessionId = C.AUDIO_SESSION_ID_UNSET;
+ playing = false;
}
/**
@@ -865,11 +1219,41 @@ public final class AudioTrack {
return isInitialized() && startMediaTimeState != START_NOT_SET;
}
+ /**
+ * Returns the underlying audio track {@code positionUs} with any applicable speedup applied.
+ */
+ private long applySpeedup(long positionUs) {
+ while (!playbackParametersCheckpoints.isEmpty()
+ && positionUs >= playbackParametersCheckpoints.getFirst().positionUs) {
+ // We are playing (or about to play) media with the new playback parameters, so update them.
+ PlaybackParametersCheckpoint checkpoint = playbackParametersCheckpoints.remove();
+ playbackParameters = checkpoint.playbackParameters;
+ playbackParametersPositionUs = checkpoint.positionUs;
+ playbackParametersOffsetUs = checkpoint.mediaTimeUs - startMediaTimeUs;
+ }
+
+ if (playbackParameters.speed == 1f) {
+ return positionUs + playbackParametersOffsetUs - playbackParametersPositionUs;
+ }
+
+ if (playbackParametersCheckpoints.isEmpty()
+ && sonicAudioProcessor.getOutputByteCount() >= SONIC_MIN_BYTES_FOR_SPEEDUP) {
+ return playbackParametersOffsetUs
+ + Util.scaleLargeTimestamp(positionUs - playbackParametersPositionUs,
+ sonicAudioProcessor.getInputByteCount(), sonicAudioProcessor.getOutputByteCount());
+ }
+
+ // We are playing drained data at a previous playback speed, or don't have enough bytes to
+ // calculate an accurate speedup, so fall back to multiplying by the speed.
+ return playbackParametersOffsetUs
+ + (long) ((double) playbackParameters.speed * (positionUs - playbackParametersPositionUs));
+ }
+
/**
* Updates the audio track latency and playback position parameters.
*/
private void maybeSampleSyncParams() {
- long playbackPositionUs = audioTrackUtil.getPlaybackHeadPositionUs();
+ long playbackPositionUs = audioTrackUtil.getPositionUs();
if (playbackPositionUs == 0) {
// The AudioTrack hasn't output anything yet.
return;
@@ -974,8 +1358,8 @@ public final class AudioTrack {
throw new InitializationException(state, sampleRate, channelConfig, bufferSize);
}
- private long pcmBytesToFrames(long byteCount) {
- return byteCount / pcmFrameSize;
+ private boolean isInitialized() {
+ return audioTrack != null;
}
private long framesToDurationUs(long frameCount) {
@@ -987,7 +1371,11 @@ public final class AudioTrack {
}
private long getSubmittedFrames() {
- return passthrough ? submittedEncodedFrames : pcmBytesToFrames(submittedPcmBytes);
+ return passthrough ? submittedEncodedFrames : (submittedPcmBytes / pcmFrameSize);
+ }
+
+ private long getWrittenFrames() {
+ return passthrough ? writtenEncodedFrames : (writtenPcmBytes / outputPcmFrameSize);
}
private void resetSyncParams() {
@@ -1005,7 +1393,7 @@ public final class AudioTrack {
*/
private boolean needsPassthroughWorkarounds() {
return Util.SDK_INT < 23
- && (targetEncoding == C.ENCODING_AC3 || targetEncoding == C.ENCODING_E_AC3);
+ && (outputEncoding == C.ENCODING_AC3 || outputEncoding == C.ENCODING_E_AC3);
}
/**
@@ -1021,79 +1409,23 @@ public final class AudioTrack {
}
/**
- * Converts the provided buffer into 16-bit PCM.
- *
- * @param buffer The buffer containing the data to convert.
- * @param sourceEncoding The data encoding.
- * @param out A buffer into which the output should be written, if its capacity is sufficient.
- * @return The 16-bit PCM output. Different to the out parameter if null was passed, or if the
- * capacity was insufficient for the output.
+ * Instantiates an {@link android.media.AudioTrack} to be used with tunneling video playback.
*/
- private static ByteBuffer resampleTo16BitPcm(ByteBuffer buffer, @C.PcmEncoding int sourceEncoding,
- ByteBuffer out) {
- int offset = buffer.position();
- int limit = buffer.limit();
- int size = limit - offset;
-
- int resampledSize;
- switch (sourceEncoding) {
- case C.ENCODING_PCM_8BIT:
- resampledSize = size * 2;
- break;
- case C.ENCODING_PCM_24BIT:
- resampledSize = (size / 3) * 2;
- break;
- case C.ENCODING_PCM_32BIT:
- resampledSize = size / 2;
- break;
- case C.ENCODING_PCM_16BIT:
- case C.ENCODING_INVALID:
- case Format.NO_VALUE:
- default:
- // Never happens.
- throw new IllegalStateException();
- }
-
- ByteBuffer resampledBuffer = out;
- if (resampledBuffer == null || resampledBuffer.capacity() < resampledSize) {
- resampledBuffer = ByteBuffer.allocateDirect(resampledSize);
- }
- resampledBuffer.position(0);
- resampledBuffer.limit(resampledSize);
-
- // Samples are little endian.
- switch (sourceEncoding) {
- case C.ENCODING_PCM_8BIT:
- // 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
- for (int i = offset; i < limit; i++) {
- resampledBuffer.put((byte) 0);
- resampledBuffer.put((byte) ((buffer.get(i) & 0xFF) - 128));
- }
- break;
- case C.ENCODING_PCM_24BIT:
- // 24->16 bit resampling. Drop the least significant byte.
- for (int i = offset; i < limit; i += 3) {
- resampledBuffer.put(buffer.get(i + 1));
- resampledBuffer.put(buffer.get(i + 2));
- }
- break;
- case C.ENCODING_PCM_32BIT:
- // 32->16 bit resampling. Drop the two least significant bytes.
- for (int i = offset; i < limit; i += 4) {
- resampledBuffer.put(buffer.get(i + 2));
- resampledBuffer.put(buffer.get(i + 3));
- }
- break;
- case C.ENCODING_PCM_16BIT:
- case C.ENCODING_INVALID:
- case Format.NO_VALUE:
- default:
- // Never happens.
- throw new IllegalStateException();
- }
-
- resampledBuffer.position(0);
- return resampledBuffer;
+ @TargetApi(21)
+ private static android.media.AudioTrack createHwAvSyncAudioTrackV21(int sampleRate,
+ int channelConfig, int encoding, int bufferSize, int sessionId) {
+ AudioAttributes attributesBuilder = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA)
+ .setContentType(AudioAttributes.CONTENT_TYPE_MOVIE)
+ .setFlags(AudioAttributes.FLAG_HW_AV_SYNC)
+ .build();
+ AudioFormat format = new AudioFormat.Builder()
+ .setChannelMask(channelConfig)
+ .setEncoding(encoding)
+ .setSampleRate(sampleRate)
+ .build();
+ return new android.media.AudioTrack(attributesBuilder, format, bufferSize, MODE_STREAM,
+ sessionId);
}
@C.Encoding
@@ -1125,18 +1457,57 @@ public final class AudioTrack {
}
@TargetApi(21)
- private static int writeNonBlockingV21(
- android.media.AudioTrack audioTrack, ByteBuffer buffer, int size) {
+ private static int writeNonBlockingV21(android.media.AudioTrack audioTrack, ByteBuffer buffer,
+ int size) {
return audioTrack.write(buffer, size, WRITE_NON_BLOCKING);
}
@TargetApi(21)
- private static void setAudioTrackVolumeV21(android.media.AudioTrack audioTrack, float volume) {
+ private int writeNonBlockingWithAvSyncV21(android.media.AudioTrack audioTrack,
+ ByteBuffer buffer, int size, long presentationTimeUs) {
+ // TODO: Uncomment this when [Internal ref b/33627517] is clarified or fixed.
+ // if (Util.SDK_INT >= 23) {
+ // // The underlying platform AudioTrack writes AV sync headers directly.
+ // return audioTrack.write(buffer, size, WRITE_NON_BLOCKING, presentationTimeUs * 1000);
+ // }
+ if (avSyncHeader == null) {
+ avSyncHeader = ByteBuffer.allocate(16);
+ avSyncHeader.order(ByteOrder.BIG_ENDIAN);
+ avSyncHeader.putInt(0x55550001);
+ }
+ if (bytesUntilNextAvSync == 0) {
+ avSyncHeader.putInt(4, size);
+ avSyncHeader.putLong(8, presentationTimeUs * 1000);
+ avSyncHeader.position(0);
+ bytesUntilNextAvSync = size;
+ }
+ int avSyncHeaderBytesRemaining = avSyncHeader.remaining();
+ if (avSyncHeaderBytesRemaining > 0) {
+ int result = audioTrack.write(avSyncHeader, avSyncHeaderBytesRemaining, WRITE_NON_BLOCKING);
+ if (result < 0) {
+ bytesUntilNextAvSync = 0;
+ return result;
+ }
+ if (result < avSyncHeaderBytesRemaining) {
+ return 0;
+ }
+ }
+ int result = writeNonBlockingV21(audioTrack, buffer, size);
+ if (result < 0) {
+ bytesUntilNextAvSync = 0;
+ return result;
+ }
+ bytesUntilNextAvSync -= result;
+ return result;
+ }
+
+ @TargetApi(21)
+ private static void setVolumeInternalV21(android.media.AudioTrack audioTrack, float volume) {
audioTrack.setVolume(volume);
}
@SuppressWarnings("deprecation")
- private static void setAudioTrackVolumeV3(android.media.AudioTrack audioTrack, float volume) {
+ private static void setVolumeInternalV3(android.media.AudioTrack audioTrack, float volume) {
audioTrack.setStereoVolume(volume, volume);
}
@@ -1178,15 +1549,15 @@ public final class AudioTrack {
/**
* Stops the audio track in a way that ensures media written to it is played out in full, and
- * that {@link #getPlaybackHeadPosition()} and {@link #getPlaybackHeadPositionUs()} continue to
- * increment as the remaining media is played out.
+ * that {@link #getPlaybackHeadPosition()} and {@link #getPositionUs()} continue to increment as
+ * the remaining media is played out.
*
- * @param submittedFrames The total number of frames that have been submitted.
+ * @param writtenFrames The total number of frames that have been written.
*/
- public void handleEndOfStream(long submittedFrames) {
+ public void handleEndOfStream(long writtenFrames) {
stopPlaybackHeadPosition = getPlaybackHeadPosition();
stopTimestampUs = SystemClock.elapsedRealtime() * 1000;
- endPlaybackHeadPosition = submittedFrames;
+ endPlaybackHeadPosition = writtenFrames;
audioTrack.stop();
}
@@ -1208,8 +1579,7 @@ public final class AudioTrack {
* returns the playback head position as a long that will only wrap around if the value exceeds
* {@link Long#MAX_VALUE} (which in practice will never happen).
*
- * @return {@link android.media.AudioTrack#getPlaybackHeadPosition()} of {@link #audioTrack}
- * expressed as a long.
+ * @return The playback head position, in frames.
*/
public long getPlaybackHeadPosition() {
if (stopTimestampUs != C.TIME_UNSET) {
@@ -1244,9 +1614,9 @@ public final class AudioTrack {
}
/**
- * Returns {@link #getPlaybackHeadPosition()} expressed as microseconds.
+ * Returns the duration of played media since reconfiguration, in microseconds.
*/
- public long getPlaybackHeadPositionUs() {
+ public long getPositionUs() {
return (getPlaybackHeadPosition() * C.MICROS_PER_SECOND) / sampleRate;
}
@@ -1290,28 +1660,6 @@ public final class AudioTrack {
throw new UnsupportedOperationException();
}
- /**
- * Sets the Playback Parameters to be used by the underlying {@link android.media.AudioTrack}.
- *
- * @param playbackParams The playback parameters to be used by the
- * {@link android.media.AudioTrack}.
- * @throws UnsupportedOperationException If Playback Parameters are not supported
- * (i.e. {@link Util#SDK_INT} < 23).
- */
- public void setPlaybackParams(PlaybackParams playbackParams) {
- throw new UnsupportedOperationException();
- }
-
- /**
- * Returns the configured playback speed according to the used Playback Parameters. If these are
- * not supported, 1.0f(normal speed) is returned.
- *
- * @return The speed factor used by the underlying {@link android.media.AudioTrack}.
- */
- public float getPlaybackSpeed() {
- return 1.0f;
- }
-
}
@TargetApi(19)
@@ -1363,41 +1711,20 @@ public final class AudioTrack {
}
- @TargetApi(23)
- private static class AudioTrackUtilV23 extends AudioTrackUtilV19 {
+ /**
+ * Stores playback parameters with the position and media time at which they apply.
+ */
+ private static final class PlaybackParametersCheckpoint {
- private PlaybackParams playbackParams;
- private float playbackSpeed;
+ private final PlaybackParameters playbackParameters;
+ private final long mediaTimeUs;
+ private final long positionUs;
- public AudioTrackUtilV23() {
- playbackSpeed = 1.0f;
- }
-
- @Override
- public void reconfigure(android.media.AudioTrack audioTrack,
- boolean needsPassthroughWorkaround) {
- super.reconfigure(audioTrack, needsPassthroughWorkaround);
- maybeApplyPlaybackParams();
- }
-
- @Override
- public void setPlaybackParams(PlaybackParams playbackParams) {
- playbackParams = (playbackParams != null ? playbackParams : new PlaybackParams())
- .allowDefaults();
- this.playbackParams = playbackParams;
- this.playbackSpeed = playbackParams.getSpeed();
- maybeApplyPlaybackParams();
- }
-
- @Override
- public float getPlaybackSpeed() {
- return playbackSpeed;
- }
-
- private void maybeApplyPlaybackParams() {
- if (audioTrack != null && playbackParams != null) {
- audioTrack.setPlaybackParams(playbackParams);
- }
+ private PlaybackParametersCheckpoint(PlaybackParameters playbackParameters, long mediaTimeUs,
+ long positionUs) {
+ this.playbackParameters = playbackParameters;
+ this.mediaTimeUs = mediaTimeUs;
+ this.positionUs = positionUs;
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ChannelMappingAudioProcessor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ChannelMappingAudioProcessor.java
new file mode 100755
index 000000000..ca4af2036
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ChannelMappingAudioProcessor.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.audio;
+
+import org.telegram.messenger.exoplayer2.C;
+import org.telegram.messenger.exoplayer2.C.Encoding;
+import org.telegram.messenger.exoplayer2.Format;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+
+/**
+ * An {@link AudioProcessor} that applies a mapping from input channels onto specified output
+ * channels. This can be used to reorder, duplicate or discard channels.
+ */
+/* package */ final class ChannelMappingAudioProcessor implements AudioProcessor {
+
+ private int channelCount;
+ private int sampleRateHz;
+ private int[] pendingOutputChannels;
+
+ private boolean active;
+ private int[] outputChannels;
+ private ByteBuffer buffer;
+ private ByteBuffer outputBuffer;
+ private boolean inputEnded;
+
+ /**
+ * Creates a new processor that applies a channel mapping.
+ */
+ public ChannelMappingAudioProcessor() {
+ buffer = EMPTY_BUFFER;
+ outputBuffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ }
+
+ /**
+ * Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)}
+ * to start using the new channel map.
+ *
+ * @see AudioTrack#configure(String, int, int, int, int, int[])
+ */
+ public void setChannelMap(int[] outputChannels) {
+ pendingOutputChannels = outputChannels;
+ }
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
+ throws UnhandledFormatException {
+ boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels);
+ outputChannels = pendingOutputChannels;
+ if (outputChannels == null) {
+ active = false;
+ return outputChannelsChanged;
+ }
+ if (encoding != C.ENCODING_PCM_16BIT) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ if (!outputChannelsChanged && this.sampleRateHz == sampleRateHz
+ && this.channelCount == channelCount) {
+ return false;
+ }
+ this.sampleRateHz = sampleRateHz;
+ this.channelCount = channelCount;
+
+ active = channelCount != outputChannels.length;
+ for (int i = 0; i < outputChannels.length; i++) {
+ int channelIndex = outputChannels[i];
+ if (channelIndex >= channelCount) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ active |= (channelIndex != i);
+ }
+ return true;
+ }
+
+ @Override
+ public boolean isActive() {
+ return active;
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return outputChannels == null ? channelCount : outputChannels.length;
+ }
+
+ @Override
+ public int getOutputEncoding() {
+ return C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ int position = inputBuffer.position();
+ int limit = inputBuffer.limit();
+ int frameCount = (limit - position) / (2 * channelCount);
+ int outputSize = frameCount * outputChannels.length * 2;
+ if (buffer.capacity() < outputSize) {
+ buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
+ } else {
+ buffer.clear();
+ }
+ while (position < limit) {
+ for (int channelIndex : outputChannels) {
+ buffer.putShort(inputBuffer.getShort(position + 2 * channelIndex));
+ }
+ position += channelCount * 2;
+ }
+ inputBuffer.position(limit);
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ inputEnded = true;
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ @Override
+ public boolean isEnded() {
+ return inputEnded && outputBuffer == EMPTY_BUFFER;
+ }
+
+ @Override
+ public void flush() {
+ outputBuffer = EMPTY_BUFFER;
+ inputEnded = false;
+ }
+
+ @Override
+ public void reset() {
+ flush();
+ buffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ outputChannels = null;
+ active = false;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/MediaCodecAudioRenderer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/MediaCodecAudioRenderer.java
index af04048fa..ffc84a918 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/MediaCodecAudioRenderer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/MediaCodecAudioRenderer.java
@@ -19,12 +19,12 @@ import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
-import android.media.PlaybackParams;
import android.media.audiofx.Virtualizer;
import android.os.Handler;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.ExoPlaybackException;
import org.telegram.messenger.exoplayer2.Format;
+import org.telegram.messenger.exoplayer2.PlaybackParameters;
import org.telegram.messenger.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import org.telegram.messenger.exoplayer2.drm.DrmSessionManager;
import org.telegram.messenger.exoplayer2.drm.FrameworkMediaCrypto;
@@ -41,16 +41,16 @@ import java.nio.ByteBuffer;
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
*/
@TargetApi(16)
-public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock,
- AudioTrack.Listener {
+public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
private final EventDispatcher eventDispatcher;
private final AudioTrack audioTrack;
private boolean passthroughEnabled;
+ private boolean codecNeedsDiscardChannelsWorkaround;
private android.media.MediaFormat passthroughMediaFormat;
private int pcmEncoding;
- private int audioSessionId;
+ private int channelCount;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
@@ -123,14 +123,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
+ * @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
+ * output.
*/
public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
DrmSessionManager drmSessionManager,
boolean playClearSamplesWithoutKeys, Handler eventHandler,
- AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities) {
+ AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
+ AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
- audioTrack = new AudioTrack(audioCapabilities, this);
+ audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
}
@@ -141,8 +143,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
if (!MimeTypes.isAudio(mimeType)) {
return FORMAT_UNSUPPORTED_TYPE;
}
+ int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
if (allowPassthrough(mimeType) && mediaCodecSelector.getPassthroughDecoderInfo() != null) {
- return ADAPTIVE_NOT_SEAMLESS | FORMAT_HANDLED;
+ return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | FORMAT_HANDLED;
}
MediaCodecInfo decoderInfo = mediaCodecSelector.getDecoderInfo(mimeType, false);
if (decoderInfo == null) {
@@ -155,7 +158,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
&& (format.channelCount == Format.NO_VALUE
|| decoderInfo.isAudioChannelCountSupportedV21(format.channelCount)));
int formatSupport = decoderCapable ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES;
- return ADAPTIVE_NOT_SEAMLESS | formatSupport;
+ return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | formatSupport;
}
@Override
@@ -185,7 +188,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
@Override
- protected void configureCodec(MediaCodec codec, Format format, MediaCrypto crypto) {
+ protected void configureCodec(MediaCodecInfo codecInfo, MediaCodec codec, Format format,
+ MediaCrypto crypto) {
+ codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name);
if (passthroughEnabled) {
// Override the MIME type used to configure the codec if we are using a passthrough decoder.
passthroughMediaFormat = format.getFrameworkMediaFormatV16();
@@ -217,39 +222,72 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
// output 16-bit PCM.
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
+ channelCount = newFormat.channelCount;
}
@Override
- protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) {
+ protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat)
+ throws ExoPlaybackException {
boolean passthrough = passthroughMediaFormat != null;
String mimeType = passthrough ? passthroughMediaFormat.getString(MediaFormat.KEY_MIME)
: MimeTypes.AUDIO_RAW;
MediaFormat format = passthrough ? passthroughMediaFormat : outputFormat;
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
- audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0);
+ int[] channelMap;
+ if (codecNeedsDiscardChannelsWorkaround && channelCount == 6 && this.channelCount < 6) {
+ channelMap = new int[this.channelCount];
+ for (int i = 0; i < this.channelCount; i++) {
+ channelMap[i] = i;
+ }
+ } else {
+ channelMap = null;
+ }
+
+ try {
+ audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding, 0, channelMap);
+ } catch (AudioTrack.ConfigurationException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
+ }
}
/**
- * Called when the audio session id becomes known. Once the id is known it will not change (and
- * hence this method will not be called again) unless the renderer is disabled and then
- * subsequently re-enabled.
- *
- * The default implementation is a no-op. One reason for overriding this method would be to
- * instantiate and enable a {@link Virtualizer} in order to spatialize the audio channels. For
- * this use case, any {@link Virtualizer} instances should be released in {@link #onDisabled()}
- * (if not before).
+ * Called when the audio session id becomes known. The default implementation is a no-op. One
+ * reason for overriding this method would be to instantiate and enable a {@link Virtualizer} in
+ * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
+ * should be released in {@link #onDisabled()} (if not before).
*
- * @param audioSessionId The audio session id.
+ * @see AudioTrack.Listener#onAudioSessionId(int)
*/
protected void onAudioSessionId(int audioSessionId) {
// Do nothing.
}
+ /**
+ * @see AudioTrack.Listener#onPositionDiscontinuity()
+ */
+ protected void onAudioTrackPositionDiscontinuity() {
+ // Do nothing.
+ }
+
+ /**
+ * @see AudioTrack.Listener#onUnderrun(int, long, long)
+ */
+ protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
+ long elapsedSinceLastFeedMs) {
+ // Do nothing.
+ }
+
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
super.onEnabled(joining);
eventDispatcher.enabled(decoderCounters);
+ int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
+ if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
+ audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+ } else {
+ audioTrack.disableTunneling();
+ }
}
@Override
@@ -274,7 +312,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
protected void onDisabled() {
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
try {
audioTrack.release();
} finally {
@@ -289,7 +326,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Override
public boolean isEnded() {
- return super.isEnded() && !audioTrack.hasPendingData();
+ return super.isEnded() && audioTrack.isEnded();
}
@Override
@@ -308,6 +345,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return currentPositionUs;
}
+ @Override
+ public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
+ return audioTrack.setPlaybackParameters(playbackParameters);
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return audioTrack.getPlaybackParameters();
+ }
+
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
@@ -325,54 +372,25 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return true;
}
- if (!audioTrack.isInitialized()) {
- // Initialize the AudioTrack now.
- try {
- if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
- audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
- eventDispatcher.audioSessionId(audioSessionId);
- onAudioSessionId(audioSessionId);
- } else {
- audioTrack.initialize(audioSessionId);
- }
- } catch (AudioTrack.InitializationException e) {
- throw ExoPlaybackException.createForRenderer(e, getIndex());
- }
- if (getState() == STATE_STARTED) {
- audioTrack.play();
- }
- }
-
- int handleBufferResult;
try {
- handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs);
- } catch (AudioTrack.WriteException e) {
+ if (audioTrack.handleBuffer(buffer, bufferPresentationTimeUs)) {
+ codec.releaseOutputBuffer(bufferIndex, false);
+ decoderCounters.renderedOutputBufferCount++;
+ return true;
+ }
+ } catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
-
- // If we are out of sync, allow currentPositionUs to jump backwards.
- if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
- handleAudioTrackDiscontinuity();
- allowPositionDiscontinuity = true;
- }
-
- // Release the buffer if it was consumed.
- if ((handleBufferResult & AudioTrack.RESULT_BUFFER_CONSUMED) != 0) {
- codec.releaseOutputBuffer(bufferIndex, false);
- decoderCounters.renderedOutputBufferCount++;
- return true;
- }
-
return false;
}
@Override
- protected void onOutputStreamEnded() {
- audioTrack.handleEndOfStream();
- }
-
- protected void handleAudioTrackDiscontinuity() {
- // Do nothing
+ protected void renderToEndOfStream() throws ExoPlaybackException {
+ try {
+ audioTrack.playToEndOfStream();
+ } catch (AudioTrack.WriteException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
+ }
}
@Override
@@ -381,14 +399,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
case C.MSG_SET_VOLUME:
audioTrack.setVolume((Float) message);
break;
- case C.MSG_SET_PLAYBACK_PARAMS:
- audioTrack.setPlaybackParams((PlaybackParams) message);
- break;
case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message;
- if (audioTrack.setStreamType(streamType)) {
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
- }
+ audioTrack.setStreamType(streamType);
break;
default:
super.handleMessage(messageType, message);
@@ -396,11 +409,41 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
}
- // AudioTrack.Listener implementation.
+ /**
+ * Returns whether the decoder is known to output six audio channels when provided with input with
+ * fewer than six channels.
+ *
+ * See [Internal: b/35655036].
+ */
+ private static boolean codecNeedsDiscardChannelsWorkaround(String codecName) {
+ // The workaround applies to Samsung Galaxy S6 and Samsung Galaxy S7.
+ return Util.SDK_INT < 24 && "OMX.SEC.aac.dec".equals(codecName)
+ && "samsung".equals(Util.MANUFACTURER)
+ && (Util.DEVICE.startsWith("zeroflte") || Util.DEVICE.startsWith("herolte")
+ || Util.DEVICE.startsWith("heroqlte"));
+ }
+
+ private final class AudioTrackListener implements AudioTrack.Listener {
+
+ @Override
+ public void onAudioSessionId(int audioSessionId) {
+ eventDispatcher.audioSessionId(audioSessionId);
+ MediaCodecAudioRenderer.this.onAudioSessionId(audioSessionId);
+ }
+
+ @Override
+ public void onPositionDiscontinuity() {
+ onAudioTrackPositionDiscontinuity();
+ // We are out of sync so allow currentPositionUs to jump backwards.
+ MediaCodecAudioRenderer.this.allowPositionDiscontinuity = true;
+ }
+
+ @Override
+ public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
+ eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ }
- @Override
- public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
- eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ResamplingAudioProcessor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ResamplingAudioProcessor.java
new file mode 100755
index 000000000..afa3c217a
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/ResamplingAudioProcessor.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.audio;
+
+import org.telegram.messenger.exoplayer2.C;
+import org.telegram.messenger.exoplayer2.Format;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * An {@link AudioProcessor} that converts audio data to {@link C#ENCODING_PCM_16BIT}.
+ */
+/* package */ final class ResamplingAudioProcessor implements AudioProcessor {
+
+ private int sampleRateHz;
+ private int channelCount;
+ @C.PcmEncoding
+ private int encoding;
+ private ByteBuffer buffer;
+ private ByteBuffer outputBuffer;
+ private boolean inputEnded;
+
+ /**
+ * Creates a new audio processor that converts audio data to {@link C#ENCODING_PCM_16BIT}.
+ */
+ public ResamplingAudioProcessor() {
+ sampleRateHz = Format.NO_VALUE;
+ channelCount = Format.NO_VALUE;
+ encoding = C.ENCODING_INVALID;
+ buffer = EMPTY_BUFFER;
+ outputBuffer = EMPTY_BUFFER;
+ }
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
+ throws UnhandledFormatException {
+ if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT
+ && encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
+ && this.encoding == encoding) {
+ return false;
+ }
+ this.sampleRateHz = sampleRateHz;
+ this.channelCount = channelCount;
+ this.encoding = encoding;
+ if (encoding == C.ENCODING_PCM_16BIT) {
+ buffer = EMPTY_BUFFER;
+ }
+ return true;
+ }
+
+ @Override
+ public boolean isActive() {
+ return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return channelCount;
+ }
+
+ @Override
+ public int getOutputEncoding() {
+ return C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ // Prepare the output buffer.
+ int position = inputBuffer.position();
+ int limit = inputBuffer.limit();
+ int size = limit - position;
+ int resampledSize;
+ switch (encoding) {
+ case C.ENCODING_PCM_8BIT:
+ resampledSize = size * 2;
+ break;
+ case C.ENCODING_PCM_24BIT:
+ resampledSize = (size / 3) * 2;
+ break;
+ case C.ENCODING_PCM_32BIT:
+ resampledSize = size / 2;
+ break;
+ case C.ENCODING_PCM_16BIT:
+ case C.ENCODING_INVALID:
+ case Format.NO_VALUE:
+ default:
+ throw new IllegalStateException();
+ }
+ if (buffer.capacity() < resampledSize) {
+ buffer = ByteBuffer.allocateDirect(resampledSize).order(ByteOrder.nativeOrder());
+ } else {
+ buffer.clear();
+ }
+
+ // Resample the little endian input and update the input/output buffers.
+ switch (encoding) {
+ case C.ENCODING_PCM_8BIT:
+ // 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
+ for (int i = position; i < limit; i++) {
+ buffer.put((byte) 0);
+ buffer.put((byte) ((inputBuffer.get(i) & 0xFF) - 128));
+ }
+ break;
+ case C.ENCODING_PCM_24BIT:
+ // 24->16 bit resampling. Drop the least significant byte.
+ for (int i = position; i < limit; i += 3) {
+ buffer.put(inputBuffer.get(i + 1));
+ buffer.put(inputBuffer.get(i + 2));
+ }
+ break;
+ case C.ENCODING_PCM_32BIT:
+ // 32->16 bit resampling. Drop the two least significant bytes.
+ for (int i = position; i < limit; i += 4) {
+ buffer.put(inputBuffer.get(i + 2));
+ buffer.put(inputBuffer.get(i + 3));
+ }
+ break;
+ case C.ENCODING_PCM_16BIT:
+ case C.ENCODING_INVALID:
+ case Format.NO_VALUE:
+ default:
+ // Never happens.
+ throw new IllegalStateException();
+ }
+ inputBuffer.position(inputBuffer.limit());
+ buffer.flip();
+ outputBuffer = buffer;
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ inputEnded = true;
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @SuppressWarnings("ReferenceEquality")
+ @Override
+ public boolean isEnded() {
+ return inputEnded && outputBuffer == EMPTY_BUFFER;
+ }
+
+ @Override
+ public void flush() {
+ outputBuffer = EMPTY_BUFFER;
+ inputEnded = false;
+ }
+
+ @Override
+ public void reset() {
+ flush();
+ buffer = EMPTY_BUFFER;
+ sampleRateHz = Format.NO_VALUE;
+ channelCount = Format.NO_VALUE;
+ encoding = C.ENCODING_INVALID;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SimpleDecoderAudioRenderer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SimpleDecoderAudioRenderer.java
index e62afc2b8..e4fe25306 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SimpleDecoderAudioRenderer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SimpleDecoderAudioRenderer.java
@@ -15,15 +15,17 @@
*/
package org.telegram.messenger.exoplayer2.audio;
-import android.media.PlaybackParams;
+import android.media.audiofx.Virtualizer;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
+import android.support.annotation.IntDef;
import org.telegram.messenger.exoplayer2.BaseRenderer;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.ExoPlaybackException;
import org.telegram.messenger.exoplayer2.Format;
import org.telegram.messenger.exoplayer2.FormatHolder;
+import org.telegram.messenger.exoplayer2.PlaybackParameters;
import org.telegram.messenger.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import org.telegram.messenger.exoplayer2.decoder.DecoderCounters;
import org.telegram.messenger.exoplayer2.decoder.DecoderInputBuffer;
@@ -32,23 +34,46 @@ import org.telegram.messenger.exoplayer2.decoder.SimpleOutputBuffer;
import org.telegram.messenger.exoplayer2.drm.DrmSession;
import org.telegram.messenger.exoplayer2.drm.DrmSessionManager;
import org.telegram.messenger.exoplayer2.drm.ExoMediaCrypto;
+import org.telegram.messenger.exoplayer2.util.Assertions;
import org.telegram.messenger.exoplayer2.util.MediaClock;
import org.telegram.messenger.exoplayer2.util.MimeTypes;
import org.telegram.messenger.exoplayer2.util.TraceUtil;
import org.telegram.messenger.exoplayer2.util.Util;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
/**
* Decodes and renders audio using a {@link SimpleDecoder}.
*/
-public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock,
- AudioTrack.Listener {
+public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({REINITIALIZATION_STATE_NONE, REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM,
+ REINITIALIZATION_STATE_WAIT_END_OF_STREAM})
+ private @interface ReinitializationState {}
+ /**
+ * The decoder does not need to be re-initialized.
+ */
+ private static final int REINITIALIZATION_STATE_NONE = 0;
+ /**
+ * The input format has changed in a way that requires the decoder to be re-initialized, but we
+ * haven't yet signaled an end of stream to the existing decoder. We need to do so in order to
+ * ensure that it outputs any remaining buffers before we release it.
+ */
+ private static final int REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM = 1;
+ /**
+ * The input format has changed in a way that requires the decoder to be re-initialized, and we've
+ * signaled an end of stream to the existing decoder. We're waiting for the decoder to output an
+ * end of stream signal to indicate that it has output any remaining buffers before we release it.
+ */
+ private static final int REINITIALIZATION_STATE_WAIT_END_OF_STREAM = 2;
+
+ private final DrmSessionManager drmSessionManager;
private final boolean playClearSamplesWithoutKeys;
-
private final EventDispatcher eventDispatcher;
private final AudioTrack audioTrack;
- private final DrmSessionManager drmSessionManager;
private final FormatHolder formatHolder;
+ private final DecoderInputBuffer flagsOnlyBuffer;
private DecoderCounters decoderCounters;
private Format inputFormat;
@@ -59,14 +84,16 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private DrmSession drmSession;
private DrmSession pendingDrmSession;
+ @ReinitializationState private int decoderReinitializationState;
+ private boolean decoderReceivedBuffers;
+ private boolean audioTrackNeedsConfigure;
+
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
private boolean inputStreamEnded;
private boolean outputStreamEnded;
private boolean waitingForKeys;
- private int audioSessionId;
-
public SimpleDecoderAudioRenderer() {
this(null, null);
}
@@ -75,10 +102,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
+ * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
- AudioRendererEventListener eventListener) {
- this(eventHandler, eventListener, null);
+ AudioRendererEventListener eventListener, AudioProcessor... audioProcessors) {
+ this(eventHandler, eventListener, null, null, false, audioProcessors);
}
/**
@@ -106,17 +134,21 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
+ * @param audioProcessors Optional {@link AudioProcessor}s that will process audio before output.
*/
public SimpleDecoderAudioRenderer(Handler eventHandler,
AudioRendererEventListener eventListener, AudioCapabilities audioCapabilities,
- DrmSessionManager drmSessionManager, boolean playClearSamplesWithoutKeys) {
+ DrmSessionManager drmSessionManager, boolean playClearSamplesWithoutKeys,
+ AudioProcessor... audioProcessors) {
super(C.TRACK_TYPE_AUDIO);
- eventDispatcher = new EventDispatcher(eventHandler, eventListener);
- audioTrack = new AudioTrack(audioCapabilities, this);
this.drmSessionManager = drmSessionManager;
- formatHolder = new FormatHolder();
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
+ eventDispatcher = new EventDispatcher(eventHandler, eventListener);
+ audioTrack = new AudioTrack(audioCapabilities, audioProcessors, new AudioTrackListener());
+ formatHolder = new FormatHolder();
+ flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
+ decoderReinitializationState = REINITIALIZATION_STATE_NONE;
+ audioTrackNeedsConfigure = true;
}
@Override
@@ -124,59 +156,98 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return this;
}
+ @Override
+ public final int supportsFormat(Format format) {
+ int formatSupport = supportsFormatInternal(format);
+ if (formatSupport == FORMAT_UNSUPPORTED_TYPE || formatSupport == FORMAT_UNSUPPORTED_SUBTYPE) {
+ return formatSupport;
+ }
+ int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
+ return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | formatSupport;
+ }
+
+ /**
+ * Returns the {@link #FORMAT_SUPPORT_MASK} component of the return value for
+ * {@link #supportsFormat(Format)}.
+ *
+ * @param format The format.
+ * @return The extent to which the renderer supports the format itself.
+ */
+ protected abstract int supportsFormatInternal(Format format);
+
@Override
public void render(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
if (outputStreamEnded) {
+ try {
+ audioTrack.playToEndOfStream();
+ } catch (AudioTrack.WriteException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
+ }
return;
}
// Try and read a format if we don't have one already.
- if (inputFormat == null && !readFormat()) {
- // We can't make progress without one.
- return;
- }
-
- drmSession = pendingDrmSession;
- ExoMediaCrypto mediaCrypto = null;
- if (drmSession != null) {
- @DrmSession.State int drmSessionState = drmSession.getState();
- if (drmSessionState == DrmSession.STATE_ERROR) {
- throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
- } else if (drmSessionState == DrmSession.STATE_OPENED
- || drmSessionState == DrmSession.STATE_OPENED_WITH_KEYS) {
- mediaCrypto = drmSession.getMediaCrypto();
+ if (inputFormat == null) {
+ // We don't have a format yet, so try and read one.
+ flagsOnlyBuffer.clear();
+ int result = readSource(formatHolder, flagsOnlyBuffer, true);
+ if (result == C.RESULT_FORMAT_READ) {
+ onInputFormatChanged(formatHolder.format);
+ } else if (result == C.RESULT_BUFFER_READ) {
+ // End of stream read having not read a format.
+ Assertions.checkState(flagsOnlyBuffer.isEndOfStream());
+ inputStreamEnded = true;
+ processEndOfStream();
+ return;
} else {
- // The drm session isn't open yet.
+ // We still don't have a format and can't make progress without one.
return;
}
}
+
// If we don't have a decoder yet, we need to instantiate one.
- if (decoder == null) {
+ maybeInitDecoder();
+
+ if (decoder != null) {
try {
- long codecInitializingTimestamp = SystemClock.elapsedRealtime();
- TraceUtil.beginSection("createAudioDecoder");
- decoder = createDecoder(inputFormat, mediaCrypto);
+ // Rendering loop.
+ TraceUtil.beginSection("drainAndFeed");
+ while (drainOutputBuffer()) {}
+ while (feedInputBuffer()) {}
TraceUtil.endSection();
- long codecInitializedTimestamp = SystemClock.elapsedRealtime();
- eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
- codecInitializedTimestamp - codecInitializingTimestamp);
- decoderCounters.decoderInitCount++;
- } catch (AudioDecoderException e) {
+ } catch (AudioDecoderException | AudioTrack.ConfigurationException
+ | AudioTrack.InitializationException | AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
+ decoderCounters.ensureUpdated();
}
+ }
- // Rendering loop.
- try {
- TraceUtil.beginSection("drainAndFeed");
- while (drainOutputBuffer()) {}
- while (feedInputBuffer()) {}
- TraceUtil.endSection();
- } catch (AudioTrack.InitializationException | AudioTrack.WriteException
- | AudioDecoderException e) {
- throw ExoPlaybackException.createForRenderer(e, getIndex());
- }
- decoderCounters.ensureUpdated();
+ /**
+ * Called when the audio session id becomes known. The default implementation is a no-op. One
+ * reason for overriding this method would be to instantiate and enable a {@link Virtualizer} in
+ * order to spatialize the audio channels. For this use case, any {@link Virtualizer} instances
+ * should be released in {@link #onDisabled()} (if not before).
+ *
+ * @see AudioTrack.Listener#onAudioSessionId(int)
+ */
+ protected void onAudioSessionId(int audioSessionId) {
+ // Do nothing.
+ }
+
+ /**
+ * @see AudioTrack.Listener#onPositionDiscontinuity()
+ */
+ protected void onAudioTrackPositionDiscontinuity() {
+ // Do nothing.
+ }
+
+ /**
+ * @see AudioTrack.Listener#onUnderrun(int, long, long)
+ */
+ protected void onAudioTrackUnderrun(int bufferSize, long bufferSizeMs,
+ long elapsedSinceLastFeedMs) {
+ // Do nothing.
}
/**
@@ -205,12 +276,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
null, null, 0, null);
}
- private boolean drainOutputBuffer() throws AudioDecoderException,
- AudioTrack.InitializationException, AudioTrack.WriteException {
- if (outputStreamEnded) {
- return false;
- }
-
+ private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException,
+ AudioTrack.ConfigurationException, AudioTrack.InitializationException,
+ AudioTrack.WriteException {
if (outputBuffer == null) {
outputBuffer = decoder.dequeueOutputBuffer();
if (outputBuffer == null) {
@@ -220,38 +288,28 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
if (outputBuffer.isEndOfStream()) {
- outputStreamEnded = true;
- audioTrack.handleEndOfStream();
- outputBuffer.release();
- outputBuffer = null;
+ if (decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM) {
+ // We're waiting to re-initialize the decoder, and have now processed all final buffers.
+ releaseDecoder();
+ maybeInitDecoder();
+ // The audio track may need to be recreated once the new output format is known.
+ audioTrackNeedsConfigure = true;
+ } else {
+ outputBuffer.release();
+ outputBuffer = null;
+ processEndOfStream();
+ }
return false;
}
- if (!audioTrack.isInitialized()) {
+ if (audioTrackNeedsConfigure) {
Format outputFormat = getOutputFormat();
audioTrack.configure(outputFormat.sampleMimeType, outputFormat.channelCount,
outputFormat.sampleRate, outputFormat.pcmEncoding, 0);
- if (audioSessionId == AudioTrack.SESSION_ID_NOT_SET) {
- audioSessionId = audioTrack.initialize(AudioTrack.SESSION_ID_NOT_SET);
- eventDispatcher.audioSessionId(audioSessionId);
- onAudioSessionId(audioSessionId);
- } else {
- audioTrack.initialize(audioSessionId);
- }
- if (getState() == STATE_STARTED) {
- audioTrack.play();
- }
+ audioTrackNeedsConfigure = false;
}
- int handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs);
-
- // If we are out of sync, allow currentPositionUs to jump backwards.
- if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
- allowPositionDiscontinuity = true;
- }
-
- // Release the buffer if it was consumed.
- if ((handleBufferResult & AudioTrack.RESULT_BUFFER_CONSUMED) != 0) {
+ if (audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs)) {
decoderCounters.renderedOutputBufferCount++;
outputBuffer.release();
outputBuffer = null;
@@ -262,7 +320,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
private boolean feedInputBuffer() throws AudioDecoderException, ExoPlaybackException {
- if (inputStreamEnded) {
+ if (decoder == null || decoderReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM
+ || inputStreamEnded) {
+ // We need to reinitialize the decoder or the input stream has ended.
return false;
}
@@ -273,12 +333,20 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
+ if (decoderReinitializationState == REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM) {
+ inputBuffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
+ decoder.queueInputBuffer(inputBuffer);
+ inputBuffer = null;
+ decoderReinitializationState = REINITIALIZATION_STATE_WAIT_END_OF_STREAM;
+ return false;
+ }
+
int result;
if (waitingForKeys) {
// We've already read an encrypted sample into buffer, and are waiting for keys.
result = C.RESULT_BUFFER_READ;
} else {
- result = readSource(formatHolder, inputBuffer);
+ result = readSource(formatHolder, inputBuffer, false);
}
if (result == C.RESULT_NOTHING_READ) {
@@ -301,6 +369,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
inputBuffer.flip();
decoder.queueInputBuffer(inputBuffer);
+ decoderReceivedBuffers = true;
decoderCounters.inputBufferCount++;
inputBuffer = null;
return true;
@@ -318,19 +387,34 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
&& (bufferEncrypted || !playClearSamplesWithoutKeys);
}
- private void flushDecoder() {
- inputBuffer = null;
- waitingForKeys = false;
- if (outputBuffer != null) {
- outputBuffer.release();
- outputBuffer = null;
+ private void processEndOfStream() throws ExoPlaybackException {
+ outputStreamEnded = true;
+ try {
+ audioTrack.playToEndOfStream();
+ } catch (AudioTrack.WriteException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
+ }
+ }
+
+ private void flushDecoder() throws ExoPlaybackException {
+ waitingForKeys = false;
+ if (decoderReinitializationState != REINITIALIZATION_STATE_NONE) {
+ releaseDecoder();
+ maybeInitDecoder();
+ } else {
+ inputBuffer = null;
+ if (outputBuffer != null) {
+ outputBuffer.release();
+ outputBuffer = null;
+ }
+ decoder.flush();
+ decoderReceivedBuffers = false;
}
- decoder.flush();
}
@Override
public boolean isEnded() {
- return outputStreamEnded && !audioTrack.hasPendingData();
+ return outputStreamEnded && audioTrack.isEnded();
}
@Override
@@ -350,27 +434,30 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return currentPositionUs;
}
- /**
- * Called when the audio session id becomes known. Once the id is known it will not change (and
- * hence this method will not be called again) unless the renderer is disabled and then
- * subsequently re-enabled.
- *
- * The default implementation is a no-op.
- *
- * @param audioSessionId The audio session id.
- */
- protected void onAudioSessionId(int audioSessionId) {
- // Do nothing.
+ @Override
+ public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) {
+ return audioTrack.setPlaybackParameters(playbackParameters);
+ }
+
+ @Override
+ public PlaybackParameters getPlaybackParameters() {
+ return audioTrack.getPlaybackParameters();
}
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
decoderCounters = new DecoderCounters();
eventDispatcher.enabled(decoderCounters);
+ int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
+ if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
+ audioTrack.enableTunnelingV21(tunnelingAudioSessionId);
+ } else {
+ audioTrack.disableTunneling();
+ }
}
@Override
- protected void onPositionReset(long positionUs, boolean joining) {
+ protected void onPositionReset(long positionUs, boolean joining) throws ExoPlaybackException {
audioTrack.reset();
currentPositionUs = positionUs;
allowPositionDiscontinuity = true;
@@ -393,17 +480,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override
protected void onDisabled() {
- inputBuffer = null;
- outputBuffer = null;
inputFormat = null;
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
+ audioTrackNeedsConfigure = true;
waitingForKeys = false;
try {
- if (decoder != null) {
- decoder.release();
- decoder = null;
- decoderCounters.decoderReleaseCount++;
- }
+ releaseDecoder();
audioTrack.release();
} finally {
try {
@@ -425,13 +506,52 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
- private boolean readFormat() throws ExoPlaybackException {
- int result = readSource(formatHolder, null);
- if (result == C.RESULT_FORMAT_READ) {
- onInputFormatChanged(formatHolder.format);
- return true;
+ private void maybeInitDecoder() throws ExoPlaybackException {
+ if (decoder != null) {
+ return;
}
- return false;
+
+ drmSession = pendingDrmSession;
+ ExoMediaCrypto mediaCrypto = null;
+ if (drmSession != null) {
+ @DrmSession.State int drmSessionState = drmSession.getState();
+ if (drmSessionState == DrmSession.STATE_ERROR) {
+ throw ExoPlaybackException.createForRenderer(drmSession.getError(), getIndex());
+ } else if (drmSessionState == DrmSession.STATE_OPENED
+ || drmSessionState == DrmSession.STATE_OPENED_WITH_KEYS) {
+ mediaCrypto = drmSession.getMediaCrypto();
+ } else {
+ // The drm session isn't open yet.
+ return;
+ }
+ }
+
+ try {
+ long codecInitializingTimestamp = SystemClock.elapsedRealtime();
+ TraceUtil.beginSection("createAudioDecoder");
+ decoder = createDecoder(inputFormat, mediaCrypto);
+ TraceUtil.endSection();
+ long codecInitializedTimestamp = SystemClock.elapsedRealtime();
+ eventDispatcher.decoderInitialized(decoder.getName(), codecInitializedTimestamp,
+ codecInitializedTimestamp - codecInitializingTimestamp);
+ decoderCounters.decoderInitCount++;
+ } catch (AudioDecoderException e) {
+ throw ExoPlaybackException.createForRenderer(e, getIndex());
+ }
+ }
+
+ private void releaseDecoder() {
+ if (decoder == null) {
+ return;
+ }
+
+ inputBuffer = null;
+ outputBuffer = null;
+ decoder.release();
+ decoder = null;
+ decoderCounters.decoderReleaseCount++;
+ decoderReinitializationState = REINITIALIZATION_STATE_NONE;
+ decoderReceivedBuffers = false;
}
private void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
@@ -456,6 +576,16 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
+ if (decoderReceivedBuffers) {
+ // Signal end of stream and wait for any final output buffers before re-initialization.
+ decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
+ } else {
+ // There aren't any final output buffers, so release the decoder immediately.
+ releaseDecoder();
+ maybeInitDecoder();
+ audioTrackNeedsConfigure = true;
+ }
+
eventDispatcher.inputFormatChanged(newFormat);
}
@@ -465,14 +595,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
case C.MSG_SET_VOLUME:
audioTrack.setVolume((Float) message);
break;
- case C.MSG_SET_PLAYBACK_PARAMS:
- audioTrack.setPlaybackParams((PlaybackParams) message);
- break;
case C.MSG_SET_STREAM_TYPE:
@C.StreamType int streamType = (Integer) message;
- if (audioTrack.setStreamType(streamType)) {
- audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
- }
+ audioTrack.setStreamType(streamType);
break;
default:
super.handleMessage(messageType, message);
@@ -480,11 +605,27 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
- // AudioTrack.Listener implementation.
+ private final class AudioTrackListener implements AudioTrack.Listener {
+
+ @Override
+ public void onAudioSessionId(int audioSessionId) {
+ eventDispatcher.audioSessionId(audioSessionId);
+ SimpleDecoderAudioRenderer.this.onAudioSessionId(audioSessionId);
+ }
+
+ @Override
+ public void onPositionDiscontinuity() {
+ onAudioTrackPositionDiscontinuity();
+ // We are out of sync so allow currentPositionUs to jump backwards.
+ SimpleDecoderAudioRenderer.this.allowPositionDiscontinuity = true;
+ }
+
+ @Override
+ public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
+ eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ onAudioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
+ }
- @Override
- public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
- eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Sonic.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Sonic.java
new file mode 100755
index 000000000..8182d95bc
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/Sonic.java
@@ -0,0 +1,534 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * Copyright (C) 2010 Bill Cox, Sonic Library
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.audio;
+
+import org.telegram.messenger.exoplayer2.util.Assertions;
+import java.nio.ShortBuffer;
+import java.util.Arrays;
+
+/**
+ * Sonic audio stream processor for time/pitch stretching.
+ *
+ * Based on https://github.com/waywardgeek/sonic.
+ */
+/* package */ final class Sonic {
+
+ private static final boolean USE_CHORD_PITCH = false;
+ private static final int MINIMUM_PITCH = 65;
+ private static final int MAXIMUM_PITCH = 400;
+ private static final int AMDF_FREQUENCY = 4000;
+
+ private final int sampleRate;
+ private final int numChannels;
+ private final int minPeriod;
+ private final int maxPeriod;
+ private final int maxRequired;
+ private final short[] downSampleBuffer;
+
+ private int inputBufferSize;
+ private short[] inputBuffer;
+ private int outputBufferSize;
+ private short[] outputBuffer;
+ private int pitchBufferSize;
+ private short[] pitchBuffer;
+ private int oldRatePosition;
+ private int newRatePosition;
+ private float speed;
+ private float pitch;
+ private int numInputSamples;
+ private int numOutputSamples;
+ private int numPitchSamples;
+ private int remainingInputToCopy;
+ private int prevPeriod;
+ private int prevMinDiff;
+ private int minDiff;
+ private int maxDiff;
+
+ /**
+ * Creates a new Sonic audio stream processor.
+ *
+ * @param sampleRate The sample rate of input audio.
+ * @param numChannels The number of channels in the input audio.
+ */
+ public Sonic(int sampleRate, int numChannels) {
+ this.sampleRate = sampleRate;
+ this.numChannels = numChannels;
+ minPeriod = sampleRate / MAXIMUM_PITCH;
+ maxPeriod = sampleRate / MINIMUM_PITCH;
+ maxRequired = 2 * maxPeriod;
+ downSampleBuffer = new short[maxRequired];
+ inputBufferSize = maxRequired;
+ inputBuffer = new short[maxRequired * numChannels];
+ outputBufferSize = maxRequired;
+ outputBuffer = new short[maxRequired * numChannels];
+ pitchBufferSize = maxRequired;
+ pitchBuffer = new short[maxRequired * numChannels];
+ oldRatePosition = 0;
+ newRatePosition = 0;
+ prevPeriod = 0;
+ speed = 1.0f;
+ pitch = 1.0f;
+ }
+
+ /**
+ * Sets the output speed.
+ */
+ public void setSpeed(float speed) {
+ this.speed = speed;
+ }
+
+ /**
+ * Gets the output speed.
+ */
+ public float getSpeed() {
+ return speed;
+ }
+
+ /**
+ * Sets the output pitch.
+ */
+ public void setPitch(float pitch) {
+ this.pitch = pitch;
+ }
+
+ /**
+ * Gets the output pitch.
+ */
+ public float getPitch() {
+ return pitch;
+ }
+
+ /**
+ * Queues remaining data from {@code buffer}, and advances its position by the number of bytes
+ * consumed.
+ *
+ * @param buffer A {@link ShortBuffer} containing input data between its position and limit.
+ */
+ public void queueInput(ShortBuffer buffer) {
+ int samplesToWrite = buffer.remaining() / numChannels;
+ int bytesToWrite = samplesToWrite * numChannels * 2;
+ enlargeInputBufferIfNeeded(samplesToWrite);
+ buffer.get(inputBuffer, numInputSamples * numChannels, bytesToWrite / 2);
+ numInputSamples += samplesToWrite;
+ processStreamInput();
+ }
+
+ /**
+ * Gets available output, outputting to the start of {@code buffer}. The buffer's position will be
+ * advanced by the number of bytes written.
+ *
+ * @param buffer A {@link ShortBuffer} into which output will be written.
+ */
+ public void getOutput(ShortBuffer buffer) {
+ int samplesToRead = Math.min(buffer.remaining() / numChannels, numOutputSamples);
+ buffer.put(outputBuffer, 0, samplesToRead * numChannels);
+ numOutputSamples -= samplesToRead;
+ System.arraycopy(outputBuffer, samplesToRead * numChannels, outputBuffer, 0,
+ numOutputSamples * numChannels);
+ }
+
+ /**
+ * Forces generating output using whatever data has been queued already. No extra delay will be
+ * added to the output, but flushing in the middle of words could introduce distortion.
+ */
+ public void queueEndOfStream() {
+ int remainingSamples = numInputSamples;
+ float s = speed / pitch;
+ int expectedOutputSamples =
+ numOutputSamples + (int) ((remainingSamples / s + numPitchSamples) / pitch + 0.5f);
+
+ // Add enough silence to flush both input and pitch buffers.
+ enlargeInputBufferIfNeeded(remainingSamples + 2 * maxRequired);
+ for (int xSample = 0; xSample < 2 * maxRequired * numChannels; xSample++) {
+ inputBuffer[remainingSamples * numChannels + xSample] = 0;
+ }
+ numInputSamples += 2 * maxRequired;
+ processStreamInput();
+ // Throw away any extra samples we generated due to the silence we added.
+ if (numOutputSamples > expectedOutputSamples) {
+ numOutputSamples = expectedOutputSamples;
+ }
+ // Empty input and pitch buffers.
+ numInputSamples = 0;
+ remainingInputToCopy = 0;
+ numPitchSamples = 0;
+ }
+
+ /**
+ * Returns the number of output samples that can be read with {@link #getOutput(ShortBuffer)}.
+ */
+ public int getSamplesAvailable() {
+ return numOutputSamples;
+ }
+
+ // Internal methods.
+
+ private void enlargeOutputBufferIfNeeded(int numSamples) {
+ if (numOutputSamples + numSamples > outputBufferSize) {
+ outputBufferSize += (outputBufferSize / 2) + numSamples;
+ outputBuffer = Arrays.copyOf(outputBuffer, outputBufferSize * numChannels);
+ }
+ }
+
+ private void enlargeInputBufferIfNeeded(int numSamples) {
+ if (numInputSamples + numSamples > inputBufferSize) {
+ inputBufferSize += (inputBufferSize / 2) + numSamples;
+ inputBuffer = Arrays.copyOf(inputBuffer, inputBufferSize * numChannels);
+ }
+ }
+
+ private void removeProcessedInputSamples(int position) {
+ int remainingSamples = numInputSamples - position;
+ System.arraycopy(inputBuffer, position * numChannels, inputBuffer, 0,
+ remainingSamples * numChannels);
+ numInputSamples = remainingSamples;
+ }
+
+ private void copyToOutput(short[] samples, int position, int numSamples) {
+ enlargeOutputBufferIfNeeded(numSamples);
+ System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels,
+ numSamples * numChannels);
+ numOutputSamples += numSamples;
+ }
+
+ private int copyInputToOutput(int position) {
+ int numSamples = Math.min(maxRequired, remainingInputToCopy);
+ copyToOutput(inputBuffer, position, numSamples);
+ remainingInputToCopy -= numSamples;
+ return numSamples;
+ }
+
+ private void downSampleInput(short[] samples, int position, int skip) {
+ // If skip is greater than one, average skip samples together and write them to the down-sample
+ // buffer. If numChannels is greater than one, mix the channels together as we down sample.
+ int numSamples = maxRequired / skip;
+ int samplesPerValue = numChannels * skip;
+ position *= numChannels;
+ for (int i = 0; i < numSamples; i++) {
+ int value = 0;
+ for (int j = 0; j < samplesPerValue; j++) {
+ value += samples[position + i * samplesPerValue + j];
+ }
+ value /= samplesPerValue;
+ downSampleBuffer[i] = (short) value;
+ }
+ }
+
+ private int findPitchPeriodInRange(short[] samples, int position, int minPeriod, int maxPeriod) {
+ // Find the best frequency match in the range, and given a sample skip multiple. For now, just
+ // find the pitch of the first channel.
+ int bestPeriod = 0;
+ int worstPeriod = 255;
+ int minDiff = 1;
+ int maxDiff = 0;
+ position *= numChannels;
+ for (int period = minPeriod; period <= maxPeriod; period++) {
+ int diff = 0;
+ for (int i = 0; i < period; i++) {
+ short sVal = samples[position + i];
+ short pVal = samples[position + period + i];
+ diff += sVal >= pVal ? sVal - pVal : pVal - sVal;
+ }
+ // Note that the highest number of samples we add into diff will be less than 256, since we
+ // skip samples. Thus, diff is a 24 bit number, and we can safely multiply by numSamples
+ // without overflow.
+ if (diff * bestPeriod < minDiff * period) {
+ minDiff = diff;
+ bestPeriod = period;
+ }
+ if (diff * worstPeriod > maxDiff * period) {
+ maxDiff = diff;
+ worstPeriod = period;
+ }
+ }
+ this.minDiff = minDiff / bestPeriod;
+ this.maxDiff = maxDiff / worstPeriod;
+ return bestPeriod;
+ }
+
+ /**
+ * Returns whether the previous pitch period estimate is a better approximation, which can occur
+ * at the abrupt end of voiced words.
+ */
+ private boolean previousPeriodBetter(int minDiff, int maxDiff, boolean preferNewPeriod) {
+ if (minDiff == 0 || prevPeriod == 0) {
+ return false;
+ }
+ if (preferNewPeriod) {
+ if (maxDiff > minDiff * 3) {
+ // Got a reasonable match this period
+ return false;
+ }
+ if (minDiff * 2 <= prevMinDiff * 3) {
+ // Mismatch is not that much greater this period
+ return false;
+ }
+ } else {
+ if (minDiff <= prevMinDiff) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private int findPitchPeriod(short[] samples, int position, boolean preferNewPeriod) {
+ // Find the pitch period. This is a critical step, and we may have to try multiple ways to get a
+ // good answer. This version uses AMDF. To improve speed, we down sample by an integer factor
+ // get in the 11 kHz range, and then do it again with a narrower frequency range without down
+ // sampling.
+ int period;
+ int retPeriod;
+ int skip = sampleRate > AMDF_FREQUENCY ? sampleRate / AMDF_FREQUENCY : 1; // Integer down-sampling factor for the coarse search.
+ if (numChannels == 1 && skip == 1) { // Mono at a low rate: search at full resolution directly.
+ period = findPitchPeriodInRange(samples, position, minPeriod, maxPeriod);
+ } else {
+ downSampleInput(samples, position, skip);
+ period = findPitchPeriodInRange(downSampleBuffer, 0, minPeriod / skip, maxPeriod / skip);
+ if (skip != 1) { // Refine the coarse estimate in a narrow window at full resolution.
+ period *= skip;
+ int minP = period - (skip * 4);
+ int maxP = period + (skip * 4);
+ if (minP < minPeriod) {
+ minP = minPeriod;
+ }
+ if (maxP > maxPeriod) {
+ maxP = maxPeriod;
+ }
+ if (numChannels == 1) {
+ period = findPitchPeriodInRange(samples, position, minP, maxP);
+ } else {
+ downSampleInput(samples, position, 1);
+ period = findPitchPeriodInRange(downSampleBuffer, 0, minP, maxP);
+ }
+ }
+ }
+ if (previousPeriodBetter(minDiff, maxDiff, preferNewPeriod)) { // Fall back at abrupt ends of voiced words.
+ retPeriod = prevPeriod;
+ } else {
+ retPeriod = period;
+ }
+ prevMinDiff = minDiff;
+ prevPeriod = period;
+ return retPeriod;
+ }
+
+ private void moveNewSamplesToPitchBuffer(int originalNumOutputSamples) {
+ int numSamples = numOutputSamples - originalNumOutputSamples; // Samples appended to the output since the snapshot.
+ if (numPitchSamples + numSamples > pitchBufferSize) {
+ pitchBufferSize += (pitchBufferSize / 2) + numSamples; // Grow by ~1.5x plus what is needed.
+ pitchBuffer = Arrays.copyOf(pitchBuffer, pitchBufferSize * numChannels);
+ }
+ System.arraycopy(outputBuffer, originalNumOutputSamples * numChannels, pitchBuffer,
+ numPitchSamples * numChannels, numSamples * numChannels);
+ numOutputSamples = originalNumOutputSamples; // Moved samples no longer count as output.
+ numPitchSamples += numSamples;
+ }
+
+ private void removePitchSamples(int numSamples) {
+ if (numSamples == 0) {
+ return;
+ }
+ System.arraycopy(pitchBuffer, numSamples * numChannels, pitchBuffer, 0, // Shift the remaining samples to the front.
+ (numPitchSamples - numSamples) * numChannels);
+ numPitchSamples -= numSamples;
+ }
+
+ private void adjustPitch(int originalNumOutputSamples) {
+ // Latency due to pitch changes could be reduced by looking at past samples to determine pitch,
+ // rather than future.
+ if (numOutputSamples == originalNumOutputSamples) { // No new output to process.
+ return;
+ }
+ moveNewSamplesToPitchBuffer(originalNumOutputSamples);
+ int position = 0;
+ while (numPitchSamples - position >= maxRequired) { // Process whole periods while enough lookahead remains.
+ int period = findPitchPeriod(pitchBuffer, position, false);
+ int newPeriod = (int) (period / pitch); // Output period length after the pitch shift.
+ enlargeOutputBufferIfNeeded(newPeriod);
+ if (pitch >= 1.0f) { // Raising pitch: output period is shorter; cross-fade within it.
+ overlapAdd(newPeriod, numChannels, outputBuffer, numOutputSamples, pitchBuffer, position,
+ pitchBuffer, position + period - newPeriod);
+ } else {
+ int separation = newPeriod - period; // Lowering pitch: insert a cross-faded gap of this length.
+ overlapAddWithSeparation(period, numChannels, separation, outputBuffer, numOutputSamples,
+ pitchBuffer, position, pitchBuffer, position);
+ }
+ numOutputSamples += newPeriod;
+ position += period;
+ }
+ removePitchSamples(position);
+ }
+
+ private short interpolate(short[] in, int inPos, int oldSampleRate, int newSampleRate) {
+ short left = in[inPos * numChannels]; // NOTE(review): inPos arrives pre-offset by channel index from adjustRate — verify multi-channel indexing.
+ short right = in[inPos * numChannels + numChannels]; // Next frame at the same offset.
+ int position = newRatePosition * oldSampleRate;
+ int leftPosition = oldRatePosition * newSampleRate;
+ int rightPosition = (oldRatePosition + 1) * newSampleRate;
+ int ratio = rightPosition - position;
+ int width = rightPosition - leftPosition;
+ return (short) ((ratio * left + (width - ratio) * right) / width); // Linear interpolation between the two frames.
+ }
+
+ private void adjustRate(float rate, int originalNumOutputSamples) {
+ if (numOutputSamples == originalNumOutputSamples) { // No new output to process.
+ return;
+ }
+ int newSampleRate = (int) (sampleRate / rate); // Resampling to sampleRate/rate shifts pitch by the rate factor.
+ int oldSampleRate = sampleRate;
+ // Set these values to help with the integer math.
+ while (newSampleRate > (1 << 14) || oldSampleRate > (1 << 14)) { // Keep rate products within int range.
+ newSampleRate /= 2;
+ oldSampleRate /= 2;
+ }
+ moveNewSamplesToPitchBuffer(originalNumOutputSamples);
+ // Leave at least one pitch sample in the buffer.
+ for (int position = 0; position < numPitchSamples - 1; position++) {
+ while ((oldRatePosition + 1) * newSampleRate > newRatePosition * oldSampleRate) { // Emit interpolated frames until the output position catches up.
+ enlargeOutputBufferIfNeeded(1);
+ for (int i = 0; i < numChannels; i++) {
+ outputBuffer[numOutputSamples * numChannels + i] =
+ interpolate(pitchBuffer, position + i, oldSampleRate, newSampleRate);
+ }
+ newRatePosition++;
+ numOutputSamples++;
+ }
+ oldRatePosition++;
+ if (oldRatePosition == oldSampleRate) { // Both positions complete a full cycle together; reset to avoid overflow.
+ oldRatePosition = 0;
+ Assertions.checkState(newRatePosition == newSampleRate);
+ newRatePosition = 0;
+ }
+ }
+ removePitchSamples(numPitchSamples - 1);
+ }
+
+ private int skipPitchPeriod(short[] samples, int position, float speed, int period) {
+ // Skip over a pitch period, and copy period/speed samples to the output.
+ int newSamples;
+ if (speed >= 2.0f) { // At 2x and above every period is blended down.
+ newSamples = (int) (period / (speed - 1.0f));
+ } else {
+ newSamples = period; // Below 2x, blend one period and owe some verbatim input copy.
+ remainingInputToCopy = (int) (period * (2.0f - speed) / (speed - 1.0f));
+ }
+ enlargeOutputBufferIfNeeded(newSamples);
+ overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples, samples, position, samples,
+ position + period);
+ numOutputSamples += newSamples;
+ return newSamples;
+ }
+
+ private int insertPitchPeriod(short[] samples, int position, float speed, int period) {
+ // Insert a pitch period, and determine how much input to copy directly.
+ int newSamples;
+ if (speed < 0.5f) { // Below 0.5x, every period is blended up.
+ newSamples = (int) (period * speed / (1.0f - speed));
+ } else {
+ newSamples = period; // At 0.5x and above, insert one period and owe some verbatim input copy.
+ remainingInputToCopy = (int) (period * (2.0f * speed - 1.0f) / (1.0f - speed));
+ }
+ enlargeOutputBufferIfNeeded(period + newSamples);
+ System.arraycopy(samples, position * numChannels, outputBuffer, numOutputSamples * numChannels, // Copy the period through unchanged first.
+ period * numChannels);
+ overlapAdd(newSamples, numChannels, outputBuffer, numOutputSamples + period, samples,
+ position + period, samples, position);
+ numOutputSamples += period + newSamples;
+ return newSamples;
+ }
+
+ private void changeSpeed(float speed) {
+ if (numInputSamples < maxRequired) { // Not enough buffered input for pitch detection.
+ return;
+ }
+ int numSamples = numInputSamples;
+ int position = 0;
+ do {
+ if (remainingInputToCopy > 0) { // Verbatim copy owed from a previous skip/insert.
+ position += copyInputToOutput(position);
+ } else {
+ int period = findPitchPeriod(inputBuffer, position, true);
+ if (speed > 1.0) { // Speeding up drops periods; slowing down inserts them.
+ position += period + skipPitchPeriod(inputBuffer, position, speed, period);
+ } else {
+ position += insertPitchPeriod(inputBuffer, position, speed, period);
+ }
+ }
+ } while (position + maxRequired <= numSamples);
+ removeProcessedInputSamples(position);
+ }
+
+ private void processStreamInput() {
+ // Resample as many pitch periods as we have buffered on the input.
+ int originalNumOutputSamples = numOutputSamples;
+ float s = speed / pitch; // Combined rate: time-stretch first, correct pitch separately below.
+ if (s > 1.00001 || s < 0.99999) {
+ changeSpeed(s);
+ } else {
+ copyToOutput(inputBuffer, 0, numInputSamples); // Rate is effectively 1: pass samples straight through.
+ numInputSamples = 0;
+ }
+ if (USE_CHORD_PITCH) {
+ if (pitch != 1.0f) {
+ adjustPitch(originalNumOutputSamples);
+ }
+ } else if (pitch != 1.0f) { // !USE_CHORD_PITCH is implied in this branch; re-testing it was dead code.
+ adjustRate(pitch, originalNumOutputSamples);
+ }
+ }
+
+ private static void overlapAdd(int numSamples, int numChannels, short[] out, int outPos,
+ short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
+ for (int i = 0; i < numChannels; i++) { // Cross-fade each channel independently.
+ int o = outPos * numChannels + i;
+ int u = rampUpPos * numChannels + i;
+ int d = rampDownPos * numChannels + i;
+ for (int t = 0; t < numSamples; t++) {
+ out[o] = (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * t) / numSamples); // Linear cross-fade from rampDown to rampUp.
+ o += numChannels;
+ d += numChannels;
+ u += numChannels;
+ }
+ }
+ }
+
+ private static void overlapAddWithSeparation(int numSamples, int numChannels, int separation,
+ short[] out, int outPos, short[] rampDown, int rampDownPos, short[] rampUp, int rampUpPos) {
+ for (int i = 0; i < numChannels; i++) { // Cross-fade each channel independently.
+ int o = outPos * numChannels + i;
+ int u = rampUpPos * numChannels + i;
+ int d = rampDownPos * numChannels + i;
+ for (int t = 0; t < numSamples + separation; t++) {
+ if (t < separation) { // Leading region: fade-out of rampDown only.
+ out[o] = (short) (rampDown[d] * (numSamples - t) / numSamples);
+ d += numChannels;
+ } else if (t < numSamples) { // Middle region: both ramps overlap.
+ out[o] =
+ (short) ((rampDown[d] * (numSamples - t) + rampUp[u] * (t - separation))
+ / numSamples);
+ d += numChannels;
+ u += numChannels;
+ } else { // Trailing region: fade-in of rampUp only.
+ out[o] = (short) (rampUp[u] * (t - separation) / numSamples);
+ u += numChannels;
+ }
+ o += numChannels;
+ }
+ }
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SonicAudioProcessor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SonicAudioProcessor.java
new file mode 100755
index 000000000..42990b401
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/audio/SonicAudioProcessor.java
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.audio;
+
+import org.telegram.messenger.exoplayer2.C;
+import org.telegram.messenger.exoplayer2.C.Encoding;
+import org.telegram.messenger.exoplayer2.Format;
+import org.telegram.messenger.exoplayer2.util.Util;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ShortBuffer;
+
+/**
+ * An {@link AudioProcessor} that uses the Sonic library to modify the speed/pitch of audio.
+ */
+public final class SonicAudioProcessor implements AudioProcessor {
+
+ /**
+ * The maximum allowed playback speed in {@link #setSpeed(float)}.
+ */
+ public static final float MAXIMUM_SPEED = 8.0f;
+ /**
+ * The minimum allowed playback speed in {@link #setSpeed(float)}.
+ */
+ public static final float MINIMUM_SPEED = 0.1f;
+ /**
+ * The maximum allowed pitch in {@link #setPitch(float)}.
+ */
+ public static final float MAXIMUM_PITCH = 8.0f;
+ /**
+ * The minimum allowed pitch in {@link #setPitch(float)}.
+ */
+ public static final float MINIMUM_PITCH = 0.1f;
+
+ /**
+ * The threshold below which the difference between two pitch/speed factors is negligible.
+ */
+ private static final float CLOSE_THRESHOLD = 0.01f;
+
+ private int channelCount;
+ private int sampleRateHz;
+
+ private Sonic sonic;
+ private float speed;
+ private float pitch;
+
+ private ByteBuffer buffer;
+ private ShortBuffer shortBuffer;
+ private ByteBuffer outputBuffer;
+ private long inputBytes;
+ private long outputBytes;
+ private boolean inputEnded;
+
+ /**
+ * Creates a new Sonic audio processor.
+ */
+ public SonicAudioProcessor() {
+ speed = 1f;
+ pitch = 1f;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ buffer = EMPTY_BUFFER;
+ shortBuffer = buffer.asShortBuffer();
+ outputBuffer = EMPTY_BUFFER;
+ }
+
+ /**
+ * Sets the playback speed. The new speed will take effect after a call to {@link #flush()}.
+ *
+ * @param speed The requested new playback speed.
+ * @return The actual new playback speed.
+ */
+ public float setSpeed(float speed) {
+ this.speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED);
+ return this.speed;
+ }
+
+ /**
+ * Sets the playback pitch. The new pitch will take effect after a call to {@link #flush()}.
+ *
+ * @param pitch The requested new pitch.
+ * @return The actual new pitch.
+ */
+ public float setPitch(float pitch) {
+ this.pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH);
+ return this.pitch; // Return the constrained value ("the actual new pitch"), consistent with setSpeed.
+ }
+
+ /**
+ * Returns the number of bytes of input queued since the last call to {@link #flush()}.
+ */
+ public long getInputByteCount() {
+ return inputBytes;
+ }
+
+ /**
+ * Returns the number of bytes of output dequeued since the last call to {@link #flush()}.
+ */
+ public long getOutputByteCount() {
+ return outputBytes;
+ }
+
+ @Override
+ public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding)
+ throws UnhandledFormatException {
+ if (encoding != C.ENCODING_PCM_16BIT) {
+ throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
+ }
+ if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount) {
+ return false;
+ }
+ this.sampleRateHz = sampleRateHz;
+ this.channelCount = channelCount;
+ return true;
+ }
+
+ @Override
+ public boolean isActive() {
+ return Math.abs(speed - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD;
+ }
+
+ @Override
+ public int getOutputChannelCount() {
+ return channelCount;
+ }
+
+ @Override
+ public int getOutputEncoding() {
+ return C.ENCODING_PCM_16BIT;
+ }
+
+ @Override
+ public void queueInput(ByteBuffer inputBuffer) {
+ if (inputBuffer.hasRemaining()) {
+ ShortBuffer shortBuffer = inputBuffer.asShortBuffer(); // View the input as 16-bit PCM samples.
+ int inputSize = inputBuffer.remaining();
+ inputBytes += inputSize;
+ sonic.queueInput(shortBuffer); // NOTE(review): assumes flush() ran after configure(); sonic is created there.
+ inputBuffer.position(inputBuffer.position() + inputSize);
+ }
+ int outputSize = sonic.getSamplesAvailable() * channelCount * 2; // 2 bytes per 16-bit sample.
+ if (outputSize > 0) {
+ if (buffer.capacity() < outputSize) { // Grow the direct buffer to fit all pending output.
+ buffer = ByteBuffer.allocateDirect(outputSize).order(ByteOrder.nativeOrder());
+ shortBuffer = buffer.asShortBuffer();
+ } else {
+ buffer.clear();
+ shortBuffer.clear();
+ }
+ sonic.getOutput(shortBuffer);
+ outputBytes += outputSize;
+ buffer.limit(outputSize);
+ outputBuffer = buffer;
+ }
+ }
+
+ @Override
+ public void queueEndOfStream() {
+ sonic.queueEndOfStream();
+ inputEnded = true;
+ }
+
+ @Override
+ public ByteBuffer getOutput() {
+ ByteBuffer outputBuffer = this.outputBuffer;
+ this.outputBuffer = EMPTY_BUFFER;
+ return outputBuffer;
+ }
+
+ @Override
+ public boolean isEnded() {
+ return inputEnded && (sonic == null || sonic.getSamplesAvailable() == 0);
+ }
+
+ @Override
+ public void flush() {
+ sonic = new Sonic(sampleRateHz, channelCount);
+ sonic.setSpeed(speed);
+ sonic.setPitch(pitch);
+ outputBuffer = EMPTY_BUFFER;
+ inputBytes = 0;
+ outputBytes = 0;
+ inputEnded = false;
+ }
+
+ @Override
+ public void reset() {
+ sonic = null;
+ buffer = EMPTY_BUFFER;
+ shortBuffer = buffer.asShortBuffer();
+ outputBuffer = EMPTY_BUFFER;
+ channelCount = Format.NO_VALUE;
+ sampleRateHz = Format.NO_VALUE;
+ inputBytes = 0;
+ outputBytes = 0;
+ inputEnded = false;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/CryptoInfo.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/CryptoInfo.java
index a01adc9de..eeb357a3e 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/CryptoInfo.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/CryptoInfo.java
@@ -49,11 +49,21 @@ public final class CryptoInfo {
* @see android.media.MediaCodec.CryptoInfo#numSubSamples
*/
public int numSubSamples;
+ /**
+ * @see android.media.MediaCodec.CryptoInfo.Pattern
+ */
+ public int patternBlocksToEncrypt;
+ /**
+ * @see android.media.MediaCodec.CryptoInfo.Pattern
+ */
+ public int patternBlocksToSkip;
private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo;
+ private final PatternHolderV24 patternHolder;
public CryptoInfo() {
frameworkCryptoInfo = Util.SDK_INT >= 16 ? newFrameworkCryptoInfoV16() : null;
+ patternHolder = Util.SDK_INT >= 24 ? new PatternHolderV24(frameworkCryptoInfo) : null;
}
/**
@@ -67,11 +77,21 @@ public final class CryptoInfo {
this.key = key;
this.iv = iv;
this.mode = mode;
+ patternBlocksToEncrypt = 0;
+ patternBlocksToSkip = 0;
if (Util.SDK_INT >= 16) {
updateFrameworkCryptoInfoV16();
}
}
+ public void setPattern(int patternBlocksToEncrypt, int patternBlocksToSkip) {
+ this.patternBlocksToEncrypt = patternBlocksToEncrypt;
+ this.patternBlocksToSkip = patternBlocksToSkip;
+ if (Util.SDK_INT >= 24) {
+ patternHolder.set(patternBlocksToEncrypt, patternBlocksToSkip);
+ }
+ }
+
/**
* Returns an equivalent {@link android.media.MediaCodec.CryptoInfo} instance.
*
@@ -93,8 +113,35 @@ public final class CryptoInfo {
@TargetApi(16)
private void updateFrameworkCryptoInfoV16() {
- frameworkCryptoInfo.set(numSubSamples, numBytesOfClearData, numBytesOfEncryptedData, key, iv,
- mode);
+ // Update fields directly because the framework's CryptoInfo.set performs an unnecessary object
+ // allocation on Android N.
+ frameworkCryptoInfo.numSubSamples = numSubSamples;
+ frameworkCryptoInfo.numBytesOfClearData = numBytesOfClearData;
+ frameworkCryptoInfo.numBytesOfEncryptedData = numBytesOfEncryptedData;
+ frameworkCryptoInfo.key = key;
+ frameworkCryptoInfo.iv = iv;
+ frameworkCryptoInfo.mode = mode;
+ if (Util.SDK_INT >= 24) {
+ patternHolder.set(patternBlocksToEncrypt, patternBlocksToSkip);
+ }
+ }
+
+ @TargetApi(24)
+ private static final class PatternHolderV24 {
+
+ private final android.media.MediaCodec.CryptoInfo frameworkCryptoInfo;
+ private final android.media.MediaCodec.CryptoInfo.Pattern pattern;
+
+ private PatternHolderV24(android.media.MediaCodec.CryptoInfo frameworkCryptoInfo) {
+ this.frameworkCryptoInfo = frameworkCryptoInfo;
+ pattern = new android.media.MediaCodec.CryptoInfo.Pattern(0, 0);
+ }
+
+ private void set(int blocksToEncrypt, int blocksToSkip) {
+ pattern.set(blocksToEncrypt, blocksToSkip);
+ frameworkCryptoInfo.setPattern(pattern);
+ }
+
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/DecoderInputBuffer.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/DecoderInputBuffer.java
index 8855a6a07..e4b756b45 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/DecoderInputBuffer.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/decoder/DecoderInputBuffer.java
@@ -61,8 +61,16 @@ public class DecoderInputBuffer extends Buffer {
*/
public long timeUs;
- @BufferReplacementMode
- private final int bufferReplacementMode;
+ @BufferReplacementMode private final int bufferReplacementMode;
+
+ /**
+ * Creates a new instance for which {@link #isFlagsOnly()} will return true.
+ *
+ * @return A new flags only input buffer.
+ */
+ public static DecoderInputBuffer newFlagsOnlyInstance() {
+ return new DecoderInputBuffer(BUFFER_REPLACEMENT_MODE_DISABLED);
+ }
/**
* @param bufferReplacementMode Determines the behavior of {@link #ensureSpaceForWrite(int)}. One
@@ -110,6 +118,14 @@ public class DecoderInputBuffer extends Buffer {
data = newData;
}
+ /**
+ * Returns whether the buffer is only able to hold flags, meaning {@link #data} is null and
+ * its replacement mode is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}.
+ */
+ public final boolean isFlagsOnly() {
+ return data == null && bufferReplacementMode == BUFFER_REPLACEMENT_MODE_DISABLED;
+ }
+
/**
* Returns whether the {@link C#BUFFER_FLAG_ENCRYPTED} flag is set.
*/
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/StreamingDrmSessionManager.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DefaultDrmSessionManager.java
similarity index 61%
rename from TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/StreamingDrmSessionManager.java
rename to TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DefaultDrmSessionManager.java
index 8f85b9ae7..5fad98182 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/StreamingDrmSessionManager.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DefaultDrmSessionManager.java
@@ -24,7 +24,10 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
+import android.support.annotation.IntDef;
import android.text.TextUtils;
+import android.util.Log;
+import android.util.Pair;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.drm.DrmInitData.SchemeData;
import org.telegram.messenger.exoplayer2.drm.ExoMediaDrm.KeyRequest;
@@ -32,19 +35,23 @@ import org.telegram.messenger.exoplayer2.drm.ExoMediaDrm.OnEventListener;
import org.telegram.messenger.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
import org.telegram.messenger.exoplayer2.extractor.mp4.PsshAtomUtil;
import org.telegram.messenger.exoplayer2.util.Assertions;
+import org.telegram.messenger.exoplayer2.util.MimeTypes;
import org.telegram.messenger.exoplayer2.util.Util;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.util.HashMap;
+import java.util.Map;
import java.util.UUID;
/**
- * A {@link DrmSessionManager} that supports streaming playbacks using {@link MediaDrm}.
+ * A {@link DrmSessionManager} that supports playbacks using {@link MediaDrm}.
*/
@TargetApi(18)
-public class StreamingDrmSessionManager implements DrmSessionManager,
+public class DefaultDrmSessionManager implements DrmSessionManager,
DrmSession {
/**
- * Listener of {@link StreamingDrmSessionManager} events.
+ * Listener of {@link DefaultDrmSessionManager} events.
*/
public interface EventListener {
@@ -60,6 +67,16 @@ public class StreamingDrmSessionManager implements Drm
*/
void onDrmSessionManagerError(Exception e);
+ /**
+ * Called each time offline keys are restored.
+ */
+ void onDrmKeysRestored();
+
+ /**
+ * Called each time offline keys are removed.
+ */
+ void onDrmKeysRemoved();
+
}
/**
@@ -67,9 +84,33 @@ public class StreamingDrmSessionManager implements Drm
*/
public static final String PLAYREADY_CUSTOM_DATA_KEY = "PRCustomData";
+ /** Determines the action to be done after a session acquired. */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({MODE_PLAYBACK, MODE_QUERY, MODE_DOWNLOAD, MODE_RELEASE})
+ public @interface Mode {}
+ /**
+ * Loads and refreshes (if necessary) a license for playback. Supports streaming and offline
+ * licenses.
+ */
+ public static final int MODE_PLAYBACK = 0;
+ /**
+ * Restores an offline license to allow its status to be queried. If the offline license is
+ * expired sets state to {@link #STATE_ERROR}.
+ */
+ public static final int MODE_QUERY = 1;
+ /** Downloads an offline license or renews an existing one. */
+ public static final int MODE_DOWNLOAD = 2;
+ /** Releases an existing offline license. */
+ public static final int MODE_RELEASE = 3;
+
+ private static final String TAG = "OfflineDrmSessionMngr";
+ private static final String CENC_SCHEME_MIME_TYPE = "cenc";
+
private static final int MSG_PROVISION = 0;
private static final int MSG_KEYS = 1;
+ private static final int MAX_LICENSE_DURATION_TO_RENEW = 60;
+
private final Handler eventHandler;
private final EventListener eventListener;
private final ExoMediaDrm mediaDrm;
@@ -85,14 +126,17 @@ public class StreamingDrmSessionManager implements Drm
private HandlerThread requestHandlerThread;
private Handler postRequestHandler;
+ private int mode;
private int openCount;
private boolean provisioningInProgress;
@DrmSession.State
private int state;
private T mediaCrypto;
- private Exception lastException;
- private SchemeData schemeData;
+ private DrmSessionException lastException;
+ private byte[] schemeInitData;
+ private String schemeMimeType;
private byte[] sessionId;
+ private byte[] offlineLicenseKeySetId;
/**
* Instantiates a new instance using the Widevine scheme.
@@ -105,7 +149,7 @@ public class StreamingDrmSessionManager implements Drm
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
- public static StreamingDrmSessionManager newWidevineInstance(
+ public static DefaultDrmSessionManager newWidevineInstance(
MediaDrmCallback callback, HashMap optionalKeyRequestParameters,
Handler eventHandler, EventListener eventListener) throws UnsupportedDrmException {
return newFrameworkInstance(C.WIDEVINE_UUID, callback, optionalKeyRequestParameters,
@@ -125,7 +169,7 @@ public class StreamingDrmSessionManager implements Drm
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
- public static StreamingDrmSessionManager newPlayReadyInstance(
+ public static DefaultDrmSessionManager newPlayReadyInstance(
MediaDrmCallback callback, String customData, Handler eventHandler,
EventListener eventListener) throws UnsupportedDrmException {
HashMap optionalKeyRequestParameters;
@@ -151,10 +195,10 @@ public class StreamingDrmSessionManager implements Drm
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
- public static StreamingDrmSessionManager newFrameworkInstance(
+ public static DefaultDrmSessionManager newFrameworkInstance(
UUID uuid, MediaDrmCallback callback, HashMap optionalKeyRequestParameters,
Handler eventHandler, EventListener eventListener) throws UnsupportedDrmException {
- return new StreamingDrmSessionManager<>(uuid, FrameworkMediaDrm.newInstance(uuid), callback,
+ return new DefaultDrmSessionManager<>(uuid, FrameworkMediaDrm.newInstance(uuid), callback,
optionalKeyRequestParameters, eventHandler, eventListener);
}
@@ -168,7 +212,7 @@ public class StreamingDrmSessionManager implements Drm
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
*/
- public StreamingDrmSessionManager(UUID uuid, ExoMediaDrm mediaDrm, MediaDrmCallback callback,
+ public DefaultDrmSessionManager(UUID uuid, ExoMediaDrm mediaDrm, MediaDrmCallback callback,
HashMap optionalKeyRequestParameters, Handler eventHandler,
EventListener eventListener) {
this.uuid = uuid;
@@ -179,6 +223,7 @@ public class StreamingDrmSessionManager implements Drm
this.eventListener = eventListener;
mediaDrm.setOnEventListener(new MediaDrmEventListener());
state = STATE_CLOSED;
+ mode = MODE_PLAYBACK;
}
/**
@@ -229,6 +274,37 @@ public class StreamingDrmSessionManager implements Drm
mediaDrm.setPropertyByteArray(key, value);
}
+ /**
+ * Sets the mode, which determines the role of sessions acquired from the instance. This must be
+ * called before {@link #acquireSession(Looper, DrmInitData)} is called.
+ *
+ * By default, the mode is {@link #MODE_PLAYBACK} and a streaming license is requested when
+ * required.
+ *
+ *
{@code mode} must be one of these:
+ *
+ * - {@link #MODE_PLAYBACK}: If {@code offlineLicenseKeySetId} is null, a streaming license is
+ * requested otherwise the offline license is restored.
+ *
+ * <li>{@link #MODE_QUERY}: {@code offlineLicenseKeySetId} can not be null. The offline license
+ * is restored.
+ *
+ * <li>{@link #MODE_DOWNLOAD}: If {@code offlineLicenseKeySetId} is null, an offline license is
+ * requested otherwise the offline license is renewed.
+ *
+ * <li>{@link #MODE_RELEASE}: {@code offlineLicenseKeySetId} can not be null. The offline license
+ * is released.
+ *
+ *
+ * @param mode The mode to be set.
+ * @param offlineLicenseKeySetId The key set id of the license to be used with the given mode.
+ */
+ public void setMode(@Mode int mode, byte[] offlineLicenseKeySetId) {
+ Assertions.checkState(openCount == 0);
+ if (mode == MODE_QUERY || mode == MODE_RELEASE) {
+ Assertions.checkNotNull(offlineLicenseKeySetId);
+ }
+ this.mode = mode;
+ this.offlineLicenseKeySetId = offlineLicenseKeySetId;
+ }
+
// DrmSessionManager implementation.
@Override
@@ -248,18 +324,28 @@ public class StreamingDrmSessionManager implements Drm
requestHandlerThread.start();
postRequestHandler = new PostRequestHandler(requestHandlerThread.getLooper());
- schemeData = drmInitData.get(uuid);
- if (schemeData == null) {
- onError(new IllegalStateException("Media does not support uuid: " + uuid));
- return this;
- }
- if (Util.SDK_INT < 21) {
- // Prior to L the Widevine CDM required data to be extracted from the PSSH atom.
- byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(schemeData.data, C.WIDEVINE_UUID);
- if (psshData == null) {
- // Extraction failed. schemeData isn't a Widevine PSSH atom, so leave it unchanged.
- } else {
- schemeData = new SchemeData(C.WIDEVINE_UUID, schemeData.mimeType, psshData);
+ if (offlineLicenseKeySetId == null) {
+ SchemeData schemeData = drmInitData.get(uuid);
+ if (schemeData == null) {
+ onError(new IllegalStateException("Media does not support uuid: " + uuid));
+ return this;
+ }
+ schemeInitData = schemeData.data;
+ schemeMimeType = schemeData.mimeType;
+ if (Util.SDK_INT < 21) {
+ // Prior to L the Widevine CDM required data to be extracted from the PSSH atom.
+ byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(schemeInitData, C.WIDEVINE_UUID);
+ if (psshData == null) {
+ // Extraction failed. schemeData isn't a Widevine PSSH atom, so leave it unchanged.
+ } else {
+ schemeInitData = psshData;
+ }
+ }
+ if (Util.SDK_INT < 26 && C.CLEARKEY_UUID.equals(uuid)
+ && (MimeTypes.VIDEO_MP4.equals(schemeMimeType)
+ || MimeTypes.AUDIO_MP4.equals(schemeMimeType))) {
+ // Prior to API level 26 the ClearKey CDM only accepted "cenc" as the scheme for MP4.
+ schemeMimeType = CENC_SCHEME_MIME_TYPE;
}
}
state = STATE_OPENING;
@@ -280,7 +366,8 @@ public class StreamingDrmSessionManager implements Drm
postRequestHandler = null;
requestHandlerThread.quit();
requestHandlerThread = null;
- schemeData = null;
+ schemeInitData = null;
+ schemeMimeType = null;
mediaCrypto = null;
lastException = null;
if (sessionId != null) {
@@ -314,10 +401,25 @@ public class StreamingDrmSessionManager implements Drm
}
@Override
- public final Exception getError() {
+ public final DrmSessionException getError() {
return state == STATE_ERROR ? lastException : null;
}
+ @Override
+ public Map queryKeyStatus() {
+ // User may call this method rightfully even if state == STATE_ERROR. So only check if there is
+ // a sessionId
+ if (sessionId == null) {
+ throw new IllegalStateException();
+ }
+ return mediaDrm.queryKeyStatus(sessionId);
+ }
+
+ @Override
+ public byte[] getOfflineLicenseKeySetId() {
+ return offlineLicenseKeySetId;
+ }
+
// Internal methods.
private void openInternal(boolean allowProvisioning) {
@@ -325,7 +427,7 @@ public class StreamingDrmSessionManager implements Drm
sessionId = mediaDrm.openSession();
mediaCrypto = mediaDrm.createMediaCrypto(uuid, sessionId);
state = STATE_OPENED;
- postKeyRequest();
+ doLicense();
} catch (NotProvisionedException e) {
if (allowProvisioning) {
postProvisionRequest();
@@ -363,20 +465,86 @@ public class StreamingDrmSessionManager implements Drm
if (state == STATE_OPENING) {
openInternal(false);
} else {
- postKeyRequest();
+ doLicense();
}
} catch (DeniedByServerException e) {
onError(e);
}
}
- private void postKeyRequest() {
- KeyRequest keyRequest;
+ private void doLicense() {
+ switch (mode) {
+ case MODE_PLAYBACK:
+ case MODE_QUERY:
+ if (offlineLicenseKeySetId == null) {
+ postKeyRequest(sessionId, MediaDrm.KEY_TYPE_STREAMING);
+ } else {
+ if (restoreKeys()) {
+ long licenseDurationRemainingSec = getLicenseDurationRemainingSec();
+ if (mode == MODE_PLAYBACK
+ && licenseDurationRemainingSec <= MAX_LICENSE_DURATION_TO_RENEW) {
+ Log.d(TAG, "Offline license has expired or will expire soon. "
+ + "Remaining seconds: " + licenseDurationRemainingSec);
+ postKeyRequest(sessionId, MediaDrm.KEY_TYPE_OFFLINE);
+ } else if (licenseDurationRemainingSec <= 0) {
+ onError(new KeysExpiredException());
+ } else {
+ state = STATE_OPENED_WITH_KEYS;
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDrmKeysRestored();
+ }
+ });
+ }
+ }
+ }
+ }
+ break;
+ case MODE_DOWNLOAD:
+ if (offlineLicenseKeySetId == null) {
+ postKeyRequest(sessionId, MediaDrm.KEY_TYPE_OFFLINE);
+ } else {
+ // Renew
+ if (restoreKeys()) {
+ postKeyRequest(sessionId, MediaDrm.KEY_TYPE_OFFLINE);
+ }
+ }
+ break;
+ case MODE_RELEASE:
+ if (restoreKeys()) {
+ postKeyRequest(offlineLicenseKeySetId, MediaDrm.KEY_TYPE_RELEASE);
+ }
+ break;
+ }
+ }
+
+ private boolean restoreKeys() {
try {
- keyRequest = mediaDrm.getKeyRequest(sessionId, schemeData.data, schemeData.mimeType,
- MediaDrm.KEY_TYPE_STREAMING, optionalKeyRequestParameters);
+ mediaDrm.restoreKeys(sessionId, offlineLicenseKeySetId);
+ return true;
+ } catch (Exception e) {
+ Log.e(TAG, "Error trying to restore Widevine keys.", e);
+ onError(e);
+ }
+ return false;
+ }
+
+ private long getLicenseDurationRemainingSec() {
+ if (!C.WIDEVINE_UUID.equals(uuid)) {
+ return Long.MAX_VALUE;
+ }
+ Pair pair = WidevineUtil.getLicenseDurationRemainingSec(this);
+ return Math.min(pair.first, pair.second);
+ }
+
+ private void postKeyRequest(byte[] scope, int keyType) {
+ try {
+ KeyRequest keyRequest = mediaDrm.getKeyRequest(scope, schemeInitData, schemeMimeType, keyType,
+ optionalKeyRequestParameters);
postRequestHandler.obtainMessage(MSG_KEYS, keyRequest).sendToTarget();
- } catch (NotProvisionedException e) {
+ } catch (Exception e) {
onKeysError(e);
}
}
@@ -393,15 +561,31 @@ public class StreamingDrmSessionManager implements Drm
}
try {
- mediaDrm.provideKeyResponse(sessionId, (byte[]) response);
- state = STATE_OPENED_WITH_KEYS;
- if (eventHandler != null && eventListener != null) {
- eventHandler.post(new Runnable() {
- @Override
- public void run() {
- eventListener.onDrmKeysLoaded();
- }
- });
+ if (mode == MODE_RELEASE) {
+ mediaDrm.provideKeyResponse(offlineLicenseKeySetId, (byte[]) response);
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDrmKeysRemoved();
+ }
+ });
+ }
+ } else {
+ byte[] keySetId = mediaDrm.provideKeyResponse(sessionId, (byte[]) response);
+ if ((mode == MODE_DOWNLOAD || (mode == MODE_PLAYBACK && offlineLicenseKeySetId != null))
+ && keySetId != null && keySetId.length != 0) {
+ offlineLicenseKeySetId = keySetId;
+ }
+ state = STATE_OPENED_WITH_KEYS;
+ if (eventHandler != null && eventListener != null) {
+ eventHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ eventListener.onDrmKeysLoaded();
+ }
+ });
+ }
}
} catch (Exception e) {
onKeysError(e);
@@ -417,7 +601,7 @@ public class StreamingDrmSessionManager implements Drm
}
private void onError(final Exception e) {
- lastException = e;
+ lastException = new DrmSessionException(e);
if (eventHandler != null && eventListener != null) {
eventHandler.post(new Runnable() {
@Override
@@ -446,11 +630,16 @@ public class StreamingDrmSessionManager implements Drm
}
switch (msg.what) {
case MediaDrm.EVENT_KEY_REQUIRED:
- postKeyRequest();
+ doLicense();
break;
case MediaDrm.EVENT_KEY_EXPIRED:
- state = STATE_OPENED;
- onError(new KeysExpiredException());
+ // When an already expired key is loaded MediaDrm sends this event immediately. Ignore
+ // this event if the state isn't STATE_OPENED_WITH_KEYS yet which means we're still
+ // waiting for key response.
+ if (state == STATE_OPENED_WITH_KEYS) {
+ state = STATE_OPENED;
+ onError(new KeysExpiredException());
+ }
break;
case MediaDrm.EVENT_PROVISION_REQUIRED:
state = STATE_OPENED;
@@ -466,7 +655,9 @@ public class StreamingDrmSessionManager implements Drm
@Override
public void onEvent(ExoMediaDrm extends T> md, byte[] sessionId, int event, int extra,
byte[] data) {
- mediaDrmHandler.sendEmptyMessage(event);
+ if (mode == MODE_PLAYBACK) {
+ mediaDrmHandler.sendEmptyMessage(event);
+ }
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DrmSession.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DrmSession.java
index bd833e001..1064ef553 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DrmSession.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/DrmSession.java
@@ -16,9 +16,11 @@
package org.telegram.messenger.exoplayer2.drm;
import android.annotation.TargetApi;
+import android.media.MediaDrm;
import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
+import java.util.Map;
/**
* A DRM session.
@@ -26,6 +28,15 @@ import java.lang.annotation.RetentionPolicy;
@TargetApi(16)
public interface DrmSession {
+ /** Wraps the exception which is the cause of the error state. */
+ class DrmSessionException extends Exception {
+
+ public DrmSessionException(Exception e) {
+ super(e);
+ }
+
+ }
+
/**
* The state of the DRM session.
*/
@@ -59,8 +70,7 @@ public interface DrmSession {
* @return One of {@link #STATE_ERROR}, {@link #STATE_CLOSED}, {@link #STATE_OPENING},
* {@link #STATE_OPENED} and {@link #STATE_OPENED_WITH_KEYS}.
*/
- @State
- int getState();
+ @State int getState();
/**
* Returns a {@link ExoMediaCrypto} for the open session.
@@ -96,6 +106,26 @@ public interface DrmSession {
*
* @return An exception if the state is {@link #STATE_ERROR}. Null otherwise.
*/
- Exception getError();
+ DrmSessionException getError();
+
+ /**
+ * Returns an informative description of the key status for the session. The status is in the form
+ * of {name, value} pairs.
+ *
+ * Since DRM license policies vary by vendor, the specific status field names are determined by
+ * each DRM vendor. Refer to your DRM provider documentation for definitions of the field names
+ * for a particular DRM engine plugin.
+ *
+ * @return A map of key status.
+ * @throws IllegalStateException If called when the session isn't opened.
+ * @see MediaDrm#queryKeyStatus(byte[])
+ */
+ Map queryKeyStatus();
+
+ /**
+ * Returns the key set id of the offline license loaded into this session, if there is one. Null
+ * otherwise.
+ */
+ byte[] getOfflineLicenseKeySetId();
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/FrameworkMediaDrm.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/FrameworkMediaDrm.java
index f3b4deb4f..82827819f 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/FrameworkMediaDrm.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/FrameworkMediaDrm.java
@@ -23,6 +23,7 @@ import android.media.MediaDrm;
import android.media.NotProvisionedException;
import android.media.ResourceBusyException;
import android.media.UnsupportedSchemeException;
+import android.support.annotation.NonNull;
import org.telegram.messenger.exoplayer2.util.Assertions;
import java.util.HashMap;
import java.util.Map;
@@ -62,7 +63,8 @@ public final class FrameworkMediaDrm implements ExoMediaDrm listener) {
mediaDrm.setOnEventListener(listener == null ? null : new MediaDrm.OnEventListener() {
@Override
- public void onEvent(MediaDrm md, byte[] sessionId, int event, int extra, byte[] data) {
+ public void onEvent(@NonNull MediaDrm md, byte[] sessionId, int event, int extra,
+ byte[] data) {
listener.onEvent(FrameworkMediaDrm.this, sessionId, event, extra, data);
}
});
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/HttpMediaDrmCallback.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/HttpMediaDrmCallback.java
index be3bdd691..7b6051ae9 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/HttpMediaDrmCallback.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/HttpMediaDrmCallback.java
@@ -24,6 +24,8 @@ import org.telegram.messenger.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
import org.telegram.messenger.exoplayer2.upstream.DataSourceInputStream;
import org.telegram.messenger.exoplayer2.upstream.DataSpec;
import org.telegram.messenger.exoplayer2.upstream.HttpDataSource;
+import org.telegram.messenger.exoplayer2.upstream.HttpDataSource.Factory;
+import org.telegram.messenger.exoplayer2.util.Assertions;
import org.telegram.messenger.exoplayer2.util.Util;
import java.io.IOException;
import java.util.HashMap;
@@ -57,21 +59,62 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
}
/**
+ * @deprecated Use {@link HttpMediaDrmCallback#HttpMediaDrmCallback(String, Factory)}. Request
+ * properties can be set by calling {@link #setKeyRequestProperty(String, String)}.
* @param defaultUrl The default license URL.
* @param dataSourceFactory A factory from which to obtain {@link HttpDataSource} instances.
* @param keyRequestProperties Request properties to set when making key requests, or null.
*/
+ @Deprecated
public HttpMediaDrmCallback(String defaultUrl, HttpDataSource.Factory dataSourceFactory,
Map keyRequestProperties) {
this.dataSourceFactory = dataSourceFactory;
this.defaultUrl = defaultUrl;
- this.keyRequestProperties = keyRequestProperties;
+ this.keyRequestProperties = new HashMap<>();
+ if (keyRequestProperties != null) {
+ this.keyRequestProperties.putAll(keyRequestProperties);
+ }
+ }
+
+ /**
+ * Sets a header for key requests made by the callback.
+ *
+ * @param name The name of the header field.
+ * @param value The value of the field.
+ */
+ public void setKeyRequestProperty(String name, String value) {
+ Assertions.checkNotNull(name);
+ Assertions.checkNotNull(value);
+ synchronized (keyRequestProperties) {
+ keyRequestProperties.put(name, value);
+ }
+ }
+
+ /**
+ * Clears a header for key requests made by the callback.
+ *
+ * @param name The name of the header field.
+ */
+ public void clearKeyRequestProperty(String name) {
+ Assertions.checkNotNull(name);
+ synchronized (keyRequestProperties) {
+ keyRequestProperties.remove(name);
+ }
+ }
+
+ /**
+ * Clears all headers for key requests made by the callback.
+ */
+ public void clearAllKeyRequestProperties() {
+ synchronized (keyRequestProperties) {
+ keyRequestProperties.clear();
+ }
}
@Override
public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException {
String url = request.getDefaultUrl() + "&signedRequest=" + new String(request.getData());
- return executePost(url, new byte[0], null);
+ return executePost(dataSourceFactory, url, new byte[0], null);
}
@Override
@@ -85,14 +128,14 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
if (C.PLAYREADY_UUID.equals(uuid)) {
requestProperties.putAll(PLAYREADY_KEY_REQUEST_PROPERTIES);
}
- if (keyRequestProperties != null) {
+ synchronized (keyRequestProperties) {
requestProperties.putAll(keyRequestProperties);
}
- return executePost(url, request.getData(), requestProperties);
+ return executePost(dataSourceFactory, url, request.getData(), requestProperties);
}
- private byte[] executePost(String url, byte[] data, Map requestProperties)
- throws IOException {
+ private static byte[] executePost(HttpDataSource.Factory dataSourceFactory, String url,
+ byte[] data, Map requestProperties) throws IOException {
HttpDataSource dataSource = dataSourceFactory.createDataSource();
if (requestProperties != null) {
for (Map.Entry requestProperty : requestProperties.entrySet()) {
@@ -105,7 +148,7 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
try {
return Util.toByteArray(inputStream);
} finally {
- inputStream.close();
+ Util.closeQuietly(inputStream);
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/OfflineLicenseHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/OfflineLicenseHelper.java
new file mode 100755
index 000000000..1ed3d7c72
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/OfflineLicenseHelper.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.telegram.messenger.exoplayer2.drm;
+
+import android.media.MediaDrm;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Pair;
+import org.telegram.messenger.exoplayer2.C;
+import org.telegram.messenger.exoplayer2.drm.DefaultDrmSessionManager.EventListener;
+import org.telegram.messenger.exoplayer2.drm.DefaultDrmSessionManager.Mode;
+import org.telegram.messenger.exoplayer2.drm.DrmSession.DrmSessionException;
+import org.telegram.messenger.exoplayer2.upstream.HttpDataSource;
+import org.telegram.messenger.exoplayer2.upstream.HttpDataSource.Factory;
+import org.telegram.messenger.exoplayer2.util.Assertions;
+import java.io.IOException;
+import java.util.HashMap;
+
+/**
+ * Helper class to download, renew and release offline licenses.
+ */
+public final class OfflineLicenseHelper {
+
+ private final ConditionVariable conditionVariable;
+ private final DefaultDrmSessionManager drmSessionManager;
+ private final HandlerThread handlerThread;
+
+ /**
+ * Instantiates a new instance which uses Widevine CDM. Call {@link #release()} when the instance
+ * is no longer required.
+ *
+ * @param licenseUrl The default license URL.
+ * @param httpDataSourceFactory A factory from which to obtain {@link HttpDataSource} instances.
+ * @return A new instance which uses Widevine CDM.
+ * @throws UnsupportedDrmException If the Widevine DRM scheme is unsupported or cannot be
+ * instantiated.
+ */
+ public static OfflineLicenseHelper newWidevineInstance(
+ String licenseUrl, Factory httpDataSourceFactory) throws UnsupportedDrmException {
+ return newWidevineInstance(
+ new HttpMediaDrmCallback(licenseUrl, httpDataSourceFactory), null);
+ }
+
+ /**
+ * Instantiates a new instance which uses Widevine CDM. Call {@link #release()} when the instance
+ * is no longer required.
+ *
+ * @param callback Performs key and provisioning requests.
+ * @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument
+ * to {@link MediaDrm#getKeyRequest(byte[], byte[], String, int, HashMap)}. May be null.
+ * @return A new instance which uses Widevine CDM.
+ * @throws UnsupportedDrmException If the Widevine DRM scheme is unsupported or cannot be
+ * instantiated.
+ * @see DefaultDrmSessionManager#DefaultDrmSessionManager(java.util.UUID, ExoMediaDrm,
+ * MediaDrmCallback, HashMap, Handler, EventListener)
+ */
+ public static OfflineLicenseHelper newWidevineInstance(
+ MediaDrmCallback callback, HashMap optionalKeyRequestParameters)
+ throws UnsupportedDrmException {
+ return new OfflineLicenseHelper<>(FrameworkMediaDrm.newInstance(C.WIDEVINE_UUID), callback,
+ optionalKeyRequestParameters);
+ }
+
+ /**
+ * Constructs an instance. Call {@link #release()} when the instance is no longer required.
+ *
+ * @param mediaDrm An underlying {@link ExoMediaDrm} for use by the manager.
+ * @param callback Performs key and provisioning requests.
+ * @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument
+ * to {@link MediaDrm#getKeyRequest(byte[], byte[], String, int, HashMap)}. May be null.
+ * @see DefaultDrmSessionManager#DefaultDrmSessionManager(java.util.UUID, ExoMediaDrm,
+ * MediaDrmCallback, HashMap, Handler, EventListener)
+ */
+ public OfflineLicenseHelper(ExoMediaDrm mediaDrm, MediaDrmCallback callback,
+ HashMap optionalKeyRequestParameters) {
+ handlerThread = new HandlerThread("OfflineLicenseHelper");
+ handlerThread.start();
+ conditionVariable = new ConditionVariable();
+ EventListener eventListener = new EventListener() {
+ @Override
+ public void onDrmKeysLoaded() {
+ conditionVariable.open();
+ }
+
+ @Override
+ public void onDrmSessionManagerError(Exception e) {
+ conditionVariable.open();
+ }
+
+ @Override
+ public void onDrmKeysRestored() {
+ conditionVariable.open();
+ }
+
+ @Override
+ public void onDrmKeysRemoved() {
+ conditionVariable.open();
+ }
+ };
+ drmSessionManager = new DefaultDrmSessionManager<>(C.WIDEVINE_UUID, mediaDrm, callback,
+ optionalKeyRequestParameters, new Handler(handlerThread.getLooper()), eventListener);
+ }
+
+ /** Releases the helper. Should be called when the helper is no longer required. */
+ public void release() {
+ handlerThread.quit();
+ }
+
+ /**
+ * Downloads an offline license.
+ *
+ * @param drmInitData The {@link DrmInitData} for the content whose license is to be downloaded.
+ * @return The key set id for the downloaded license.
+ * @throws IOException If an error occurs reading data from the stream.
+ * @throws InterruptedException If the thread has been interrupted.
+ * @throws DrmSessionException Thrown when a DRM session error occurs.
+ */
+ public synchronized byte[] downloadLicense(DrmInitData drmInitData) throws IOException,
+ InterruptedException, DrmSessionException {
+ Assertions.checkArgument(drmInitData != null);
+ return blockingKeyRequest(DefaultDrmSessionManager.MODE_DOWNLOAD, null, drmInitData);
+ }
+
+ /**
+ * Renews an offline license.
+ *
+ * @param offlineLicenseKeySetId The key set id of the license to be renewed.
+ * @return The renewed offline license key set id.
+ * @throws DrmSessionException Thrown when a DRM session error occurs.
+ */
+ public synchronized byte[] renewLicense(byte[] offlineLicenseKeySetId)
+ throws DrmSessionException {
+ Assertions.checkNotNull(offlineLicenseKeySetId);
+ return blockingKeyRequest(DefaultDrmSessionManager.MODE_DOWNLOAD, offlineLicenseKeySetId, null);
+ }
+
+ /**
+ * Releases an offline license.
+ *
+ * @param offlineLicenseKeySetId The key set id of the license to be released.
+ * @throws DrmSessionException Thrown when a DRM session error occurs.
+ */
+ public synchronized void releaseLicense(byte[] offlineLicenseKeySetId)
+ throws DrmSessionException {
+ Assertions.checkNotNull(offlineLicenseKeySetId);
+ blockingKeyRequest(DefaultDrmSessionManager.MODE_RELEASE, offlineLicenseKeySetId, null);
+ }
+
+ /**
+ * Returns the remaining license and playback durations in seconds, for an offline license.
+ *
+ * @param offlineLicenseKeySetId The key set id of the license.
+ * @return The remaining license and playback durations, in seconds.
+ * @throws DrmSessionException Thrown when a DRM session error occurs.
+ */
+ public synchronized Pair getLicenseDurationRemainingSec(byte[] offlineLicenseKeySetId)
+ throws DrmSessionException {
+ Assertions.checkNotNull(offlineLicenseKeySetId);
+ DrmSession drmSession = openBlockingKeyRequest(DefaultDrmSessionManager.MODE_QUERY,
+ offlineLicenseKeySetId, null);
+ DrmSessionException error = drmSession.getError();
+ Pair licenseDurationRemainingSec =
+ WidevineUtil.getLicenseDurationRemainingSec(drmSession);
+ drmSessionManager.releaseSession(drmSession);
+ if (error != null) {
+ if (error.getCause() instanceof KeysExpiredException) {
+ return Pair.create(0L, 0L);
+ }
+ throw error;
+ }
+ return licenseDurationRemainingSec;
+ }
+
+ private byte[] blockingKeyRequest(@Mode int licenseMode, byte[] offlineLicenseKeySetId,
+ DrmInitData drmInitData) throws DrmSessionException {
+ DrmSession drmSession = openBlockingKeyRequest(licenseMode, offlineLicenseKeySetId,
+ drmInitData);
+ DrmSessionException error = drmSession.getError();
+ byte[] keySetId = drmSession.getOfflineLicenseKeySetId();
+ drmSessionManager.releaseSession(drmSession);
+ if (error != null) {
+ throw error;
+ }
+ return keySetId;
+ }
+
+ private DrmSession openBlockingKeyRequest(@Mode int licenseMode, byte[] offlineLicenseKeySetId,
+ DrmInitData drmInitData) {
+ drmSessionManager.setMode(licenseMode, offlineLicenseKeySetId);
+ conditionVariable.close();
+ DrmSession drmSession = drmSessionManager.acquireSession(handlerThread.getLooper(),
+ drmInitData);
+ // Block current thread until key loading is finished
+ conditionVariable.block();
+ return drmSession;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/UnsupportedDrmException.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/UnsupportedDrmException.java
index 4c20d7fdc..6c8c66fad 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/UnsupportedDrmException.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/UnsupportedDrmException.java
@@ -43,8 +43,7 @@ public final class UnsupportedDrmException extends Exception {
/**
* Either {@link #REASON_UNSUPPORTED_SCHEME} or {@link #REASON_INSTANTIATION_ERROR}.
*/
- @Reason
- public final int reason;
+ @Reason public final int reason;
/**
* @param reason {@link #REASON_UNSUPPORTED_SCHEME} or {@link #REASON_INSTANTIATION_ERROR}.
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/WidevineUtil.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/WidevineUtil.java
new file mode 100755
index 000000000..b1e11dbb0
--- /dev/null
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/drm/WidevineUtil.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.telegram.messenger.exoplayer2.drm;
+
+import android.util.Pair;
+import org.telegram.messenger.exoplayer2.C;
+import java.util.Map;
+
+/**
+ * Utility methods for Widevine.
+ */
+public final class WidevineUtil {
+
+ /** Widevine specific key status field name for the remaining license duration, in seconds. */
+ public static final String PROPERTY_LICENSE_DURATION_REMAINING = "LicenseDurationRemaining";
+ /** Widevine specific key status field name for the remaining playback duration, in seconds. */
+ public static final String PROPERTY_PLAYBACK_DURATION_REMAINING = "PlaybackDurationRemaining";
+
+ private WidevineUtil() {}
+
+ /**
+ * Returns license and playback durations remaining in seconds.
+ *
+ * @return A {@link Pair} consisting of the remaining license and playback durations in seconds.
+ * @throws IllegalStateException If called when a session isn't opened.
+ * @param drmSession
+ */
+ public static Pair getLicenseDurationRemainingSec(DrmSession> drmSession) {
+ Map keyStatus = drmSession.queryKeyStatus();
+ return new Pair<>(
+ getDurationRemainingSec(keyStatus, PROPERTY_LICENSE_DURATION_REMAINING),
+ getDurationRemainingSec(keyStatus, PROPERTY_PLAYBACK_DURATION_REMAINING));
+ }
+
+ private static long getDurationRemainingSec(Map keyStatus, String property) {
+ if (keyStatus != null) {
+ try {
+ String value = keyStatus.get(property);
+ if (value != null) {
+ return Long.parseLong(value);
+ }
+ } catch (NumberFormatException e) {
+ // do nothing.
+ }
+ }
+ return C.TIME_UNSET;
+ }
+
+}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ChunkIndex.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ChunkIndex.java
index 6502a5fd5..3d9cef3bf 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ChunkIndex.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ChunkIndex.java
@@ -61,7 +61,11 @@ public final class ChunkIndex implements SeekMap {
this.durationsUs = durationsUs;
this.timesUs = timesUs;
length = sizes.length;
- durationUs = durationsUs[length - 1] + timesUs[length - 1];
+ if (length > 0) {
+ durationUs = durationsUs[length - 1] + timesUs[length - 1];
+ } else {
+ durationUs = 0;
+ }
}
/**
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorInput.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorInput.java
index d495cde17..34b640c80 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorInput.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorInput.java
@@ -18,6 +18,7 @@ package org.telegram.messenger.exoplayer2.extractor;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.upstream.DataSource;
import org.telegram.messenger.exoplayer2.util.Assertions;
+import org.telegram.messenger.exoplayer2.util.Util;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
@@ -27,6 +28,8 @@ import java.util.Arrays;
*/
public final class DefaultExtractorInput implements ExtractorInput {
+ private static final int PEEK_MIN_FREE_SPACE_AFTER_RESIZE = 64 * 1024;
+ private static final int PEEK_MAX_FREE_SPACE = 512 * 1024;
private static final byte[] SCRATCH_SPACE = new byte[4096];
private final DataSource dataSource;
@@ -46,7 +49,7 @@ public final class DefaultExtractorInput implements ExtractorInput {
this.dataSource = dataSource;
this.position = position;
this.streamLength = length;
- peekBuffer = new byte[8 * 1024];
+ peekBuffer = new byte[PEEK_MIN_FREE_SPACE_AFTER_RESIZE];
}
@Override
@@ -176,7 +179,9 @@ public final class DefaultExtractorInput implements ExtractorInput {
private void ensureSpaceForPeek(int length) {
int requiredLength = peekBufferPosition + length;
if (requiredLength > peekBuffer.length) {
- peekBuffer = Arrays.copyOf(peekBuffer, Math.max(peekBuffer.length * 2, requiredLength));
+ int newPeekCapacity = Util.constrainValue(peekBuffer.length * 2,
+ requiredLength + PEEK_MIN_FREE_SPACE_AFTER_RESIZE, requiredLength + PEEK_MAX_FREE_SPACE);
+ peekBuffer = Arrays.copyOf(peekBuffer, newPeekCapacity);
}
}
@@ -218,7 +223,12 @@ public final class DefaultExtractorInput implements ExtractorInput {
private void updatePeekBuffer(int bytesConsumed) {
peekBufferLength -= bytesConsumed;
peekBufferPosition = 0;
- System.arraycopy(peekBuffer, bytesConsumed, peekBuffer, 0, peekBufferLength);
+ byte[] newPeekBuffer = peekBuffer;
+ if (peekBufferLength < peekBuffer.length - PEEK_MAX_FREE_SPACE) {
+ newPeekBuffer = new byte[peekBufferLength + PEEK_MIN_FREE_SPACE_AFTER_RESIZE];
+ }
+ System.arraycopy(peekBuffer, bytesConsumed, newPeekBuffer, 0, peekBufferLength);
+ peekBuffer = newPeekBuffer;
}
/**
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorsFactory.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorsFactory.java
index 5709efb9e..162c2d57e 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorsFactory.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultExtractorsFactory.java
@@ -15,141 +15,132 @@
*/
package org.telegram.messenger.exoplayer2.extractor;
-import java.util.ArrayList;
-import java.util.List;
+import org.telegram.messenger.exoplayer2.extractor.flv.FlvExtractor;
+import org.telegram.messenger.exoplayer2.extractor.mkv.MatroskaExtractor;
+import org.telegram.messenger.exoplayer2.extractor.mp3.Mp3Extractor;
+import org.telegram.messenger.exoplayer2.extractor.mp4.FragmentedMp4Extractor;
+import org.telegram.messenger.exoplayer2.extractor.mp4.Mp4Extractor;
+import org.telegram.messenger.exoplayer2.extractor.ogg.OggExtractor;
+import org.telegram.messenger.exoplayer2.extractor.ts.Ac3Extractor;
+import org.telegram.messenger.exoplayer2.extractor.ts.AdtsExtractor;
+import org.telegram.messenger.exoplayer2.extractor.ts.DefaultTsPayloadReaderFactory;
+import org.telegram.messenger.exoplayer2.extractor.ts.PsExtractor;
+import org.telegram.messenger.exoplayer2.extractor.ts.TsExtractor;
+import org.telegram.messenger.exoplayer2.extractor.wav.WavExtractor;
+import java.lang.reflect.Constructor;
/**
* An {@link ExtractorsFactory} that provides an array of extractors for the following formats:
*
*
- * - MP4, including M4A ({@link org.telegram.messenger.exoplayer2.extractor.mp4.Mp4Extractor})
- * - fMP4 ({@link org.telegram.messenger.exoplayer2.extractor.mp4.FragmentedMp4Extractor})
- * - Matroska and WebM ({@link org.telegram.messenger.exoplayer2.extractor.mkv.MatroskaExtractor})
- *
- * - Ogg Vorbis/FLAC ({@link org.telegram.messenger.exoplayer2.extractor.ogg.OggExtractor}
- * - MP3 ({@link org.telegram.messenger.exoplayer2.extractor.mp3.Mp3Extractor})
- * - AAC ({@link org.telegram.messenger.exoplayer2.extractor.ts.AdtsExtractor})
- * - MPEG TS ({@link org.telegram.messenger.exoplayer2.extractor.ts.TsExtractor})
- * - MPEG PS ({@link org.telegram.messenger.exoplayer2.extractor.ts.PsExtractor})
- * - FLV ({@link org.telegram.messenger.exoplayer2.extractor.flv.FlvExtractor})
- * - WAV ({@link org.telegram.messenger.exoplayer2.extractor.wav.WavExtractor})
+ * - MP4, including M4A ({@link Mp4Extractor})
+ * - fMP4 ({@link FragmentedMp4Extractor})
+ * - Matroska and WebM ({@link MatroskaExtractor})
+ * - Ogg Vorbis/FLAC ({@link OggExtractor}
+ * - MP3 ({@link Mp3Extractor})
+ * - AAC ({@link AdtsExtractor})
+ * - MPEG TS ({@link TsExtractor})
+ * - MPEG PS ({@link PsExtractor})
+ * - FLV ({@link FlvExtractor})
+ * - WAV ({@link WavExtractor})
+ * - AC3 ({@link Ac3Extractor})
* - FLAC (only available if the FLAC extension is built and included)
*
*/
public final class DefaultExtractorsFactory implements ExtractorsFactory {
- // Lazily initialized default extractor classes in priority order.
- private static List> defaultExtractorClasses;
+ private static final Constructor extends Extractor> FLAC_EXTRACTOR_CONSTRUCTOR;
+ static {
+ Constructor extends Extractor> flacExtractorConstructor = null;
+ try {
+ flacExtractorConstructor =
+ Class.forName("org.telegram.messenger.exoplayer2.ext.flac.FlacExtractor")
+ .asSubclass(Extractor.class).getConstructor();
+ } catch (ClassNotFoundException e) {
+ // Extractor not found.
+ } catch (NoSuchMethodException e) {
+ // Constructor not found.
+ }
+ FLAC_EXTRACTOR_CONSTRUCTOR = flacExtractorConstructor;
+ }
+
+ private @MatroskaExtractor.Flags int matroskaFlags;
+ private @FragmentedMp4Extractor.Flags int fragmentedMp4Flags;
+ private @Mp3Extractor.Flags int mp3Flags;
+ private @DefaultTsPayloadReaderFactory.Flags int tsFlags;
/**
- * Creates a new factory for the default extractors.
+ * Sets flags for {@link MatroskaExtractor} instances created by the factory.
+ *
+ * @see MatroskaExtractor#MatroskaExtractor(int)
+ * @param flags The flags to use.
+ * @return The factory, for convenience.
*/
- public DefaultExtractorsFactory() {
- synchronized (DefaultExtractorsFactory.class) {
- if (defaultExtractorClasses == null) {
- // Lazily initialize defaultExtractorClasses.
- List> extractorClasses = new ArrayList<>();
- // We reference extractors using reflection so that they can be deleted cleanly.
- // Class.forName is used so that automated tools like proguard can detect the use of
- // reflection (see http://proguard.sourceforge.net/FAQ.html#forname).
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.mkv.MatroskaExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.mp4.FragmentedMp4Extractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.mp4.Mp4Extractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.mp3.Mp3Extractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.ts.AdtsExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.ts.Ac3Extractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.ts.TsExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.flv.FlvExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.ogg.OggExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.ts.PsExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.extractor.wav.WavExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- try {
- extractorClasses.add(
- Class.forName("org.telegram.messenger.exoplayer2.ext.flac.FlacExtractor")
- .asSubclass(Extractor.class));
- } catch (ClassNotFoundException e) {
- // Extractor not found.
- }
- defaultExtractorClasses = extractorClasses;
- }
- }
+ public synchronized DefaultExtractorsFactory setMatroskaExtractorFlags(
+ @MatroskaExtractor.Flags int flags) {
+ this.matroskaFlags = flags;
+ return this;
+ }
+
+ /**
+ * Sets flags for {@link FragmentedMp4Extractor} instances created by the factory.
+ *
+ * @see FragmentedMp4Extractor#FragmentedMp4Extractor(int)
+ * @param flags The flags to use.
+ * @return The factory, for convenience.
+ */
+ public synchronized DefaultExtractorsFactory setFragmentedMp4ExtractorFlags(
+ @FragmentedMp4Extractor.Flags int flags) {
+ this.fragmentedMp4Flags = flags;
+ return this;
+ }
+
+ /**
+ * Sets flags for {@link Mp3Extractor} instances created by the factory.
+ *
+ * @see Mp3Extractor#Mp3Extractor(int)
+ * @param flags The flags to use.
+ * @return The factory, for convenience.
+ */
+ public synchronized DefaultExtractorsFactory setMp3ExtractorFlags(@Mp3Extractor.Flags int flags) {
+ mp3Flags = flags;
+ return this;
+ }
+
+ /**
+ * Sets flags for {@link DefaultTsPayloadReaderFactory}s used by {@link TsExtractor} instances
+ * created by the factory.
+ *
+ * @see TsExtractor#TsExtractor(int)
+ * @param flags The flags to use.
+ * @return The factory, for convenience.
+ */
+ public synchronized DefaultExtractorsFactory setTsExtractorFlags(
+ @DefaultTsPayloadReaderFactory.Flags int flags) {
+ tsFlags = flags;
+ return this;
}
@Override
- public Extractor[] createExtractors() {
- Extractor[] extractors = new Extractor[defaultExtractorClasses.size()];
- for (int i = 0; i < extractors.length; i++) {
+ public synchronized Extractor[] createExtractors() {
+ Extractor[] extractors = new Extractor[FLAC_EXTRACTOR_CONSTRUCTOR == null ? 11 : 12];
+ extractors[0] = new MatroskaExtractor(matroskaFlags);
+ extractors[1] = new FragmentedMp4Extractor(fragmentedMp4Flags);
+ extractors[2] = new Mp4Extractor();
+ extractors[3] = new Mp3Extractor(mp3Flags);
+ extractors[4] = new AdtsExtractor();
+ extractors[5] = new Ac3Extractor();
+ extractors[6] = new TsExtractor(tsFlags);
+ extractors[7] = new FlvExtractor();
+ extractors[8] = new OggExtractor();
+ extractors[9] = new PsExtractor();
+ extractors[10] = new WavExtractor();
+ if (FLAC_EXTRACTOR_CONSTRUCTOR != null) {
try {
- extractors[i] = defaultExtractorClasses.get(i).getConstructor().newInstance();
+ extractors[11] = FLAC_EXTRACTOR_CONSTRUCTOR.newInstance();
} catch (Exception e) {
// Should never happen.
- throw new IllegalStateException("Unexpected error creating default extractor", e);
+ throw new IllegalStateException("Unexpected error creating FLAC extractor", e);
}
}
return extractors;
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultTrackOutput.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultTrackOutput.java
index ec2eb222c..1359ce4da 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultTrackOutput.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/DefaultTrackOutput.java
@@ -70,11 +70,12 @@ public final class DefaultTrackOutput implements TrackOutput {
private Format downstreamFormat;
// Accessed only by the loading thread (or the consuming thread when there is no loading thread).
+ private boolean pendingFormatAdjustment;
+ private Format lastUnadjustedFormat;
private long sampleOffsetUs;
private long totalBytesWritten;
private Allocation lastAllocation;
private int lastAllocationOffset;
- private boolean needKeyframe;
private boolean pendingSplice;
private UpstreamFormatChangedListener upstreamFormatChangeListener;
@@ -90,7 +91,6 @@ public final class DefaultTrackOutput implements TrackOutput {
scratch = new ParsableByteArray(INITIAL_SCRATCH_SIZE);
state = new AtomicInteger();
lastAllocationOffset = allocationLength;
- needKeyframe = true;
}
// Called by the consuming thread, but only when there is no loading thread.
@@ -226,13 +226,28 @@ public final class DefaultTrackOutput implements TrackOutput {
}
/**
- * Attempts to skip to the keyframe before the specified time, if it's present in the buffer.
+ * Skips all samples currently in the buffer.
+ */
+ public void skipAll() {
+ long nextOffset = infoQueue.skipAll();
+ if (nextOffset != C.POSITION_UNSET) {
+ dropDownstreamTo(nextOffset);
+ }
+ }
+
+ /**
+ * Attempts to skip to the keyframe before or at the specified time. Succeeds only if the buffer
+ * contains a keyframe with a timestamp of {@code timeUs} or earlier. If
+ * {@code allowTimeBeyondBuffer} is {@code false} then it is also required that {@code timeUs}
+ * falls within the buffer.
*
* @param timeUs The seek time.
+ * @param allowTimeBeyondBuffer Whether the skip can succeed if {@code timeUs} is beyond the end
+ * of the buffer.
* @return Whether the skip was successful.
*/
- public boolean skipToKeyframeBefore(long timeUs) {
- long nextOffset = infoQueue.skipToKeyframeBefore(timeUs);
+ public boolean skipToKeyframeBefore(long timeUs, boolean allowTimeBeyondBuffer) {
+ long nextOffset = infoQueue.skipToKeyframeBefore(timeUs, allowTimeBeyondBuffer);
if (nextOffset == C.POSITION_UNSET) {
return false;
}
@@ -247,38 +262,41 @@ public final class DefaultTrackOutput implements TrackOutput {
* @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
* end of the stream. If the end of the stream has been reached, the
* {@link C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
+ * @param formatRequired Whether the caller requires that the format of the stream be read even if
+ * it's not changing. A sample will never be read if set to true; however, it is still possible
+ * for the end of stream or nothing to be read.
* @param loadingFinished True if an empty queue should be considered the end of the stream.
* @param decodeOnlyUntilUs If a buffer is read, the {@link C#BUFFER_FLAG_DECODE_ONLY} flag will
* be set if the buffer's timestamp is less than this value.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
* {@link C#RESULT_BUFFER_READ}.
*/
- public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean loadingFinished,
- long decodeOnlyUntilUs) {
- switch (infoQueue.readData(formatHolder, buffer, downstreamFormat, extrasHolder)) {
- case C.RESULT_NOTHING_READ:
- if (loadingFinished) {
- buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
- return C.RESULT_BUFFER_READ;
- }
- return C.RESULT_NOTHING_READ;
+ public int readData(FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired,
+ boolean loadingFinished, long decodeOnlyUntilUs) {
+ int result = infoQueue.readData(formatHolder, buffer, formatRequired, loadingFinished,
+ downstreamFormat, extrasHolder);
+ switch (result) {
case C.RESULT_FORMAT_READ:
downstreamFormat = formatHolder.format;
return C.RESULT_FORMAT_READ;
case C.RESULT_BUFFER_READ:
- if (buffer.timeUs < decodeOnlyUntilUs) {
- buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
+ if (!buffer.isEndOfStream()) {
+ if (buffer.timeUs < decodeOnlyUntilUs) {
+ buffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
+ }
+ // Read encryption data if the sample is encrypted.
+ if (buffer.isEncrypted()) {
+ readEncryptionData(buffer, extrasHolder);
+ }
+ // Write the sample data into the holder.
+ buffer.ensureSpaceForWrite(extrasHolder.size);
+ readData(extrasHolder.offset, buffer.data, extrasHolder.size);
+ // Advance the read head.
+ dropDownstreamTo(extrasHolder.nextOffset);
}
- // Read encryption data if the sample is encrypted.
- if (buffer.isEncrypted()) {
- readEncryptionData(buffer, extrasHolder);
- }
- // Write the sample data into the holder.
- buffer.ensureSpaceForWrite(extrasHolder.size);
- readData(extrasHolder.offset, buffer.data, extrasHolder.size);
- // Advance the read head.
- dropDownstreamTo(extrasHolder.nextOffset);
return C.RESULT_BUFFER_READ;
+ case C.RESULT_NOTHING_READ:
+ return C.RESULT_NOTHING_READ;
default:
throw new IllegalStateException();
}
@@ -425,23 +443,24 @@ public final class DefaultTrackOutput implements TrackOutput {
}
/**
- * Like {@link #format(Format)}, but with an offset that will be added to the timestamps of
- * samples subsequently queued to the buffer. The offset is also used to adjust
- * {@link Format#subsampleOffsetUs} for both the {@link Format} passed and those subsequently
- * passed to {@link #format(Format)}.
+ * Sets an offset that will be added to the timestamps (and sub-sample timestamps) of samples
+ * subsequently queued to the buffer.
*
- * @param format The format.
* @param sampleOffsetUs The timestamp offset in microseconds.
*/
- public void formatWithOffset(Format format, long sampleOffsetUs) {
- this.sampleOffsetUs = sampleOffsetUs;
- format(format);
+ public void setSampleOffsetUs(long sampleOffsetUs) {
+ if (this.sampleOffsetUs != sampleOffsetUs) {
+ this.sampleOffsetUs = sampleOffsetUs;
+ pendingFormatAdjustment = true;
+ }
}
@Override
public void format(Format format) {
Format adjustedFormat = getAdjustedSampleFormat(format, sampleOffsetUs);
boolean formatChanged = infoQueue.format(adjustedFormat);
+ lastUnadjustedFormat = format;
+ pendingFormatAdjustment = false;
if (upstreamFormatChangeListener != null && formatChanged) {
upstreamFormatChangeListener.onUpstreamFormatChanged(adjustedFormat);
}
@@ -498,6 +517,9 @@ public final class DefaultTrackOutput implements TrackOutput {
@Override
public void sampleMetadata(long timeUs, @C.BufferFlags int flags, int size, int offset,
byte[] encryptionKey) {
+ if (pendingFormatAdjustment) {
+ format(lastUnadjustedFormat);
+ }
if (!startWriteOperation()) {
infoQueue.commitSampleTimestamp(timeUs);
return;
@@ -509,12 +531,6 @@ public final class DefaultTrackOutput implements TrackOutput {
}
pendingSplice = false;
}
- if (needKeyframe) {
- if ((flags & C.BUFFER_FLAG_KEY_FRAME) == 0) {
- return;
- }
- needKeyframe = false;
- }
timeUs += sampleOffsetUs;
long absoluteOffset = totalBytesWritten - size - offset;
infoQueue.commitSample(timeUs, flags, absoluteOffset, size, encryptionKey);
@@ -544,7 +560,6 @@ public final class DefaultTrackOutput implements TrackOutput {
totalBytesWritten = 0;
lastAllocation = null;
lastAllocationOffset = allocationLength;
- needKeyframe = true;
}
/**
@@ -601,6 +616,7 @@ public final class DefaultTrackOutput implements TrackOutput {
private long largestDequeuedTimestampUs;
private long largestQueuedTimestampUs;
+ private boolean upstreamKeyframeRequired;
private boolean upstreamFormatRequired;
private Format upstreamFormat;
private int upstreamSourceId;
@@ -617,6 +633,7 @@ public final class DefaultTrackOutput implements TrackOutput {
largestDequeuedTimestampUs = Long.MIN_VALUE;
largestQueuedTimestampUs = Long.MIN_VALUE;
upstreamFormatRequired = true;
+ upstreamKeyframeRequired = true;
}
public void clearSampleData() {
@@ -624,6 +641,7 @@ public final class DefaultTrackOutput implements TrackOutput {
relativeReadIndex = 0;
relativeWriteIndex = 0;
queueSize = 0;
+ upstreamKeyframeRequired = true;
}
// Called by the consuming thread, but only when there is no loading thread.
@@ -732,28 +750,44 @@ public final class DefaultTrackOutput implements TrackOutput {
* about the sample, but not its data. The size and absolute position of the data in the
* rolling buffer is stored in {@code extrasHolder}, along with an encryption id if present
* and the absolute position of the first byte that may still be required after the current
- * sample has been read.
+ * sample has been read. May be null if the caller requires that the format of the stream be
+ * read even if it's not changing.
+ * @param formatRequired Whether the caller requires that the format of the stream be read even
+ * if it's not changing. A sample will never be read if set to true; however, it is still
+ * possible for the end of stream or nothing to be read.
+ * @param loadingFinished True if an empty queue should be considered the end of the stream.
* @param downstreamFormat The current downstream {@link Format}. If the format of the next
* sample is different to the current downstream format then a format will be read.
* @param extrasHolder The holder into which extra sample information should be written.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ}
* or {@link C#RESULT_BUFFER_READ}.
*/
+ @SuppressWarnings("ReferenceEquality")
public synchronized int readData(FormatHolder formatHolder, DecoderInputBuffer buffer,
- Format downstreamFormat, BufferExtrasHolder extrasHolder) {
+ boolean formatRequired, boolean loadingFinished, Format downstreamFormat,
+ BufferExtrasHolder extrasHolder) {
if (queueSize == 0) {
- if (upstreamFormat != null && upstreamFormat != downstreamFormat) {
+ if (loadingFinished) {
+ buffer.setFlags(C.BUFFER_FLAG_END_OF_STREAM);
+ return C.RESULT_BUFFER_READ;
+ } else if (upstreamFormat != null
+ && (formatRequired || upstreamFormat != downstreamFormat)) {
formatHolder.format = upstreamFormat;
return C.RESULT_FORMAT_READ;
+ } else {
+ return C.RESULT_NOTHING_READ;
}
- return C.RESULT_NOTHING_READ;
}
- if (formats[relativeReadIndex] != downstreamFormat) {
+ if (formatRequired || formats[relativeReadIndex] != downstreamFormat) {
formatHolder.format = formats[relativeReadIndex];
return C.RESULT_FORMAT_READ;
}
+ if (buffer.isFlagsOnly()) {
+ return C.RESULT_NOTHING_READ;
+ }
+
buffer.timeUs = timesUs[relativeReadIndex];
buffer.setFlags(flags[relativeReadIndex]);
extrasHolder.size = sizes[relativeReadIndex];
@@ -775,20 +809,40 @@ public final class DefaultTrackOutput implements TrackOutput {
}
/**
- * Attempts to locate the keyframe before the specified time, if it's present in the buffer.
+ * Skips all samples in the buffer.
+ *
+ * @return The offset up to which data should be dropped, or {@link C#POSITION_UNSET} if no
+ * dropping of data is required.
+ */
+ public synchronized long skipAll() {
+ if (queueSize == 0) {
+ return C.POSITION_UNSET;
+ }
+
+ int lastSampleIndex = (relativeReadIndex + queueSize - 1) % capacity;
+ relativeReadIndex = (relativeReadIndex + queueSize) % capacity;
+ absoluteReadIndex += queueSize;
+ queueSize = 0;
+ return offsets[lastSampleIndex] + sizes[lastSampleIndex];
+ }
+
+ /**
+ * Attempts to locate the keyframe before or at the specified time. If
+ * {@code allowTimeBeyondBuffer} is {@code false} then it is also required that {@code timeUs}
+ * falls within the buffer.
*
* @param timeUs The seek time.
+ * @param allowTimeBeyondBuffer Whether the skip can succeed if {@code timeUs} is beyond the end
+ * of the buffer.
* @return The offset of the keyframe's data if the keyframe was present.
* {@link C#POSITION_UNSET} otherwise.
*/
- public synchronized long skipToKeyframeBefore(long timeUs) {
+ public synchronized long skipToKeyframeBefore(long timeUs, boolean allowTimeBeyondBuffer) {
if (queueSize == 0 || timeUs < timesUs[relativeReadIndex]) {
return C.POSITION_UNSET;
}
- int lastWriteIndex = (relativeWriteIndex == 0 ? capacity : relativeWriteIndex) - 1;
- long lastTimeUs = timesUs[lastWriteIndex];
- if (timeUs > lastTimeUs) {
+ if (timeUs > largestQueuedTimestampUs && !allowTimeBeyondBuffer) {
return C.POSITION_UNSET;
}
@@ -814,9 +868,9 @@ public final class DefaultTrackOutput implements TrackOutput {
return C.POSITION_UNSET;
}
- queueSize -= sampleCountToKeyframe;
relativeReadIndex = (relativeReadIndex + sampleCountToKeyframe) % capacity;
absoluteReadIndex += sampleCountToKeyframe;
+ queueSize -= sampleCountToKeyframe;
return offsets[relativeReadIndex];
}
@@ -839,6 +893,12 @@ public final class DefaultTrackOutput implements TrackOutput {
public synchronized void commitSample(long timeUs, @C.BufferFlags int sampleFlags, long offset,
int size, byte[] encryptionKey) {
+ if (upstreamKeyframeRequired) {
+ if ((sampleFlags & C.BUFFER_FLAG_KEY_FRAME) == 0) {
+ return;
+ }
+ upstreamKeyframeRequired = false;
+ }
Assertions.checkState(!upstreamFormatRequired);
commitSampleTimestamp(timeUs);
timesUs[relativeWriteIndex] = timeUs;
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/Extractor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/Extractor.java
index 615e4d0aa..8e5c6bd06 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/Extractor.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/Extractor.java
@@ -93,12 +93,14 @@ public interface Extractor {
* position} in the stream. Valid random access positions are the start of the stream and
* positions that can be obtained from any {@link SeekMap} passed to the {@link ExtractorOutput}.
*
- * @param position The seek position.
+ * @param position The byte offset in the stream from which data will be provided.
+ * @param timeUs The seek time in microseconds.
*/
- void seek(long position);
+ void seek(long position, long timeUs);
/**
* Releases all kept resources.
*/
void release();
+
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ExtractorOutput.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ExtractorOutput.java
index 89f935dbd..c266fa239 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ExtractorOutput.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/ExtractorOutput.java
@@ -23,17 +23,18 @@ public interface ExtractorOutput {
/**
* Called by the {@link Extractor} to get the {@link TrackOutput} for a specific track.
*
- * The same {@link TrackOutput} is returned if multiple calls are made with the same
- * {@code trackId}.
+ * The same {@link TrackOutput} is returned if multiple calls are made with the same {@code id}.
*
- * @param trackId A track identifier.
+ * @param id A track identifier.
+ * @param type The type of the track. Typically one of the {@link org.telegram.messenger.exoplayer2.C}
+ * {@code TRACK_TYPE_*} constants.
* @return The {@link TrackOutput} for the given track identifier.
*/
- TrackOutput track(int trackId);
+ TrackOutput track(int id, int type);
/**
* Called when all tracks have been identified, meaning no new {@code trackId} values will be
- * passed to {@link #track(int)}.
+ * passed to {@link #track(int, int)}.
*/
void endTracks();
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/GaplessInfoHolder.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/GaplessInfoHolder.java
index dd6dead24..7a0fa45b7 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/GaplessInfoHolder.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/GaplessInfoHolder.java
@@ -18,6 +18,7 @@ package org.telegram.messenger.exoplayer2.extractor;
import org.telegram.messenger.exoplayer2.Format;
import org.telegram.messenger.exoplayer2.metadata.Metadata;
import org.telegram.messenger.exoplayer2.metadata.id3.CommentFrame;
+import org.telegram.messenger.exoplayer2.metadata.id3.Id3Decoder.FramePredicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -26,6 +27,18 @@ import java.util.regex.Pattern;
*/
public final class GaplessInfoHolder {
+ /**
+ * A {@link FramePredicate} suitable for use when decoding {@link Metadata} that will be passed
+ * to {@link #setFromMetadata(Metadata)}. Only frames that might contain gapless playback
+ * information are decoded.
+ */
+ public static final FramePredicate GAPLESS_INFO_ID3_FRAME_PREDICATE = new FramePredicate() {
+ @Override
+ public boolean evaluate(int majorVersion, int id0, int id1, int id2, int id3) {
+ return id0 == 'C' && id1 == 'O' && id2 == 'M' && (id3 == 'M' || majorVersion == 2);
+ }
+ };
+
private static final String GAPLESS_COMMENT_ID = "iTunSMPB";
private static final Pattern GAPLESS_COMMENT_PATTERN =
Pattern.compile("^ [0-9a-fA-F]{8} ([0-9a-fA-F]{8}) ([0-9a-fA-F]{8})");
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/AudioTagPayloadReader.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/AudioTagPayloadReader.java
index 8f764a7db..0637c9f24 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/AudioTagPayloadReader.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/AudioTagPayloadReader.java
@@ -29,21 +29,20 @@ import java.util.Collections;
*/
/* package */ final class AudioTagPayloadReader extends TagPayloadReader {
- // Audio format
+ private static final int AUDIO_FORMAT_MP3 = 2;
+ private static final int AUDIO_FORMAT_ALAW = 7;
+ private static final int AUDIO_FORMAT_ULAW = 8;
private static final int AUDIO_FORMAT_AAC = 10;
- // AAC PACKET TYPE
private static final int AAC_PACKET_TYPE_SEQUENCE_HEADER = 0;
private static final int AAC_PACKET_TYPE_AAC_RAW = 1;
- // SAMPLING RATES
- private static final int[] AUDIO_SAMPLING_RATE_TABLE = new int[] {
- 5500, 11000, 22000, 44000
- };
+ private static final int[] AUDIO_SAMPLING_RATE_TABLE = new int[] {5512, 11025, 22050, 44100};
// State variables
private boolean hasParsedAudioDataHeader;
private boolean hasOutputFormat;
+ private int audioFormat;
public AudioTagPayloadReader(TrackOutput output) {
super(output);
@@ -58,13 +57,23 @@ import java.util.Collections;
protected boolean parseHeader(ParsableByteArray data) throws UnsupportedFormatException {
if (!hasParsedAudioDataHeader) {
int header = data.readUnsignedByte();
- int audioFormat = (header >> 4) & 0x0F;
- int sampleRateIndex = (header >> 2) & 0x03;
- if (sampleRateIndex < 0 || sampleRateIndex >= AUDIO_SAMPLING_RATE_TABLE.length) {
- throw new UnsupportedFormatException("Invalid sample rate index: " + sampleRateIndex);
- }
- // TODO: Add support for MP3 and PCM.
- if (audioFormat != AUDIO_FORMAT_AAC) {
+ audioFormat = (header >> 4) & 0x0F;
+ if (audioFormat == AUDIO_FORMAT_MP3) {
+ int sampleRateIndex = (header >> 2) & 0x03;
+ int sampleRate = AUDIO_SAMPLING_RATE_TABLE[sampleRateIndex];
+ Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_MPEG, null,
+ Format.NO_VALUE, Format.NO_VALUE, 1, sampleRate, null, null, 0, null);
+ output.format(format);
+ hasOutputFormat = true;
+ } else if (audioFormat == AUDIO_FORMAT_ALAW || audioFormat == AUDIO_FORMAT_ULAW) {
+ String type = audioFormat == AUDIO_FORMAT_ALAW ? MimeTypes.AUDIO_ALAW
+ : MimeTypes.AUDIO_ULAW;
+ int pcmEncoding = (header & 0x01) == 1 ? C.ENCODING_PCM_16BIT : C.ENCODING_PCM_8BIT;
+ Format format = Format.createAudioSampleFormat(null, type, null, Format.NO_VALUE,
+ Format.NO_VALUE, 1, 8000, pcmEncoding, null, null, 0, null);
+ output.format(format);
+ hasOutputFormat = true;
+ } else if (audioFormat != AUDIO_FORMAT_AAC) {
throw new UnsupportedFormatException("Audio format not supported: " + audioFormat);
}
hasParsedAudioDataHeader = true;
@@ -77,23 +86,28 @@ import java.util.Collections;
@Override
protected void parsePayload(ParsableByteArray data, long timeUs) {
- int packetType = data.readUnsignedByte();
- // Parse sequence header just in case it was not done before.
- if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) {
- byte[] audioSpecifiConfig = new byte[data.bytesLeft()];
- data.readBytes(audioSpecifiConfig, 0, audioSpecifiConfig.length);
- Pair audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig(
- audioSpecifiConfig);
- Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null,
- Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first,
- Collections.singletonList(audioSpecifiConfig), null, 0, null);
- output.format(format);
- hasOutputFormat = true;
- } else if (packetType == AAC_PACKET_TYPE_AAC_RAW) {
- // Sample audio AAC frames
- int bytesToWrite = data.bytesLeft();
- output.sampleData(data, bytesToWrite);
- output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, bytesToWrite, 0, null);
+ if (audioFormat == AUDIO_FORMAT_MP3) {
+ int sampleSize = data.bytesLeft();
+ output.sampleData(data, sampleSize);
+ output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
+ } else {
+ int packetType = data.readUnsignedByte();
+ if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) {
+ // Parse the sequence header.
+ byte[] audioSpecificConfig = new byte[data.bytesLeft()];
+ data.readBytes(audioSpecificConfig, 0, audioSpecificConfig.length);
+ Pair audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig(
+ audioSpecificConfig);
+ Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null,
+ Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first,
+ Collections.singletonList(audioSpecificConfig), null, 0, null);
+ output.format(format);
+ hasOutputFormat = true;
+ } else if (audioFormat != AUDIO_FORMAT_AAC || packetType == AAC_PACKET_TYPE_AAC_RAW) {
+ int sampleSize = data.bytesLeft();
+ output.sampleData(data, sampleSize);
+ output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null);
+ }
}
}
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/FlvExtractor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/FlvExtractor.java
index 0884b324c..142e57f21 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/FlvExtractor.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/FlvExtractor.java
@@ -15,6 +15,7 @@
*/
package org.telegram.messenger.exoplayer2.extractor.flv;
+import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.extractor.Extractor;
import org.telegram.messenger.exoplayer2.extractor.ExtractorInput;
import org.telegram.messenger.exoplayer2.extractor.ExtractorOutput;
@@ -126,7 +127,7 @@ public final class FlvExtractor implements Extractor, SeekMap {
}
@Override
- public void seek(long position) {
+ public void seek(long position, long timeUs) {
parserState = STATE_READING_FLV_HEADER;
bytesToNextTagHeader = 0;
}
@@ -183,10 +184,12 @@ public final class FlvExtractor implements Extractor, SeekMap {
boolean hasAudio = (flags & 0x04) != 0;
boolean hasVideo = (flags & 0x01) != 0;
if (hasAudio && audioReader == null) {
- audioReader = new AudioTagPayloadReader(extractorOutput.track(TAG_TYPE_AUDIO));
+ audioReader = new AudioTagPayloadReader(
+ extractorOutput.track(TAG_TYPE_AUDIO, C.TRACK_TYPE_AUDIO));
}
if (hasVideo && videoReader == null) {
- videoReader = new VideoTagPayloadReader(extractorOutput.track(TAG_TYPE_VIDEO));
+ videoReader = new VideoTagPayloadReader(
+ extractorOutput.track(TAG_TYPE_VIDEO, C.TRACK_TYPE_VIDEO));
}
if (metadataReader == null) {
metadataReader = new ScriptTagPayloadReader(null);
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/ScriptTagPayloadReader.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/ScriptTagPayloadReader.java
index 6cb9c1307..b2fcf3391 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/ScriptTagPayloadReader.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/ScriptTagPayloadReader.java
@@ -80,8 +80,8 @@ import java.util.Map;
}
int type = readAmfType(data);
if (type != AMF_TYPE_ECMA_ARRAY) {
- // Should never happen.
- throw new ParserException();
+ // We're not interested in this metadata.
+ return;
}
// Set the duration to the value contained in the metadata, if present.
Map metadata = readAmfEcmaArray(data);
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/VideoTagPayloadReader.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/VideoTagPayloadReader.java
index da931de90..f6c0835b5 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/VideoTagPayloadReader.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/flv/VideoTagPayloadReader.java
@@ -93,7 +93,7 @@ import org.telegram.messenger.exoplayer2.video.AvcConfig;
avcConfig.initializationData, Format.NO_VALUE, avcConfig.pixelWidthAspectRatio, null);
output.format(format);
hasOutputFormat = true;
- } else if (packetType == AVC_PACKET_TYPE_AVC_NALU) {
+ } else if (packetType == AVC_PACKET_TYPE_AVC_NALU && hasOutputFormat) {
// TODO: Deduplicate with Mp4Extractor.
// Zero the top three bytes of the array that we'll use to decode nal unit lengths, in case
// they're only 1 or 2 bytes long.
diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/mkv/MatroskaExtractor.java b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/mkv/MatroskaExtractor.java
index 3e096d673..7ee1194b3 100755
--- a/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/mkv/MatroskaExtractor.java
+++ b/TMessagesProj/src/main/java/org/telegram/messenger/exoplayer2/extractor/mkv/MatroskaExtractor.java
@@ -15,6 +15,7 @@
*/
package org.telegram.messenger.exoplayer2.extractor.mkv;
+import android.support.annotation.IntDef;
import android.util.SparseArray;
import org.telegram.messenger.exoplayer2.C;
import org.telegram.messenger.exoplayer2.Format;
@@ -26,6 +27,7 @@ import org.telegram.messenger.exoplayer2.extractor.Extractor;
import org.telegram.messenger.exoplayer2.extractor.ExtractorInput;
import org.telegram.messenger.exoplayer2.extractor.ExtractorOutput;
import org.telegram.messenger.exoplayer2.extractor.ExtractorsFactory;
+import org.telegram.messenger.exoplayer2.extractor.MpegAudioHeader;
import org.telegram.messenger.exoplayer2.extractor.PositionHolder;
import org.telegram.messenger.exoplayer2.extractor.SeekMap;
import org.telegram.messenger.exoplayer2.extractor.TrackOutput;
@@ -35,8 +37,11 @@ import org.telegram.messenger.exoplayer2.util.NalUnitUtil;
import org.telegram.messenger.exoplayer2.util.ParsableByteArray;
import org.telegram.messenger.exoplayer2.util.Util;
import org.telegram.messenger.exoplayer2.video.AvcConfig;
+import org.telegram.messenger.exoplayer2.video.ColorInfo;
import org.telegram.messenger.exoplayer2.video.HevcConfig;
import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
@@ -63,6 +68,22 @@ public final class MatroskaExtractor implements Extractor {
};
+ /**
+ * Flags controlling the behavior of the extractor.
+ */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(flag = true, value = {FLAG_DISABLE_SEEK_FOR_CUES})
+ public @interface Flags {}
+ /**
+ * Flag to disable seeking for cues.
+ *
+ * Normally (i.e. when this flag is not set) the extractor will seek to the cues element if its
+ * position is specified in the seek head and if it's after the first cluster. Setting this flag
+ * disables seeking to the cues element. If the cues element is after the first cluster then the
+ * media is treated as being unseekable.
+ */
+ public static final int FLAG_DISABLE_SEEK_FOR_CUES = 1;
+
private static final int UNSET_ENTRY_ID = -1;
private static final int BLOCK_STATE_START = 0;
@@ -84,6 +105,7 @@ public final class MatroskaExtractor implements Extractor {
private static final String CODEC_ID_VORBIS = "A_VORBIS";
private static final String CODEC_ID_OPUS = "A_OPUS";
private static final String CODEC_ID_AAC = "A_AAC";
+ private static final String CODEC_ID_MP2 = "A_MPEG/L2";
private static final String CODEC_ID_MP3 = "A_MPEG/L3";
private static final String CODEC_ID_AC3 = "A_AC3";
private static final String CODEC_ID_E_AC3 = "A_EAC3";
@@ -97,10 +119,10 @@ public final class MatroskaExtractor implements Extractor {
private static final String CODEC_ID_SUBRIP = "S_TEXT/UTF8";
private static final String CODEC_ID_VOBSUB = "S_VOBSUB";
private static final String CODEC_ID_PGS = "S_HDMV/PGS";
+ private static final String CODEC_ID_DVBSUB = "S_DVBSUB";
private static final int VORBIS_MAX_INPUT_SIZE = 8192;
private static final int OPUS_MAX_INPUT_SIZE = 5760;
- private static final int MP3_MAX_INPUT_SIZE = 4096;
private static final int ENCRYPTION_IV_SIZE = 8;
private static final int TRACK_TYPE_AUDIO = 2;
@@ -166,6 +188,23 @@ public final class MatroskaExtractor implements Extractor {
private static final int ID_PROJECTION = 0x7670;
private static final int ID_PROJECTION_PRIVATE = 0x7672;
private static final int ID_STEREO_MODE = 0x53B8;
+ private static final int ID_COLOUR = 0x55B0;
+ private static final int ID_COLOUR_RANGE = 0x55B9;
+ private static final int ID_COLOUR_TRANSFER = 0x55BA;
+ private static final int ID_COLOUR_PRIMARIES = 0x55BB;
+ private static final int ID_MAX_CLL = 0x55BC;
+ private static final int ID_MAX_FALL = 0x55BD;
+ private static final int ID_MASTERING_METADATA = 0x55D0;
+ private static final int ID_PRIMARY_R_CHROMATICITY_X = 0x55D1;
+ private static final int ID_PRIMARY_R_CHROMATICITY_Y = 0x55D2;
+ private static final int ID_PRIMARY_G_CHROMATICITY_X = 0x55D3;
+ private static final int ID_PRIMARY_G_CHROMATICITY_Y = 0x55D4;
+ private static final int ID_PRIMARY_B_CHROMATICITY_X = 0x55D5;
+ private static final int ID_PRIMARY_B_CHROMATICITY_Y = 0x55D6;
+ private static final int ID_WHITE_POINT_CHROMATICITY_X = 0x55D7;
+ private static final int ID_WHITE_POINT_CHROMATICITY_Y = 0x55D8;
+ private static final int ID_LUMNINANCE_MAX = 0x55D9;
+ private static final int ID_LUMNINANCE_MIN = 0x55DA;
private static final int LACING_NONE = 0;
private static final int LACING_XIPH = 1;
@@ -220,6 +259,7 @@ public final class MatroskaExtractor implements Extractor {
private final EbmlReader reader;
private final VarintReader varintReader;
private final SparseArray