public Ac3PassthroughTrackRenderer(SampleSource source, Handler eventHandler,
        EventListener listener) {
    mSource = source.register();
    mEventHandler = eventHandler;
    mEventListener = listener;
    mTrackIndex = -1;
    mSampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
    mSampleHolder.ensureSpaceForWrite(DEFAULT_INPUT_BUFFER_SIZE);
    mOutputBuffer = ByteBuffer.allocate(DEFAULT_OUTPUT_BUFFER_SIZE);
    mFormatHolder = new MediaFormatHolder();
    AUDIO_TRACK.restart();
    mCodecCounters = new CodecCounters();
    mMonitor = new AudioTrackMonitor();
    mAudioClock = new AudioClock();
    mTracksIndex = new ArrayList<>();
}
@Override
public void buildRenderers(MpegTsPlayer mpegTsPlayer, DataSource dataSource,
        RendererBuilderCallback callback) {
    // Build the video and audio renderers.
    SampleExtractor extractor = dataSource == null
            ? new MpegTsSampleExtractor(mBufferManager, mBufferListener)
            : new MpegTsSampleExtractor(dataSource, mBufferManager, mBufferListener);
    SampleSource sampleSource = new MpegTsSampleSource(extractor);
    MpegTsVideoTrackRenderer videoRenderer = new MpegTsVideoTrackRenderer(mContext,
            sampleSource, mpegTsPlayer.getMainHandler(), mpegTsPlayer);
    // TODO: Only using Ac3PassthroughTrackRenderer for A/V sync issue. We will use
    // {@link Ac3TrackRenderer} when we use ExoPlayer's extractor.
    TrackRenderer audioRenderer = new Ac3PassthroughTrackRenderer(sampleSource,
            mpegTsPlayer.getMainHandler(), mpegTsPlayer);
    Cea708TextTrackRenderer textRenderer = new Cea708TextTrackRenderer(sampleSource);

    TrackRenderer[] renderers = new TrackRenderer[MpegTsPlayer.RENDERER_COUNT];
    renderers[MpegTsPlayer.TRACK_TYPE_VIDEO] = videoRenderer;
    renderers[MpegTsPlayer.TRACK_TYPE_AUDIO] = audioRenderer;
    renderers[MpegTsPlayer.TRACK_TYPE_TEXT] = textRenderer;
    callback.onRenderers(null, renderers);
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    source.continueBuffering(trackIndex, positionUs);
    if (!inputStreamEnded && pendingMetadata == null) {
        int result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
        if (result == SampleSource.SAMPLE_READ) {
            pendingMetadataTimestamp = sampleHolder.timeUs;
            try {
                pendingMetadata = metadataParser.parse(sampleHolder.data.array(),
                        sampleHolder.size);
            } catch (IOException e) {
                throw new ExoPlaybackException(e);
            }
            sampleHolder.data.clear();
        } else if (result == SampleSource.END_OF_STREAM) {
            inputStreamEnded = true;
        }
    }
    if (pendingMetadata != null && pendingMetadataTimestamp <= positionUs) {
        invokeRenderer(pendingMetadata);
        pendingMetadata = null;
    }
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs, boolean sourceIsReady)
        throws ExoPlaybackException {
    if (!inputStreamEnded && pendingMetadata == null) {
        sampleHolder.clearData();
        int result = readSource(positionUs, formatHolder, sampleHolder);
        if (result == SampleSource.SAMPLE_READ) {
            pendingMetadataTimestamp = sampleHolder.timeUs;
            try {
                pendingMetadata = metadataParser.parse(sampleHolder.data.array(),
                        sampleHolder.size);
            } catch (IOException e) {
                throw new ExoPlaybackException(e);
            }
        } else if (result == SampleSource.END_OF_STREAM) {
            inputStreamEnded = true;
        }
    }
    if (pendingMetadata != null && pendingMetadataTimestamp <= positionUs) {
        invokeRenderer(pendingMetadata);
        pendingMetadata = null;
    }
}
private void preparePlayer() {
    SampleSource sampleSource = new FrameworkSampleSource(this,
            Uri.parse(mVideo.getContentUrl()), /* headers */ null, RENDERER_COUNT);

    // Build the track renderers.
    videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

    // Set up the player.
    player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
    player.addListener(this);

    // Build the player controls.
    mediaController.setMediaPlayer(new PlayerControl(player));
    mediaController.setEnabled(true);

    player.prepare(videoRenderer, audioRenderer);
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
 *     content is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param minBufferMultiplicationFactor When instantiating an underlying
 *     {@link android.media.AudioTrack}, the size of the track's buffer is calculated as this
 *     value multiplied by the minimum buffer size obtained from
 *     {@link android.media.AudioTrack#getMinBufferSize(int, int, int)}. The multiplication
 *     factor must be greater than or equal to 1.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public MediaCodecAudioTrackRenderer(SampleSource source, DrmSessionManager drmSessionManager,
        boolean playClearSamplesWithoutKeys, float minBufferMultiplicationFactor,
        Handler eventHandler, EventListener eventListener) {
    super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
    Assertions.checkState(minBufferMultiplicationFactor >= 1);
    this.minBufferMultiplicationFactor = minBufferMultiplicationFactor;
    this.eventListener = eventListener;
    audioTrackReleasingConditionVariable = new ConditionVariable(true);
    if (Util.SDK_INT >= 19) {
        audioTimestampCompat = new AudioTimestampCompatV19();
    } else {
        audioTimestampCompat = new NoopAudioTimestampCompat();
    }
    if (Util.SDK_INT >= 18) {
        try {
            audioTrackGetLatencyMethod =
                    AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
        } catch (NoSuchMethodException e) {
            // There's no guarantee this method exists. Do nothing.
        }
    }
    playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
    volume = 1.0f;
}
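// Hedged illustration (not part of the renderer): the minBufferMultiplicationFactor Javadoc
// above reduces to the calculation below. The format values here are hypothetical
// placeholders; the real renderer derives them from the negotiated audio format. Assumes
// android.media.AudioFormat and android.media.AudioTrack are imported.
static int computeAudioTrackBufferSize(float minBufferMultiplicationFactor) {
    int sampleRateHz = 44100; // assumed example rate
    int channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    int encoding = AudioFormat.ENCODING_PCM_16BIT;
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRateHz, channelConfig, encoding);
    // Scale the platform minimum by the factor, which must be >= 1.
    return (int) (minBufferSize * minBufferMultiplicationFactor);
}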
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param drmSessionManager For use with encrypted content. May be null if support for encrypted
 *     content is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param minBufferMultiplicationFactor When instantiating an underlying {@link AudioTrack},
 *     the size of the track's buffer is calculated as this value multiplied by the minimum
 *     buffer size obtained from {@link AudioTrack#getMinBufferSize(int, int, int)}. The
 *     multiplication factor must be greater than or equal to 1.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public MediaCodecSpeedControllableAudioTrackRenderer(SampleSource source,
        DrmSessionManager drmSessionManager, boolean playClearSamplesWithoutKeys,
        float minBufferMultiplicationFactor, Handler eventHandler, EventListener eventListener) {
    super(source, drmSessionManager, playClearSamplesWithoutKeys, eventHandler, eventListener);
    Assertions.checkState(minBufferMultiplicationFactor >= 1);
    this.minBufferMultiplicationFactor = minBufferMultiplicationFactor;
    this.eventListener = eventListener;
    audioTrackReleasingConditionVariable = new ConditionVariable(true);
    if (Util.SDK_INT >= 19) {
        audioTimestampCompat = new AudioTimestampCompatV19();
    } else {
        audioTimestampCompat = new NoopAudioTimestampCompat();
    }
    if (Util.SDK_INT >= 18) {
        try {
            audioTrackGetLatencyMethod =
                    AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
        } catch (NoSuchMethodException e) {
            // There's no guarantee this method exists. Do nothing.
        }
    }
    playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
    volume = 1.0f;
}
private void readFormat() throws IOException, ExoPlaybackException {
    int result = mSource.readData(mTrackIndex, mCurrentPositionUs, mFormatHolder, mSampleHolder);
    if (result == SampleSource.FORMAT_READ) {
        onInputFormatChanged(mFormatHolder);
    }
}
private boolean feedInputBuffer() throws IOException, ExoPlaybackException {
    if (mInputStreamEnded) {
        return false;
    }
    mSampleHolder.data.clear();
    mSampleHolder.size = 0;
    int result = mSource.readData(mTrackIndex, mPresentationTimeUs, mFormatHolder, mSampleHolder);
    switch (result) {
        case SampleSource.NOTHING_READ: {
            return false;
        }
        case SampleSource.FORMAT_READ: {
            Log.i(TAG, "Format was read again");
            onInputFormatChanged(mFormatHolder);
            return true;
        }
        case SampleSource.END_OF_STREAM: {
            Log.i(TAG, "End of stream from SampleSource");
            mInputStreamEnded = true;
            return false;
        }
        default: {
            mSampleHolder.data.flip();
            decodeDone(mSampleHolder.data, mSampleHolder.timeUs);
            return true;
        }
    }
}
@Override
public int readSample(int track, SampleHolder sampleHolder) {
    if (track == mCea708TextTrackIndex) {
        if (mCea708TextTrackSelected && !mPendingCcSamples.isEmpty()) {
            SampleHolder holder = mPendingCcSamples.remove(0);
            holder.data.flip();
            sampleHolder.timeUs = holder.timeUs;
            sampleHolder.data.put(holder.data);
            mCcSamplePool.releaseSample(holder);
            return SampleSource.SAMPLE_READ;
        } else {
            return mVideoTrackIndex < 0 || mReachedEos.get(mVideoTrackIndex)
                    ? SampleSource.END_OF_STREAM
                    : SampleSource.NOTHING_READ;
        }
    }
    int result = mSampleExtractor.readSample(track, sampleHolder);
    switch (result) {
        case SampleSource.END_OF_STREAM: {
            mReachedEos.set(track, true);
            break;
        }
        case SampleSource.SAMPLE_READ: {
            if (mCea708TextTrackSelected && track == mVideoTrackIndex
                    && sampleHolder.data != null) {
                mCcParser.mayParseClosedCaption(sampleHolder.data, sampleHolder.timeUs);
            }
            break;
        }
    }
    return result;
}
public MpegTsVideoTrackRenderer(Context context, SampleSource source, Handler handler,
        MediaCodecVideoTrackRenderer.EventListener listener) {
    super(context, source, MediaCodecSelector.DEFAULT,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, VIDEO_PLAYBACK_DEADLINE_IN_MS,
            handler, listener, DROPPED_FRAMES_NOTIFICATION_THRESHOLD);
    mIsSwCodecEnabled = CommonFeatures.USE_SW_CODEC_FOR_SD.isEnabled(context);
}
private int fetchSample(int track, SampleHolder sample, ConditionVariable conditionVariable) {
    mSampleSourceReader.continueBuffering(track, mCurrentPosition);

    MediaFormatHolder formatHolder = new MediaFormatHolder();
    sample.clearData();
    int ret = mSampleSourceReader.readData(track, mCurrentPosition, formatHolder, sample);
    if (ret == SampleSource.SAMPLE_READ) {
        if (mCurrentPosition < sample.timeUs) {
            mCurrentPosition = sample.timeUs;
        }
        try {
            Long lastExtractedPositionUs = mLastExtractedPositionUsMap.get(track);
            if (lastExtractedPositionUs == null) {
                mLastExtractedPositionUsMap.put(track, sample.timeUs);
            } else {
                mLastExtractedPositionUsMap.put(track,
                        Math.max(lastExtractedPositionUs, sample.timeUs));
            }
            queueSample(track, sample, conditionVariable);
        } catch (IOException e) {
            mLastExtractedPositionUsMap.clear();
            mMetEos = true;
            mSampleBuffer.setEos();
        }
    } else if (ret == SampleSource.END_OF_STREAM) {
        mTrackMetEos[track] = true;
        for (int i = 0; i < mTrackMetEos.length; ++i) {
            if (!mTrackMetEos[i]) {
                break;
            }
            if (i == mTrackMetEos.length - 1) {
                mMetEos = true;
                mSampleBuffer.setEos();
            }
        }
    }
    // TODO: Handle SampleSource.FORMAT_READ for dynamic resolution change. b/28169263
    return ret;
}
public Cea708TextTrackRenderer(SampleSource source) {
    mSource = source.register();
    mTrackIndex = -1;
    mSampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
    mSampleHolder.ensureSpaceForWrite(DEFAULT_INPUT_BUFFER_SIZE);
    mFormatHolder = new MediaFormatHolder();
}
public int dequeueSample(SampleHolder sample) {
    SampleHolder sampleFromQueue = mQueue.poll();
    if (sampleFromQueue == null) {
        return SampleSource.NOTHING_READ;
    }
    sample.size = sampleFromQueue.size;
    sample.flags = sampleFromQueue.flags;
    sample.timeUs = sampleFromQueue.timeUs;
    sample.clearData();
    sampleFromQueue.data.position(0).limit(sample.size);
    sample.data.put(sampleFromQueue.data);
    mSamplePool.releaseSample(sampleFromQueue);
    return SampleSource.SAMPLE_READ;
}
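// Hedged usage sketch (not from the source): a caller polls dequeueSample and branches on
// the SampleSource result constants. "onSample" is a hypothetical callback; SampleQueue is
// the queue type used by the readSample implementation below.
private boolean pollOneSample(SampleQueue queue, SampleHolder holder) {
    int result = queue.dequeueSample(holder);
    if (result == SampleSource.SAMPLE_READ) {
        // holder now carries a copy of the queued sample's payload and timestamp.
        onSample(holder.data, holder.timeUs);
        return true;
    }
    // NOTHING_READ: the queue was empty; retry on the next work cycle.
    return false;
}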
@Override
public synchronized int readSample(int track, SampleHolder sampleHolder) {
    SampleQueue queue = mPlayingSampleQueues[track];
    Assert.assertNotNull(queue);
    int result = queue.dequeueSample(sampleHolder);
    if (result != SampleSource.SAMPLE_READ && reachedEos()) {
        return SampleSource.END_OF_STREAM;
    }
    return result;
}
@Override
public int readSample(int track, SampleHolder outSample) {
    Assertions.checkState(mTrackSelected[track]);
    maybeReadSample(mReadSampleQueues.get(track), track);
    int result = mReadSampleQueues.get(track).dequeueSample(outSample);
    if ((result != SampleSource.SAMPLE_READ && mEos) || mError) {
        return SampleSource.END_OF_STREAM;
    }
    return result;
}
/**
 * @param source A source from which samples containing EIA-608 closed captions can be read.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should
 *     be invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
        Looper textRendererLooper) {
    this.source = source.register();
    this.textRenderer = Assertions.checkNotNull(textRenderer);
    textRendererHandler = textRendererLooper == null ? null
            : new Handler(textRendererLooper, this);
    eia608Parser = new Eia608Parser();
    formatHolder = new MediaFormatHolder();
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
    captionStringBuilder = new StringBuilder();
    pendingCaptionLists = new TreeSet<>();
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    source.continueBuffering(trackIndex, positionUs);
    if (isSamplePending()) {
        maybeParsePendingSample(positionUs);
    }
    int result = inputStreamEnded ? SampleSource.END_OF_STREAM : SampleSource.SAMPLE_READ;
    while (!isSamplePending() && result == SampleSource.SAMPLE_READ) {
        result = source.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
        if (result == SampleSource.SAMPLE_READ) {
            maybeParsePendingSample(positionUs);
        } else if (result == SampleSource.END_OF_STREAM) {
            inputStreamEnded = true;
        }
    }
    while (!pendingCaptionLists.isEmpty()) {
        if (pendingCaptionLists.first().timeUs > positionUs) {
            // We're too early to render any of the pending caption lists.
            return;
        }
        // Remove and consume the next caption list.
        ClosedCaptionList nextCaptionList = pendingCaptionLists.pollFirst();
        consumeCaptionList(nextCaptionList);
        // Update the renderer, unless the caption list was marked for decoding only.
        if (!nextCaptionList.decodeOnly) {
            invokeRenderer(caption);
        }
    }
}
/**
 * @param source A source from which samples containing metadata can be read.
 * @param metadataParser A parser for parsing the metadata.
 * @param metadataRenderer The metadata renderer to receive the parsed metadata.
 * @param metadataRendererLooper The looper associated with the thread on which metadataRenderer
 *     should be invoked. If the renderer makes use of standard Android UI components, then this
 *     should normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public MetadataTrackRenderer(SampleSource source, MetadataParser<T> metadataParser,
        MetadataRenderer<T> metadataRenderer, Looper metadataRendererLooper) {
    this.source = source.register();
    this.metadataParser = Assertions.checkNotNull(metadataParser);
    this.metadataRenderer = Assertions.checkNotNull(metadataRenderer);
    this.metadataHandler = metadataRendererLooper == null ? null
            : new Handler(metadataRendererLooper, this);
    formatHolder = new MediaFormatHolder();
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
}
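// Hedged usage sketch (not from the source): constructing the renderer so parsed metadata
// is delivered on the application's main thread, per the Javadoc above. "parser" and
// "renderer" stand in for application-supplied implementations.
static <T> MetadataTrackRenderer<T> buildMainThreadMetadataRenderer(SampleSource source,
        MetadataParser<T> parser, MetadataRenderer<T> renderer) {
    return new MetadataTrackRenderer<>(source, parser, renderer, Looper.getMainLooper());
}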
public DebugMediaCodecVideoTrackRenderer(Context context, SampleSource source,
        MediaCodecSelector mediaCodecSelector, int videoScalingMode, long allowedJoiningTimeMs,
        Handler eventHandler, EventListener eventListener, int maxDroppedFrameCountToNotify) {
    super(context, source, mediaCodecSelector, videoScalingMode, allowedJoiningTimeMs, null,
            false, eventHandler, eventListener, maxDroppedFrameCountToNotify);
    startIndex = 0;
    queueSize = 0;
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param scaleToFit Boolean that indicates if video frames should be scaled to fit when
 *     rendering.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
 *     invocations of {@link EventListener#onDroppedFrames(int, long)}.
 */
public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit, Handler eventHandler,
        EventListener eventListener, int maxDroppedFrameCountToNotify) {
    super(source);
    this.scaleToFit = scaleToFit;
    this.eventHandler = eventHandler;
    this.eventListener = eventListener;
    this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
    previousWidth = -1;
    previousHeight = -1;
    formatHolder = new MediaFormatHolder();
    outputMode = VpxDecoder.OUTPUT_MODE_UNKNOWN;
}
private boolean feedInputBuffer(long positionUs) throws VpxDecoderException {
    if (inputStreamEnded) {
        return false;
    }
    if (inputBuffer == null) {
        inputBuffer = decoder.dequeueInputBuffer();
        if (inputBuffer == null) {
            return false;
        }
    }
    int result = readSource(positionUs, formatHolder, inputBuffer.sampleHolder);
    if (result == SampleSource.NOTHING_READ) {
        return false;
    }
    if (result == SampleSource.FORMAT_READ) {
        format = formatHolder.format;
        return true;
    }
    if (result == SampleSource.END_OF_STREAM) {
        inputBuffer.setFlag(Buffer.FLAG_END_OF_STREAM);
        decoder.queueInputBuffer(inputBuffer);
        inputBuffer = null;
        inputStreamEnded = true;
        return false;
    }
    inputBuffer.width = format.width;
    inputBuffer.height = format.height;
    if (inputBuffer.sampleHolder.isDecodeOnly()) {
        inputBuffer.setFlag(Buffer.FLAG_DECODE_ONLY);
    }
    decoder.queueInputBuffer(inputBuffer);
    inputBuffer = null;
    return true;
}
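// Hedged sketch (not the renderer's actual doSomeWork): the feedInputBuffer variants in this
// section share one render-loop contract: keep calling while they return true, and stop once
// the source yields NOTHING_READ or END_OF_STREAM. "drainOutputBuffer" is a hypothetical
// counterpart that consumes decoded output.
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs) throws ExoPlaybackException {
    try {
        while (drainOutputBuffer(positionUs)) {}
        while (feedInputBuffer(positionUs)) {}
    } catch (VpxDecoderException e) {
        throw new ExoPlaybackException(e);
    }
}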
private boolean readFormat(long positionUs) {
    int result = readSource(positionUs, formatHolder, null);
    if (result == SampleSource.FORMAT_READ) {
        format = formatHolder.format;
        return true;
    }
    return false;
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public LibopusAudioTrackRenderer(SampleSource source, Handler eventHandler,
        EventListener eventListener) {
    super(source);
    this.eventHandler = eventHandler;
    this.eventListener = eventListener;
    this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
    audioTrack = new AudioTrack();
    formatHolder = new MediaFormatHolder();
}
private boolean feedInputBuffer(long positionUs) throws OpusDecoderException {
    if (inputStreamEnded) {
        return false;
    }
    if (inputBuffer == null) {
        inputBuffer = decoder.dequeueInputBuffer();
        if (inputBuffer == null) {
            return false;
        }
    }
    int result = readSource(positionUs, formatHolder, inputBuffer.sampleHolder);
    if (result == SampleSource.NOTHING_READ) {
        return false;
    }
    if (result == SampleSource.FORMAT_READ) {
        format = formatHolder.format;
        return true;
    }
    if (result == SampleSource.END_OF_STREAM) {
        inputBuffer.setFlag(Buffer.FLAG_END_OF_STREAM);
        decoder.queueInputBuffer(inputBuffer);
        inputBuffer = null;
        inputStreamEnded = true;
        return false;
    }
    if (inputBuffer.sampleHolder.isDecodeOnly()) {
        inputBuffer.setFlag(Buffer.FLAG_DECODE_ONLY);
    }
    decoder.queueInputBuffer(inputBuffer);
    inputBuffer = null;
    return true;
}
private boolean readFormat(long positionUs) {
    int result = readSource(positionUs, formatHolder, null);
    if (result == SampleSource.FORMAT_READ) {
        format = formatHolder.format;
        audioTrack.configure(MimeTypes.AUDIO_RAW, format.channelCount, format.sampleRate,
                C.ENCODING_PCM_16BIT);
        return true;
    }
    return false;
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public LibflacAudioTrackRenderer(SampleSource source, Handler eventHandler,
        EventListener eventListener) {
    super(source);
    this.eventHandler = eventHandler;
    this.eventListener = eventListener;
    this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
    this.audioTrack = new AudioTrack();
    formatHolder = new MediaFormatHolder();
}
private boolean feedInputBuffer(long positionUs) throws FlacDecoderException {
    if (inputStreamEnded) {
        return false;
    }
    if (inputBuffer == null) {
        inputBuffer = decoder.dequeueInputBuffer();
        if (inputBuffer == null) {
            return false;
        }
    }
    int result = readSource(positionUs, formatHolder, inputBuffer.sampleHolder);
    if (result == SampleSource.NOTHING_READ) {
        return false;
    }
    if (result == SampleSource.FORMAT_READ) {
        format = formatHolder.format;
        return true;
    }
    if (result == SampleSource.END_OF_STREAM) {
        inputBuffer.setFlag(Buffer.FLAG_END_OF_STREAM);
        decoder.queueInputBuffer(inputBuffer);
        inputBuffer = null;
        inputStreamEnded = true;
        return false;
    }
    if (inputBuffer.sampleHolder.isDecodeOnly()) {
        inputBuffer.setFlag(Buffer.FLAG_DECODE_ONLY);
    }
    decoder.queueInputBuffer(inputBuffer);
    inputBuffer = null;
    return true;
}
/**
 * @param source A source from which samples containing EIA-608 closed captions can be read.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should
 *     be invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
        Looper textRendererLooper) {
    super(source);
    this.textRenderer = Assertions.checkNotNull(textRenderer);
    textRendererHandler = textRendererLooper == null ? null
            : new Handler(textRendererLooper, this);
    eia608Parser = new Eia608Parser();
    formatHolder = new MediaFormatHolder();
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
    captionStringBuilder = new StringBuilder();
    pendingCaptionLists = new TreeSet<>();
}
@Override
protected void doSomeWork(long positionUs, long elapsedRealtimeUs, boolean sourceIsReady)
        throws ExoPlaybackException {
    if (isSamplePending()) {
        maybeParsePendingSample(positionUs);
    }
    int result = inputStreamEnded ? SampleSource.END_OF_STREAM : SampleSource.SAMPLE_READ;
    while (!isSamplePending() && result == SampleSource.SAMPLE_READ) {
        result = readSource(positionUs, formatHolder, sampleHolder);
        if (result == SampleSource.SAMPLE_READ) {
            maybeParsePendingSample(positionUs);
        } else if (result == SampleSource.END_OF_STREAM) {
            inputStreamEnded = true;
        }
    }
    while (!pendingCaptionLists.isEmpty()) {
        if (pendingCaptionLists.first().timeUs > positionUs) {
            // We're too early to render any of the pending caption lists.
            return;
        }
        // Remove and consume the next caption list.
        ClosedCaptionList nextCaptionList = pendingCaptionLists.pollFirst();
        consumeCaptionList(nextCaptionList);
        // Update the renderer, unless the caption list was marked for decoding only.
        if (!nextCaptionList.decodeOnly) {
            invokeRenderer(caption);
        }
    }
}
/**
 * @param source A source from which samples containing metadata can be read.
 * @param metadataParser A parser for parsing the metadata.
 * @param metadataRenderer The metadata renderer to receive the parsed metadata.
 * @param metadataRendererLooper The looper associated with the thread on which metadataRenderer
 *     should be invoked. If the renderer makes use of standard Android UI components, then this
 *     should normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public MetadataTrackRenderer(SampleSource source, MetadataParser<T> metadataParser,
        MetadataRenderer<T> metadataRenderer, Looper metadataRendererLooper) {
    super(source);
    this.metadataParser = Assertions.checkNotNull(metadataParser);
    this.metadataRenderer = Assertions.checkNotNull(metadataRenderer);
    this.metadataHandler = metadataRendererLooper == null ? null
            : new Handler(metadataRendererLooper, this);
    formatHolder = new MediaFormatHolder();
    sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
}
private void preparePlayer() {
    Log.d(TAG, "preparePlayer()");
    SampleSource sampleSource = new FrameworkSampleSource(this,
            Uri.parse(mVideo.getContentUrl()), /* headers */ null, RENDERER_COUNT);

    // Build the track renderers.
    videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

    // Set up the player.
    player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
    player.addListener(this);
    player.prepare(videoRenderer, audioRenderer);

    if (mIsOnTv) {
        // This PlayerControl must stay in sync with PlaybackOverlayFragment.
        // We created methods such as PlaybackOverlayFragment.pressPlay() to request that the
        // fragment change the playback state. When the fragment receives a playback request,
        // it updates the UI and then calls a method in this activity according to
        // PlaybackOverlayFragment.OnPlayPauseClickedListener.
        playerControl = new PlayerControl(player);
    } else {
        // Build the player controls.
        mediaController.setMediaPlayer(new PlayerControl(player));
        mediaController.setEnabled(true);
    }
    maybeStartPlayback();
}
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_player);
    View root = findViewById(R.id.root);

    mediaController = new MediaController(this);
    // Overscan-safe padding on a 1920 x 1080 TV.
    mediaController.setPadding(48, 27, 48, 27);
    mediaController.setAnchorView(root);
    shutterView = findViewById(R.id.shutter);
    surfaceView = (VideoSurfaceView) findViewById(R.id.surface_view);
    surfaceView.getHolder().addCallback(this);

    SampleSource sampleSource = new FrameworkSampleSource(this, Uri.parse(url),
            /* headers */ null, RENDERER_COUNT);

    // Build the track renderers.
    videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

    // Set up the player.
    player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
    player.addListener(this);

    // Build the player controls.
    mediaController.setMediaPlayer(new PlayerControl(player));
    mediaController.setEnabled(true);

    player.prepare(videoRenderer, audioRenderer);
}
private SampleSource createSource() {
    Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
    DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(handler, null);
    DataSource dataSource = new DefaultUriDataSource(getContext(), bandwidthMeter,
            "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36"
                    + " (KHTML, like Gecko) Chrome/46.0.2490.76 Mobile Safari/537.36");
    ExtractorSampleSource sampleSource = new ExtractorSampleSource(mUri, dataSource, allocator,
            BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE);
    return sampleSource;
}
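// Hedged wiring sketch (not from the source): the extractor-backed source returned by
// createSource() plugs into renderers the same way the FrameworkSampleSource examples above
// do. The renderer count of 2 (video + audio) and the method name are illustrative.
private void prepareFromExtractorSource() {
    SampleSource sampleSource = createSource();
    TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
            MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
    TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
    ExoPlayer player = ExoPlayer.Factory.newInstance(2, 1000, 5000);
    player.prepare(videoRenderer, audioRenderer);
    player.setPlayWhenReady(true);
}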