public Ac3PassthroughTrackRenderer(SampleSource source, Handler eventHandler,
    EventListener listener) {
  mSource = source.register();
  mEventHandler = eventHandler;
  mEventListener = listener;
  mTrackIndex = -1;
  mSampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
  mSampleHolder.ensureSpaceForWrite(DEFAULT_INPUT_BUFFER_SIZE);
  mOutputBuffer = ByteBuffer.allocate(DEFAULT_OUTPUT_BUFFER_SIZE);
  mFormatHolder = new MediaFormatHolder();
  AUDIO_TRACK.restart();
  mCodecCounters = new CodecCounters();
  mMonitor = new AudioTrackMonitor();
  mAudioClock = new AudioClock();
  mTracksIndex = new ArrayList<>();
}
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder) {
  Assertions.checkState(mPrepared);
  Assertions.checkState(mTrackStates.get(track) != TRACK_STATE_DISABLED);
  if (mPendingDiscontinuities.get(track)) {
    return NOTHING_READ;
  }
  if (mTrackStates.get(track) != TRACK_STATE_FORMAT_SENT) {
    mSampleExtractor.getTrackMediaFormat(track, formatHolder);
    mTrackStates.set(track, TRACK_STATE_FORMAT_SENT);
    return FORMAT_READ;
  }
  mPendingSeekPositionUs = C.UNKNOWN_TIME_US;
  return mSampleExtractor.readSample(track, sampleHolder);
}
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
  Assertions.checkState(prepared);
  Assertions.checkState(trackStates[track] != TRACK_STATE_DISABLED);
  if (pendingDiscontinuities[track]) {
    pendingDiscontinuities[track] = false;
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity) {
    return NOTHING_READ;
  }
  if (trackStates[track] != TRACK_STATE_FORMAT_SENT) {
    sampleExtractor.getTrackMediaFormat(track, formatHolder);
    trackStates[track] = TRACK_STATE_FORMAT_SENT;
    return FORMAT_READ;
  }
  seekPositionUs = C.UNKNOWN_TIME_US;
  return sampleExtractor.readSample(track, sampleHolder);
}
private void onInputFormatChanged(MediaFormatHolder formatHolder) throws ExoPlaybackException {
  mFormat = formatHolder.format;
  if (DEBUG) {
    Log.d(TAG, "AudioTrack was configured to FORMAT: " + mFormat.toString());
  }
  clearDecodeState();
  AUDIO_TRACK.reconfigure(mFormat.getFrameworkMediaFormatV16());
}
private int fetchSample(int track, SampleHolder sample, ConditionVariable conditionVariable) {
  mSampleSourceReader.continueBuffering(track, mCurrentPosition);
  MediaFormatHolder formatHolder = new MediaFormatHolder();
  sample.clearData();
  int ret = mSampleSourceReader.readData(track, mCurrentPosition, formatHolder, sample);
  if (ret == SampleSource.SAMPLE_READ) {
    if (mCurrentPosition < sample.timeUs) {
      mCurrentPosition = sample.timeUs;
    }
    try {
      Long lastExtractedPositionUs = mLastExtractedPositionUsMap.get(track);
      if (lastExtractedPositionUs == null) {
        mLastExtractedPositionUsMap.put(track, sample.timeUs);
      } else {
        mLastExtractedPositionUsMap.put(track,
            Math.max(lastExtractedPositionUs, sample.timeUs));
      }
      queueSample(track, sample, conditionVariable);
    } catch (IOException e) {
      mLastExtractedPositionUsMap.clear();
      mMetEos = true;
      mSampleBuffer.setEos();
    }
  } else if (ret == SampleSource.END_OF_STREAM) {
    mTrackMetEos[track] = true;
    for (int i = 0; i < mTrackMetEos.length; ++i) {
      if (!mTrackMetEos[i]) {
        break;
      }
      if (i == mTrackMetEos.length - 1) {
        mMetEos = true;
        mSampleBuffer.setEos();
      }
    }
  }
  // TODO: Handle SampleSource.FORMAT_READ for dynamic resolution change. b/28169263
  return ret;
}
public Cea708TextTrackRenderer(SampleSource source) {
  mSource = source.register();
  mTrackIndex = -1;
  mSampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DIRECT);
  mSampleHolder.ensureSpaceForWrite(DEFAULT_INPUT_BUFFER_SIZE);
  mFormatHolder = new MediaFormatHolder();
}
@Override
protected void onInputFormatChanged(MediaFormatHolder holder) throws ExoPlaybackException {
  super.onInputFormatChanged(holder);
  pendingPixelWidthHeightRatio = holder.format.pixelWidthHeightRatio == MediaFormat.NO_VALUE
      ? 1 : holder.format.pixelWidthHeightRatio;
  pendingRotationDegrees = holder.format.rotationDegrees == MediaFormat.NO_VALUE
      ? 0 : holder.format.rotationDegrees;
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) {
  downstreamPositionUs = playbackPositionUs;
  if (pendingDiscontinuities[track]) {
    pendingDiscontinuities[track] = false;
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity || isPendingReset()) {
    return NOTHING_READ;
  }
  InternalTrackOutput sampleQueue = sampleQueues.valueAt(track);
  if (pendingMediaFormat[track]) {
    formatHolder.format = sampleQueue.getFormat();
    formatHolder.drmInitData = drmInitData;
    pendingMediaFormat[track] = false;
    return FORMAT_READ;
  }
  if (sampleQueue.getSample(sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    if (havePendingNextSampleUs) {
      // Set the offset to make the timestamp of this sample equal to pendingNextSampleUs.
      sampleTimeOffsetUs = pendingNextSampleUs - sampleHolder.timeUs;
      havePendingNextSampleUs = false;
    }
    sampleHolder.timeUs += sampleTimeOffsetUs;
    return SAMPLE_READ;
  }
  if (loadingFinished) {
    return END_OF_STREAM;
  }
  return NOTHING_READ;
}
/**
 * @param source A source from which samples containing EIA-608 closed captions can be read.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should be
 *     invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
    Looper textRendererLooper) {
  this.source = source.register();
  this.textRenderer = Assertions.checkNotNull(textRenderer);
  textRendererHandler = textRendererLooper == null ? null
      : new Handler(textRendererLooper, this);
  eia608Parser = new Eia608Parser();
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  captionStringBuilder = new StringBuilder();
  pendingCaptionLists = new TreeSet<>();
}
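// Usage sketch (illustrative only, not taken from this source): constructing the renderer so
// that captions are delivered on the application's main thread. The names captionSource,
// subtitleView and playerActivity are hypothetical and assumed to exist in the hosting code.
SampleSource captionSource = buildCaptionSampleSource();      // hypothetical helper
TextRenderer subtitleView = playerActivity.getSubtitleView(); // hypothetical UI-backed TextRenderer
Eia608TrackRenderer captionRenderer =
    new Eia608TrackRenderer(captionSource, subtitleView, playerActivity.getMainLooper());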
/**
 * @param source A source from which samples containing metadata can be read.
 * @param metadataParser A parser for parsing the metadata.
 * @param metadataRenderer The metadata renderer to receive the parsed metadata.
 * @param metadataRendererLooper The looper associated with the thread on which metadataRenderer
 *     should be invoked. If the renderer makes use of standard Android UI components, then this
 *     should normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public MetadataTrackRenderer(SampleSource source, MetadataParser<T> metadataParser,
    MetadataRenderer<T> metadataRenderer, Looper metadataRendererLooper) {
  this.source = source.register();
  this.metadataParser = Assertions.checkNotNull(metadataParser);
  this.metadataRenderer = Assertions.checkNotNull(metadataRenderer);
  this.metadataHandler = metadataRendererLooper == null ? null
      : new Handler(metadataRendererLooper, this);
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param scaleToFit Whether video frames should be scaled to fit when rendering.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
 *     invocations of {@link EventListener#onDroppedFrames(int, long)}.
 */
public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit, Handler eventHandler,
    EventListener eventListener, int maxDroppedFrameCountToNotify) {
  super(source);
  this.scaleToFit = scaleToFit;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
  previousWidth = -1;
  previousHeight = -1;
  formatHolder = new MediaFormatHolder();
  outputMode = VpxDecoder.OUTPUT_MODE_UNKNOWN;
}
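// Usage sketch (illustrative only): constructing the renderer with event delivery on the main
// thread and an arbitrary dropped-frame notification threshold. videoSampleSource and
// playerActivity (an EventListener implementation) are hypothetical names.
Handler mainHandler = new Handler(Looper.getMainLooper());
LibvpxVideoTrackRenderer videoRenderer = new LibvpxVideoTrackRenderer(
    videoSampleSource,  // assumed upstream SampleSource
    true,               // scale frames to fit the output surface
    mainHandler,        // deliver events on the main thread
    playerActivity,     // assumed EventListener implementation
    50);                // notify onDroppedFrames after at most 50 dropped frames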
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public LibopusAudioTrackRenderer(SampleSource source, Handler eventHandler,
    EventListener eventListener) {
  super(source);
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
  audioTrack = new AudioTrack();
  formatHolder = new MediaFormatHolder();
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public LibflacAudioTrackRenderer(SampleSource source, Handler eventHandler,
    EventListener eventListener) {
  super(source);
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
  this.audioTrack = new AudioTrack();
  formatHolder = new MediaFormatHolder();
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder) {
  downstreamPositionUs = playbackPositionUs;
  if (pendingDiscontinuities[track] || isPendingReset()) {
    return NOTHING_READ;
  }
  InternalTrackOutput sampleQueue = sampleQueues.valueAt(track);
  if (pendingMediaFormat[track]) {
    formatHolder.format = sampleQueue.getFormat();
    formatHolder.drmInitData = drmInitData;
    pendingMediaFormat[track] = false;
    return FORMAT_READ;
  }
  if (sampleQueue.getSample(sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    if (havePendingNextSampleUs) {
      // Set the offset to make the timestamp of this sample equal to pendingNextSampleUs.
      sampleTimeOffsetUs = pendingNextSampleUs - sampleHolder.timeUs;
      havePendingNextSampleUs = false;
    }
    sampleHolder.timeUs += sampleTimeOffsetUs;
    return SAMPLE_READ;
  }
  if (loadingFinished) {
    return END_OF_STREAM;
  }
  return NOTHING_READ;
}
/**
 * @param source A source from which samples containing EIA-608 closed captions can be read.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should be
 *     invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
    Looper textRendererLooper) {
  super(source);
  this.textRenderer = Assertions.checkNotNull(textRenderer);
  textRendererHandler = textRendererLooper == null ? null
      : new Handler(textRendererLooper, this);
  eia608Parser = new Eia608Parser();
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  captionStringBuilder = new StringBuilder();
  pendingCaptionLists = new TreeSet<>();
}
/**
 * @param source A source from which samples containing metadata can be read.
 * @param metadataParser A parser for parsing the metadata.
 * @param metadataRenderer The metadata renderer to receive the parsed metadata.
 * @param metadataRendererLooper The looper associated with the thread on which metadataRenderer
 *     should be invoked. If the renderer makes use of standard Android UI components, then this
 *     should normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public MetadataTrackRenderer(SampleSource source, MetadataParser<T> metadataParser,
    MetadataRenderer<T> metadataRenderer, Looper metadataRendererLooper) {
  super(source);
  this.metadataParser = Assertions.checkNotNull(metadataParser);
  this.metadataRenderer = Assertions.checkNotNull(metadataRenderer);
  this.metadataHandler = metadataRendererLooper == null ? null
      : new Handler(metadataRendererLooper, this);
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
}
@Override
public void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder) {
  Assertions.checkState(prepared);
  if (track < 0 || track >= vlctracks.length) {
    ExoVlcUtil.log(this, "getTrackMediaFormat() out of range : " + track + "; track len="
        + vlctracks.length);
    return;
  }
  mediaFormatHolder.format = MediaFormat.createFromFrameworkMediaFormatV16(
      ExoVlcUtil.track2mediaFormat(vlctracks[track]));
  mediaFormatHolder.drmInitData = null;
}
/**
 * @param source A source from which samples containing EIA-608 closed captions can be read.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should be
 *     invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public Eia608TrackRenderer(SampleSource source, TextRenderer textRenderer,
    Looper textRendererLooper) {
  this.source = Assertions.checkNotNull(source);
  this.textRenderer = Assertions.checkNotNull(textRenderer);
  textRendererHandler = textRendererLooper == null ? null
      : new Handler(textRendererLooper, this);
  eia608Parser = new Eia608Parser();
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
  captionStringBuilder = new StringBuilder();
  pendingCaptionLists = new TreeSet<ClosedCaptionList>();
}
/**
 * @param source A source from which samples containing metadata can be read.
 * @param metadataParser A parser for parsing the metadata.
 * @param metadataRenderer The metadata renderer to receive the parsed metadata.
 * @param metadataRendererLooper The looper associated with the thread on which metadataRenderer
 *     should be invoked. If the renderer makes use of standard Android UI components, then this
 *     should normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public MetadataTrackRenderer(SampleSource source, MetadataParser<T> metadataParser,
    MetadataRenderer<T> metadataRenderer, Looper metadataRendererLooper) {
  this.source = Assertions.checkNotNull(source);
  this.metadataParser = Assertions.checkNotNull(metadataParser);
  this.metadataRenderer = Assertions.checkNotNull(metadataRenderer);
  this.metadataHandler = metadataRendererLooper == null ? null
      : new Handler(metadataRendererLooper, this);
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_NORMAL);
}
/**
 * Attempts to read either a sample, a new format, or a discontinuity from the source.
 * <p>
 * This method should not be called until after the source has been successfully prepared.
 * <p>
 * Note that where multiple tracks are enabled, {@link #NOTHING_READ} may be returned if the
 * next piece of data to be read from the {@link SampleSource} corresponds to a different track
 * than the one for which data was requested.
 *
 * @param track The track from which to read.
 * @param positionUs The current playback position.
 * @param formatHolder A {@link MediaFormatHolder} object to populate in the case of a new format.
 * @param sampleHolder A {@link SampleHolder} object to populate in the case of a new sample. If
 *     the caller requires the sample data then it must ensure that {@link SampleHolder#data}
 *     references a valid output buffer.
 * @param onlyReadDiscontinuity Whether to only read a discontinuity. If true, only
 *     {@link #DISCONTINUITY_READ} or {@link #NOTHING_READ} can be returned.
 * @return The result, which can be {@link #SAMPLE_READ}, {@link #FORMAT_READ},
 *     {@link #DISCONTINUITY_READ}, {@link #NOTHING_READ} or {@link #END_OF_STREAM}.
 * @throws IOException If an error occurred reading from the source.
 */
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
  Assertions.checkState(state == STATE_PREPARED);
  Assertions.checkState(trackEnabledStates[track]);
  this.downstreamPositionUs = positionUs;
  if (pendingDiscontinuities[track]) {
    pendingDiscontinuities[track] = false;
    Log.d(TAG, "readData(track=" + track + ", pos=" + positionUs + "): pendingDiscontinuities");
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity) {
    return NOTHING_READ;
  }
  MediaFormat mediaFormat = this.extractor.getFormat(track);
  if (mediaFormat != null && !mediaFormat.equals(trackMediaFormats[track], true)) {
    formatHolder.format = mediaFormat;
    this.trackMediaFormats[track] = mediaFormat;
    this.trackMediaFormats[track].setMaxVideoDimensions(mediaFormat.getMaxVideoWidth(),
        mediaFormat.getMaxVideoHeight());
    return FORMAT_READ;
  }
  if (this.extractor.hasSamples(track)) {
    if (this.extractor.getSample(track, sampleHolder)) {
      sampleHolder.decodeOnly = false; // frameAccurateSeeking && sampleHolder.timeUs < lastSeekPositionUs;
      return SAMPLE_READ;
    }
  }
  return NOTHING_READ;
}
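// Caller-side sketch of the readData() contract documented above (illustrative only; the
// sampleSource variable, trackIndex, and the helpers flushDecoder(), reconfigureDecoder(),
// queueInputSample() and signalEndOfStream() are hypothetical names, not part of this source).
int result = sampleSource.readData(trackIndex, positionUs, formatHolder, sampleHolder, false);
switch (result) {
  case SampleSource.DISCONTINUITY_READ:
    // Timestamps are about to jump; flush decoder state before reading further samples.
    flushDecoder();
    break;
  case SampleSource.FORMAT_READ:
    // formatHolder.format now holds the new format; reconfigure the decoder accordingly.
    reconfigureDecoder(formatHolder.format);
    break;
  case SampleSource.SAMPLE_READ:
    // sampleHolder now holds a sample; queue it for decoding.
    queueInputSample(sampleHolder);
    break;
  case SampleSource.END_OF_STREAM:
    signalEndOfStream();
    break;
  case SampleSource.NOTHING_READ:
  default:
    // Nothing available for this track yet; try again on the next render pass.
    break;
}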
/**
 * @param source A source from which samples containing subtitle data can be read.
 * @param subtitleParser A subtitle parser that will parse Subtitle objects from the source.
 * @param textRenderer The text renderer.
 * @param textRendererLooper The looper associated with the thread on which textRenderer should be
 *     invoked. If the renderer makes use of standard Android UI components, then this should
 *     normally be the looper associated with the application's main thread, which can be
 *     obtained using {@link android.app.Activity#getMainLooper()}. Null may be passed if the
 *     renderer should be invoked directly on the player's internal rendering thread.
 */
public TextTrackRenderer(SampleSource source, SubtitleParser subtitleParser,
    TextRenderer textRenderer, Looper textRendererLooper) {
  this.source = Assertions.checkNotNull(source);
  this.subtitleParser = Assertions.checkNotNull(subtitleParser);
  this.textRenderer = Assertions.checkNotNull(textRenderer);
  this.textRendererHandler = textRendererLooper == null ? null
      : new Handler(textRendererLooper, this);
  formatHolder = new MediaFormatHolder();
  sampleHolder = new SampleHolder(true);
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param scaleToFit Whether video frames should be scaled to fit when rendering.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param maxDroppedFrameCountToNotify The maximum number of frames that can be dropped between
 *     invocations of {@link EventListener#onDroppedFrames(int, long)}.
 */
public LibvpxVideoTrackRenderer(SampleSource source, boolean scaleToFit, Handler eventHandler,
    EventListener eventListener, int maxDroppedFrameCountToNotify) {
  this.source = source.register();
  this.scaleToFit = scaleToFit;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.maxDroppedFrameCountToNotify = maxDroppedFrameCountToNotify;
  previousWidth = -1;
  previousHeight = -1;
  formatHolder = new MediaFormatHolder();
}
/**
 * @param source The upstream source from which the renderer obtains samples.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public LibopusAudioTrackRenderer(SampleSource source, Handler eventHandler,
    EventListener eventListener) {
  this.source = source.register();
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
  this.audioTrack = new AudioTrack();
  formatHolder = new MediaFormatHolder();
}
@Override
public void getTrackMediaFormat(int track, MediaFormatHolder outMediaFormatHolder) {
  if (track != mCea708TextTrackIndex) {
    mSampleExtractor.getTrackMediaFormat(track, outMediaFormatHolder);
  }
}
@Override
protected void onInputFormatChanged(MediaFormatHolder holder) throws ExoPlaybackException {
  mCodecIsSwPreferred = MIMETYPE_MPEG2.equalsIgnoreCase(holder.format.mimeType)
      && holder.format.height < MIN_HD_HEIGHT;
  super.onInputFormatChanged(holder);
}
@Override
public void getTrackMediaFormat(int track, MediaFormatHolder outMediaFormatHolder) {
  outMediaFormatHolder.format = mTrackFormats.get(track);
  outMediaFormatHolder.drmInitData = null;
}
/** Stores the {@link MediaFormat} of {@code track} in {@code outMediaFormatHolder}. */
void getTrackMediaFormat(int track, MediaFormatHolder outMediaFormatHolder);
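// Caller-side sketch of the out-parameter pattern used by getTrackMediaFormat() (illustrative
// only; sampleExtractor is assumed to be an implementation of this interface and selectedTrack
// a valid track index).
MediaFormatHolder holder = new MediaFormatHolder();
sampleExtractor.getTrackMediaFormat(selectedTrack, holder);
MediaFormat trackFormat = holder.format;      // populated by the implementation
DrmInitData drmInitData = holder.drmInitData; // may be null for unprotected content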
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) {
  Assertions.checkState(prepared);
  downstreamPositionUs = playbackPositionUs;
  if (pendingDiscontinuities[track]) {
    pendingDiscontinuities[track] = false;
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity) {
    return NOTHING_READ;
  }
  if (isPendingReset()) {
    return NOTHING_READ;
  }
  HlsExtractorWrapper extractor = getCurrentExtractor();
  if (!extractor.isPrepared()) {
    return NOTHING_READ;
  }
  if (downstreamFormat == null || !downstreamFormat.equals(extractor.format)) {
    // Notify a change in the downstream format.
    notifyDownstreamFormatChanged(extractor.format, extractor.trigger, extractor.startTimeUs);
    downstreamFormat = extractor.format;
  }
  if (extractors.size() > 1) {
    // If there's more than one extractor, attempt to configure a seamless splice from the
    // current one to the next one.
    extractor.configureSpliceTo(extractors.get(1));
  }
  int extractorIndex = 0;
  while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(track)) {
    // We're finished reading from the extractor for this particular track, so advance to the
    // next one for the current read.
    extractor = extractors.get(++extractorIndex);
    if (!extractor.isPrepared()) {
      return NOTHING_READ;
    }
  }
  MediaFormat mediaFormat = extractor.getMediaFormat(track);
  if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[track], true)) {
    chunkSource.getMaxVideoDimensions(mediaFormat);
    formatHolder.format = mediaFormat;
    downstreamMediaFormats[track] = mediaFormat;
    return FORMAT_READ;
  }
  if (extractor.getSample(track, sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    return SAMPLE_READ;
  }
  if (loadingFinished) {
    return END_OF_STREAM;
  }
  return NOTHING_READ;
}
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) {
  Assertions.checkState(state == STATE_ENABLED);
  Assertions.checkState(track == 0);
  downstreamPositionUs = positionUs;
  if (pendingDiscontinuity) {
    pendingDiscontinuity = false;
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity) {
    return NOTHING_READ;
  }
  if (isPendingReset()) {
    return NOTHING_READ;
  }
  boolean haveSamples = !sampleQueue.isEmpty();
  BaseMediaChunk currentChunk = mediaChunks.getFirst();
  while (haveSamples && mediaChunks.size() > 1
      && mediaChunks.get(1).getFirstSampleIndex() == sampleQueue.getReadIndex()) {
    mediaChunks.removeFirst();
    currentChunk = mediaChunks.getFirst();
  }
  if (downstreamFormat == null || !downstreamFormat.equals(currentChunk.format)) {
    notifyDownstreamFormatChanged(currentChunk.format, currentChunk.trigger,
        currentChunk.startTimeUs);
    downstreamFormat = currentChunk.format;
  }
  if (haveSamples || currentChunk.isMediaFormatFinal) {
    MediaFormat mediaFormat = currentChunk.getMediaFormat();
    if (!mediaFormat.equals(downstreamMediaFormat, true)) {
      chunkSource.getMaxVideoDimensions(mediaFormat);
      formatHolder.format = mediaFormat;
      formatHolder.drmInitData = currentChunk.getDrmInitData();
      downstreamMediaFormat = mediaFormat;
      return FORMAT_READ;
    }
  }
  if (!haveSamples) {
    if (loadingFinished) {
      return END_OF_STREAM;
    }
    return NOTHING_READ;
  }
  if (sampleQueue.getSample(sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    onSampleRead(currentChunk, sampleHolder);
    return SAMPLE_READ;
  }
  return NOTHING_READ;
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder) {
  Assertions.checkState(prepared);
  downstreamPositionUs = playbackPositionUs;
  if (pendingDiscontinuities[track] || isPendingReset()) {
    return NOTHING_READ;
  }
  HlsExtractorWrapper extractor = getCurrentExtractor();
  if (!extractor.isPrepared()) {
    return NOTHING_READ;
  }
  Format format = extractor.format;
  if (!format.equals(downstreamFormat)) {
    notifyDownstreamFormatChanged(format, extractor.trigger, extractor.startTimeUs);
  }
  downstreamFormat = format;
  if (extractors.size() > 1) {
    // If there's more than one extractor, attempt to configure a seamless splice from the
    // current one to the next one.
    extractor.configureSpliceTo(extractors.get(1));
  }
  int extractorTrack = extractorTrackIndices[track];
  int extractorIndex = 0;
  while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(extractorTrack)) {
    // We're finished reading from the extractor for this particular track, so advance to the
    // next one for the current read.
    extractor = extractors.get(++extractorIndex);
    if (!extractor.isPrepared()) {
      return NOTHING_READ;
    }
  }
  MediaFormat mediaFormat = extractor.getMediaFormat(extractorTrack);
  if (mediaFormat != null) {
    if (!mediaFormat.equals(downstreamMediaFormats[track])) {
      formatHolder.format = mediaFormat;
      downstreamMediaFormats[track] = mediaFormat;
      return FORMAT_READ;
    }
    // If mediaFormat and downstreamMediaFormats[track] are equal but different objects then the
    // equality check above will have been expensive, comparing the fields in each format. We
    // update downstreamMediaFormats[track] here so that referential equality can be cheaply
    // established during subsequent calls.
    downstreamMediaFormats[track] = mediaFormat;
  }
  if (extractor.getSample(extractorTrack, sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    return SAMPLE_READ;
  }
  if (loadingFinished) {
    return END_OF_STREAM;
  }
  return NOTHING_READ;
}
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder) {
  Assertions.checkState(state == STATE_ENABLED);
  downstreamPositionUs = positionUs;
  if (pendingDiscontinuity || isPendingReset()) {
    return NOTHING_READ;
  }
  boolean haveSamples = !sampleQueue.isEmpty();
  BaseMediaChunk currentChunk = mediaChunks.getFirst();
  while (haveSamples && mediaChunks.size() > 1
      && mediaChunks.get(1).getFirstSampleIndex() <= sampleQueue.getReadIndex()) {
    mediaChunks.removeFirst();
    currentChunk = mediaChunks.getFirst();
  }
  Format format = currentChunk.format;
  if (!format.equals(downstreamFormat)) {
    notifyDownstreamFormatChanged(format, currentChunk.trigger, currentChunk.startTimeUs);
  }
  downstreamFormat = format;
  if (haveSamples || currentChunk.isMediaFormatFinal) {
    MediaFormat mediaFormat = currentChunk.getMediaFormat();
    if (!mediaFormat.equals(downstreamMediaFormat)) {
      formatHolder.format = mediaFormat;
      formatHolder.drmInitData = currentChunk.getDrmInitData();
      downstreamMediaFormat = mediaFormat;
      return FORMAT_READ;
    }
    // If mediaFormat and downstreamMediaFormat are equal but different objects then the equality
    // check above will have been expensive, comparing the fields in each format. We update
    // downstreamMediaFormat here so that referential equality can be cheaply established during
    // subsequent calls.
    downstreamMediaFormat = mediaFormat;
  }
  if (!haveSamples) {
    if (loadingFinished) {
      return END_OF_STREAM;
    }
    return NOTHING_READ;
  }
  if (sampleQueue.getSample(sampleHolder)) {
    boolean decodeOnly = sampleHolder.timeUs < lastSeekPositionUs;
    sampleHolder.flags |= decodeOnly ? C.SAMPLE_FLAG_DECODE_ONLY : 0;
    onSampleRead(currentChunk, sampleHolder);
    return SAMPLE_READ;
  }
  return NOTHING_READ;
}
@Override
public int readData(int track, long positionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
  Assertions.checkState(prepared);
  return extractor.readSample(track, sampleHolder);
}
@Override
public int readData(int track, long playbackPositionUs, MediaFormatHolder formatHolder,
    SampleHolder sampleHolder, boolean onlyReadDiscontinuity) throws IOException {
  Assertions.checkState(prepared);
  downstreamPositionUs = playbackPositionUs;
  if (pendingDiscontinuities[track]) {
    pendingDiscontinuities[track] = false;
    return DISCONTINUITY_READ;
  }
  if (onlyReadDiscontinuity || isPendingReset() || extractors.isEmpty()) {
    maybeThrowLoadableException();
    return NOTHING_READ;
  }
  HlsExtractor extractor = getCurrentExtractor();
  if (extractors.size() > 1) {
    // If there's more than one extractor, attempt to configure a seamless splice from the
    // current one to the next one.
    extractor.configureSpliceTo(extractors.get(1));
  }
  int extractorIndex = 0;
  while (extractors.size() > extractorIndex + 1 && !extractor.hasSamples(track)) {
    // We're finished reading from the extractor for this particular track, so advance to the
    // next one for the current read.
    extractor = extractors.get(++extractorIndex);
  }
  if (!extractor.isPrepared()) {
    maybeThrowLoadableException();
    return NOTHING_READ;
  }
  MediaFormat mediaFormat = extractor.getFormat(track);
  if (mediaFormat != null && !mediaFormat.equals(downstreamMediaFormats[track], true)) {
    chunkSource.getMaxVideoDimensions(mediaFormat);
    formatHolder.format = mediaFormat;
    downstreamMediaFormats[track] = mediaFormat;
    return FORMAT_READ;
  }
  if (extractor.getSample(track, sampleHolder)) {
    sampleHolder.decodeOnly = frameAccurateSeeking && sampleHolder.timeUs < lastSeekPositionUs;
    return SAMPLE_READ;
  }
  if (loadingFinished) {
    return END_OF_STREAM;
  }
  maybeThrowLoadableException();
  return NOTHING_READ;
}
@Override
public void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder) {
  mediaFormatHolder.format =
      MediaFormat.createFromFrameworkMediaFormatV16(mediaExtractor.getTrackFormat(track));
  mediaFormatHolder.drmInitData = Util.SDK_INT >= 18 ? getPsshInfoV18() : null;
}
/** Stores the {@link MediaFormat} of {@code track} in {@code mediaFormatHolder}. */
void getTrackMediaFormat(int track, MediaFormatHolder mediaFormatHolder);