@Override
public void load() throws IOException, InterruptedException {
  // We always load from the beginning, so reset the sampleSize to 0.
  sampleSize = 0;
  try {
    // Create and open the input.
    dataSource.open(new DataSpec(uri));
    // Load the sample data.
    int result = 0;
    while (result != C.RESULT_END_OF_INPUT) {
      sampleSize += result;
      if (sampleSize == sampleData.length) {
        sampleData = Arrays.copyOf(sampleData, sampleData.length * 2);
      }
      result = dataSource.read(sampleData, sampleSize, sampleData.length - sampleSize);
    }
  } finally {
    dataSource.close();
  }
}
@SuppressWarnings("NonAtomicVolatileUpdate") @Override public void load() throws IOException, InterruptedException { DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded); try { // Create and open the input. ExtractorInput input = new DefaultExtractorInput(dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec)); if (bytesLoaded == 0) { // Set the target to ourselves. extractorWrapper.init(this); } // Load and parse the initialization data. try { int result = Extractor.RESULT_CONTINUE; while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { result = extractorWrapper.read(input); } } finally { bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition); } } finally { dataSource.close(); } }
@SuppressWarnings("NonAtomicVolatileUpdate") @Override public final void load() throws IOException, InterruptedException { DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded); try { // Create and open the input. ExtractorInput input = new DefaultExtractorInput(dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec)); if (bytesLoaded == 0) { // Set the target to ourselves. extractorWrapper.init(this); } // Load and parse the initialization data. try { int result = Extractor.RESULT_CONTINUE; while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { result = extractorWrapper.read(input); } } finally { bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition); } } finally { dataSource.close(); } }
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
    Representation representation, ChunkExtractorWrapper extractor, DataSource dataSource,
    int trigger) {
  RangedUri requestUri;
  if (initializationUri != null) {
    // It's common for initialization and index data to be stored adjacently. Attempt to merge
    // the two requests together to request both at once.
    requestUri = initializationUri.attemptMerge(indexUri);
    if (requestUri == null) {
      requestUri = initializationUri;
    }
  } else {
    requestUri = indexUri;
  }
  DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
      representation.getCacheKey());
  return new InitializationChunk(dataSource, dataSpec, trigger, representation.format,
      extractor);
}
@SuppressWarnings("NonAtomicVolatileUpdate") @Override public void load() throws IOException, InterruptedException { DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded); try { // Create and open the input. dataSource.open(loadDataSpec); // Load the sample data. int result = 0; while (result != C.RESULT_END_OF_INPUT) { bytesLoaded += result; result = getOutput().sampleData(dataSource, Integer.MAX_VALUE, true); } int sampleSize = bytesLoaded; getOutput().sampleMetadata(startTimeUs, C.SAMPLE_FLAG_SYNC, sampleSize, 0, null); } finally { dataSource.close(); } }
@SuppressWarnings("NonAtomicVolatileUpdate") @Override public final void load() throws IOException, InterruptedException { DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded); try { // Create and open the input. ExtractorInput input = new DefaultExtractorInput(dataSource, loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec)); if (bytesLoaded == 0) { // Set the target to ourselves. extractorWrapper.init(this); } // Load and parse the sample data. try { int result = Extractor.RESULT_CONTINUE; while (result == Extractor.RESULT_CONTINUE && !loadCanceled) { result = extractorWrapper.read(input); } } finally { bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition); } } finally { dataSource.close(); } }
private Chunk newInitializationChunk(RangedUri initializationUri, RangedUri indexUri,
    Representation representation, ChunkExtractorWrapper extractor, DataSource dataSource,
    int manifestIndex, int trigger) {
  RangedUri requestUri;
  if (initializationUri != null) {
    // It's common for initialization and index data to be stored adjacently. Attempt to merge
    // the two requests together to request both at once.
    requestUri = initializationUri.attemptMerge(indexUri);
    if (requestUri == null) {
      requestUri = initializationUri;
    }
  } else {
    requestUri = indexUri;
  }
  DataSpec dataSpec = new DataSpec(requestUri.getUri(), requestUri.start, requestUri.length,
      representation.getCacheKey());
  return new InitializationChunk(dataSource, dataSpec, trigger, representation.format,
      extractor, manifestIndex);
}
protected Chunk newMediaChunk(PeriodHolder periodHolder,
    RepresentationHolder representationHolder, DataSource dataSource, MediaFormat mediaFormat,
    ExposedTrack enabledTrack, int segmentNum, int trigger) {
  Representation representation = representationHolder.representation;
  Format format = representation.format;
  long startTimeUs = representationHolder.getSegmentStartTimeUs(segmentNum);
  long endTimeUs = representationHolder.getSegmentEndTimeUs(segmentNum);
  RangedUri segmentUri = representationHolder.getSegmentUrl(segmentNum);
  DataSpec dataSpec = new DataSpec(segmentUri.getUri(), segmentUri.start, segmentUri.length,
      representation.getCacheKey());
  long sampleOffsetUs = periodHolder.startTimeUs - representation.presentationTimeOffsetUs;
  if (mimeTypeIsRawText(format.mimeType)) {
    return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL, format,
        startTimeUs, endTimeUs, segmentNum, enabledTrack.trackFormat, null,
        periodHolder.localIndex);
  } else {
    boolean isMediaFormatFinal = (mediaFormat != null);
    return new ContainerMediaChunk(dataSource, dataSpec, trigger, format, startTimeUs, endTimeUs,
        segmentNum, sampleOffsetUs, representationHolder.extractorWrapper, mediaFormat,
        enabledTrack.adaptiveMaxWidth, enabledTrack.adaptiveMaxHeight, periodHolder.drmInitData,
        isMediaFormatFinal, periodHolder.localIndex);
  }
}
public void testSkipFullyLarge() throws IOException, InterruptedException {
  // Tests skipping an amount of data that's larger than any internal scratch space.
  int largeSkipSize = 1024 * 1024;
  FakeDataSource.Builder builder = new FakeDataSource.Builder();
  builder.appendReadData(new byte[largeSkipSize]);
  FakeDataSource testDataSource = builder.build();
  testDataSource.open(new DataSpec(Uri.parse(TEST_URI)));
  DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNBOUNDED);
  input.skipFully(largeSkipSize);
  assertEquals(largeSkipSize, input.getPosition());
  // Check that we fail with an EOFException if we skip again.
  try {
    input.skipFully(1);
    fail();
  } catch (EOFException e) {
    // Expected.
  }
}
@Override
public long open(DataSpec dataSpec) throws IOException {
  Assertions.checkState(dataSpec.uriIsFullStream);
  // TODO: Support caching for unbounded requests. This requires storing the source length
  // into the cache (the simplest approach is to incorporate it into each cache file's name).
  Assertions.checkState(dataSpec.length != C.LENGTH_UNBOUNDED);
  try {
    uri = dataSpec.uri;
    key = dataSpec.key;
    readPosition = dataSpec.position;
    bytesRemaining = dataSpec.length;
    openNextSource();
    return dataSpec.length;
  } catch (IOException e) {
    handleBeforeThrow(e);
    throw e;
  }
}
public void testReadVarintEndOfInputAtStart() throws IOException, InterruptedException {
  VarintReader reader = new VarintReader();
  // Build an input, and read to the end.
  DataSource dataSource = buildDataSource(new byte[1]);
  dataSource.open(new DataSpec(Uri.parse(TEST_URI)));
  ExtractorInput input = new DefaultExtractorInput(dataSource, 0, C.LENGTH_UNBOUNDED);
  int bytesRead = input.read(new byte[1], 0, 1);
  assertEquals(1, bytesRead);
  // End of input allowed.
  long result = reader.readUnsignedVarint(input, true, false);
  assertEquals(-1, result);
  // End of input not allowed.
  try {
    reader.readUnsignedVarint(input, false, false);
    fail();
  } catch (EOFException e) {
    // Expected.
  }
}
private void loadDrmInitData(Representation representation) throws IOException {
  Uri initFile = representation.getInitializationUri().getUri();
  FileDataSource initChunkSource = new FileDataSource();
  DataSpec initDataSpec = new DataSpec(initFile);
  int trigger = 2;
  ChunkExtractorWrapper extractorWrapper = new ChunkExtractorWrapper(new FragmentedMp4Extractor());
  InitializationChunk chunk = new InitializationChunk(initChunkSource, initDataSpec, trigger,
      format, extractorWrapper);
  try {
    chunk.load();
  } catch (InterruptedException e) {
    Log.d(TAG, "Interrupted!", e);
  }
  if (!chunk.isLoadCanceled()) {
    drmInitData = chunk.getDrmInitData();
  }
  if (drmInitData != null) {
    DrmInitData.SchemeInitData schemeInitData =
        OfflineDrmManager.getWidevineInitData(drmInitData);
    if (schemeInitData != null) {
      widevineInitData = schemeInitData.data;
    }
  }
}
@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler mainHandler = player.getMainHandler();

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
  dataSource = new CameraDataSource(bandwidthMeter);
  try {
    dataSource.open(new DataSpec(uri));
  } catch (IOException e) {
    e.printStackTrace();
  }
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, player, 0);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
/**
 * Establishes a connection.
 */
private Request makeRequest(DataSpec dataSpec) {
  long position = dataSpec.position;
  long length = dataSpec.length;
  boolean allowGzip = (dataSpec.flags & DataSpec.FLAG_ALLOW_GZIP) != 0;

  HttpUrl url = HttpUrl.parse(dataSpec.uri.toString());
  Request.Builder builder = new Request.Builder().url(url);
  if (cacheControl != null) {
    builder.cacheControl(cacheControl);
  }
  synchronized (requestProperties) {
    for (Map.Entry<String, String> property : requestProperties.entrySet()) {
      builder.addHeader(property.getKey(), property.getValue());
    }
  }
  if (!(position == 0 && length == C.LENGTH_UNBOUNDED)) {
    String rangeRequest = "bytes=" + position + "-";
    if (length != C.LENGTH_UNBOUNDED) {
      rangeRequest += (position + length - 1);
    }
    builder.addHeader("Range", rangeRequest);
  }
  builder.addHeader("User-Agent", userAgent);
  if (!allowGzip) {
    builder.addHeader("Accept-Encoding", "identity");
  }
  if (dataSpec.postBody != null) {
    builder.post(RequestBody.create(null, dataSpec.postBody));
  }
  return builder.build();
}
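A minimal sketch of the Range header arithmetic above. The URI and byte values here are hypothetical, chosen only to illustrate what makeRequest would produce from a given DataSpec:

// Hypothetical values, for illustration only.
DataSpec spec = new DataSpec(Uri.parse("http://example.com/media.mp4"), 100, 50, null);
// position = 100, length = 50, so the header is "Range: bytes=100-149".
// With length == C.LENGTH_UNBOUNDED the header would be open-ended: "Range: bytes=100-".
// With position == 0 and an unbounded length, no Range header is added at all.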
@Override
public void load() throws IOException, InterruptedException {
  // If we previously fed part of this chunk to the extractor, we need to skip it this time. For
  // encrypted content we need to skip the data by reading it through the source, so as to ensure
  // correct decryption of the remainder of the chunk. For clear content, we can request the
  // remainder of the chunk directly.
  DataSpec loadDataSpec;
  boolean skipLoadedBytes;
  if (isEncrypted) {
    loadDataSpec = dataSpec;
    skipLoadedBytes = bytesLoaded != 0;
  } else {
    loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded);
    skipLoadedBytes = false;
  }
  try {
    ExtractorInput input = new DefaultExtractorInput(dataSource,
        loadDataSpec.absoluteStreamPosition, dataSource.open(loadDataSpec));
    if (skipLoadedBytes) {
      input.skipFully(bytesLoaded);
    }
    try {
      int result = Extractor.RESULT_CONTINUE;
      while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
        result = extractorWrapper.read(input);
      }
    } finally {
      bytesLoaded = (int) (input.getPosition() - dataSpec.absoluteStreamPosition);
    }
  } finally {
    dataSource.close();
  }
}
private MediaPlaylistChunk newMediaPlaylistChunk(int variantIndex) {
  Uri mediaPlaylistUri = UriUtil.resolveToUri(baseUri, variants[variantIndex].url);
  DataSpec dataSpec = new DataSpec(mediaPlaylistUri, 0, C.LENGTH_UNBOUNDED, null,
      DataSpec.FLAG_ALLOW_GZIP);
  return new MediaPlaylistChunk(dataSource, dataSpec, scratchSpace, playlistParser, variantIndex,
      mediaPlaylistUri.toString());
}
public MediaPlaylistChunk(DataSource dataSource, DataSpec dataSpec, byte[] scratchSpace,
    HlsPlaylistParser playlistParser, int variantIndex, String playlistUrl) {
  super(dataSource, dataSpec, Chunk.TYPE_MANIFEST, Chunk.TRIGGER_UNSPECIFIED, null, scratchSpace);
  this.variantIndex = variantIndex;
  this.playlistParser = playlistParser;
  this.playlistUrl = playlistUrl;
}
@Override
public void load() throws IOException, InterruptedException {
  int result = Extractor.RESULT_CONTINUE;
  while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
    ExtractorInput input = null;
    try {
      long position = positionHolder.position;
      long length = dataSource.open(new DataSpec(uri, position, C.LENGTH_UNBOUNDED, null));
      if (length != C.LENGTH_UNBOUNDED) {
        length += position;
      }
      input = new DefaultExtractorInput(dataSource, position, length);
      Extractor extractor = extractorHolder.selectExtractor(input);
      if (pendingExtractorSeek) {
        extractor.seek();
        pendingExtractorSeek = false;
      }
      while (result == Extractor.RESULT_CONTINUE && !loadCanceled) {
        allocator.blockWhileTotalBytesAllocatedExceeds(requestedBufferSize);
        result = extractor.read(input, positionHolder);
        // TODO: Implement throttling to stop us from buffering data too often.
      }
    } finally {
      if (result == Extractor.RESULT_SEEK) {
        result = Extractor.RESULT_CONTINUE;
      } else if (input != null) {
        positionHolder.position = input.getPosition();
      }
      dataSource.close();
    }
  }
}
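A short note on the length arithmetic in the snippet above, using hypothetical numbers only: the value returned by dataSource.open(...) is the number of bytes remaining from the requested position, so adding position converts it to an absolute stream length that stays comparable to input.getPosition().

// Hypothetical numbers, for illustration only.
long position = 1000;               // where the previous read loop left off
long remaining = 4000;              // what dataSource.open(...) might return for the remainder
long length = remaining + position; // 5000: absolute end of the stream for the ExtractorInput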
/**
 * @param dataSource A {@link DataSource} suitable for loading the sample data.
 * @param dataSpec Defines the location of the sample.
 * @param format The format of the sample.
 * @param durationUs The duration of the sample in microseconds, or {@link C#UNKNOWN_TIME_US} if
 *     the duration is unknown, or {@link C#MATCH_LONGEST_US} if the duration should match the
 *     duration of the longest track whose duration is known.
 * @param mediaFormat The sample media format. May be null.
 */
public SingleSampleChunkSource(DataSource dataSource, DataSpec dataSpec, Format format,
    long durationUs, MediaFormat mediaFormat) {
  this.dataSource = dataSource;
  this.dataSpec = dataSpec;
  this.format = format;
  this.durationUs = durationUs;
  this.mediaFormat = mediaFormat;
  trackInfo = new TrackInfo(format.mimeType, durationUs);
}
@SuppressWarnings("NonAtomicVolatileUpdate") @Override public void load() throws IOException, InterruptedException { if (!writtenHeader) { if (headerData != null) { getOutput().sampleData(new ParsableByteArray(headerData), headerData.length); } writtenHeader = true; } DataSpec loadDataSpec = Util.getRemainderDataSpec(dataSpec, bytesLoaded); try { // Create and open the input. dataSource.open(loadDataSpec); // Load the sample data. int result = 0; while (result != C.RESULT_END_OF_INPUT) { bytesLoaded += result; result = getOutput().sampleData(dataSource, Integer.MAX_VALUE, true); } int sampleSize = bytesLoaded; if (headerData != null) { sampleSize += headerData.length; } getOutput().sampleMetadata(startTimeUs, C.SAMPLE_FLAG_SYNC, sampleSize, 0, null); } finally { dataSource.close(); } }
/**
 * Given a {@link DataSpec} and a number of bytes already loaded, returns a {@link DataSpec}
 * that represents the remainder of the data.
 *
 * @param dataSpec The original {@link DataSpec}.
 * @param bytesLoaded The number of bytes already loaded.
 * @return A {@link DataSpec} that represents the remainder of the data.
 */
public static DataSpec getRemainderDataSpec(DataSpec dataSpec, int bytesLoaded) {
  if (bytesLoaded == 0) {
    return dataSpec;
  } else {
    long remainingLength = dataSpec.length == C.LENGTH_UNBOUNDED ? C.LENGTH_UNBOUNDED
        : dataSpec.length - bytesLoaded;
    return new DataSpec(dataSpec.uri, dataSpec.position + bytesLoaded, remainingLength,
        dataSpec.key, dataSpec.flags);
  }
}
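A minimal usage sketch of Util.getRemainderDataSpec as used by the load() implementations above. The URI and byte counts are hypothetical, picked only to show the arithmetic:

// Hypothetical values, for illustration only.
Uri uri = Uri.parse("http://example.com/segment.mp4");
DataSpec original = new DataSpec(uri, 100, 50, null);
// After 20 bytes have already been loaded, the remainder starts at position 120 and is 30 bytes
// long; an unbounded original length would stay unbounded.
DataSpec remainder = Util.getRemainderDataSpec(original, 20);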
private Chunk newMediaChunk(RepresentationHolder representationHolder, DataSource dataSource,
    int segmentNum, int trigger) {
  Representation representation = representationHolder.representation;
  DashSegmentIndex segmentIndex = representationHolder.segmentIndex;

  long startTimeUs = segmentIndex.getTimeUs(segmentNum);
  long endTimeUs = startTimeUs + segmentIndex.getDurationUs(segmentNum);

  int absoluteSegmentNum = segmentNum + representationHolder.segmentNumShift;
  boolean isLastSegment = !currentManifest.dynamic
      && segmentNum == segmentIndex.getLastSegmentNum();

  RangedUri segmentUri = segmentIndex.getSegmentUrl(segmentNum);
  DataSpec dataSpec = new DataSpec(segmentUri.getUri(), segmentUri.start, segmentUri.length,
      representation.getCacheKey());

  long sampleOffsetUs = representation.periodStartMs * 1000
      - representation.presentationTimeOffsetUs;
  if (representation.format.mimeType.equals(MimeTypes.TEXT_VTT)) {
    if (representationHolder.vttHeaderOffsetUs != sampleOffsetUs) {
      // Update the VTT header.
      headerBuilder.setLength(0);
      headerBuilder.append(C.WEBVTT_EXO_HEADER).append("=")
          .append(C.WEBVTT_EXO_HEADER_OFFSET).append(sampleOffsetUs)
          .append("\n");
      representationHolder.vttHeader = headerBuilder.toString().getBytes();
      representationHolder.vttHeaderOffsetUs = sampleOffsetUs;
    }
    return new SingleSampleMediaChunk(dataSource, dataSpec, Chunk.TRIGGER_INITIAL,
        representation.format, startTimeUs, endTimeUs, absoluteSegmentNum, isLastSegment,
        MediaFormat.createTextFormat(MimeTypes.TEXT_VTT), null, representationHolder.vttHeader);
  } else {
    return new ContainerMediaChunk(dataSource, dataSpec, trigger, representation.format,
        startTimeUs, endTimeUs, absoluteSegmentNum, isLastSegment, sampleOffsetUs,
        representationHolder.extractorWrapper, representationHolder.format, drmInitData, true);
  }
}
@Override
public long open(DataSpec dataSpec) throws IOException {
  try {
    uri = dataSpec.uri;
    flags = dataSpec.flags;
    key = dataSpec.key;
    readPosition = dataSpec.position;
    bytesRemaining = dataSpec.length;
    openNextSource();
    return dataSpec.length;
  } catch (IOException e) {
    handleBeforeThrow(e);
    throw e;
  }
}
@Override
public DataSink open(DataSpec dataSpec) throws CacheDataSinkException {
  // TODO: Support caching for unbounded requests. See TODO in {@link CacheDataSource} for
  // more details.
  Assertions.checkState(dataSpec.length != C.LENGTH_UNBOUNDED);
  try {
    this.dataSpec = dataSpec;
    dataSpecBytesWritten = 0;
    openNextOutputStream();
    return this;
  } catch (FileNotFoundException e) {
    throw new CacheDataSinkException(e);
  }
}
private static MediaChunk newMediaChunk(Format formatInfo, Uri uri, String cacheKey,
    ChunkExtractorWrapper extractorWrapper, DrmInitData drmInitData, DataSource dataSource,
    int chunkIndex, boolean isLast, long chunkStartTimeUs, long chunkEndTimeUs, int trigger,
    MediaFormat mediaFormat) {
  long offset = 0;
  DataSpec dataSpec = new DataSpec(uri, offset, -1, cacheKey);
  // In SmoothStreaming each chunk contains sample timestamps relative to the start of the chunk.
  // To convert them to absolute timestamps, we need to set sampleOffsetUs to -chunkStartTimeUs.
  return new ContainerMediaChunk(dataSource, dataSpec, trigger, formatInfo, chunkStartTimeUs,
      chunkEndTimeUs, chunkIndex, isLast, chunkStartTimeUs, extractorWrapper, mediaFormat,
      drmInitData, true);
}
public MediaPlaylistChunk(DataSource dataSource, DataSpec dataSpec, byte[] scratchSpace,
    HlsPlaylistParser playlistParser, int variantIndex, String playlistUrl) {
  super(dataSource, dataSpec, Chunk.TYPE_MANIFEST, Chunk.TRIGGER_UNSPECIFIED, null,
      Chunk.NO_PARENT_ID, scratchSpace);
  this.variantIndex = variantIndex;
  this.playlistParser = playlistParser;
  this.playlistUrl = playlistUrl;
}
public EncryptionKeyChunk(DataSource dataSource, DataSpec dataSpec, byte[] scratchSpace,
    String iv, int variantIndex) {
  super(dataSource, dataSpec, Chunk.TYPE_DRM, Chunk.TRIGGER_UNSPECIFIED, null,
      Chunk.NO_PARENT_ID, scratchSpace);
  this.iv = iv;
  this.variantIndex = variantIndex;
}