/**
 * Creates the chunk source, wiring together manifest refreshing, track selection, format
 * adaptation and event delivery. Whether the source is live is read from the initial
 * manifest's {@code dynamic} flag.
 */
DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    MediaPresentationDescription initialManifest, DashTrackSelector trackSelector,
    DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, Clock systemClock,
    long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs, boolean startAtLiveEdge,
    Handler eventHandler, EventListener eventListener, int eventSourceId) {
  // Collaborators.
  this.manifestFetcher = manifestFetcher;
  this.currentManifest = initialManifest;
  this.trackSelector = trackSelector;
  this.dataSource = dataSource;
  this.adaptiveFormatEvaluator = adaptiveFormatEvaluator;
  this.systemClock = systemClock;
  // Live playback configuration.
  this.liveEdgeLatencyUs = liveEdgeLatencyUs;
  this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs;
  this.startAtLiveEdge = startAtLiveEdge;
  // Event dispatch.
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  // Mutable working state.
  this.evaluation = new Evaluation();
  this.availableRangeValues = new long[2];
  this.periodHolders = new SparseArray<>();
  this.tracks = new ArrayList<>();
  this.live = initialManifest.dynamic;
}
@Override
public TrackRenderer[] buildRenderers(HostActivity host, ExoPlayer player, Surface surface) {
  // Infrastructure shared by both sample sources.
  Handler mainHandler = new Handler();
  LogcatLogger eventLogger = new LogcatLogger(TAG, player);
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  String userAgent = TestUtil.getUserAgent(host);

  // Build the video renderer. Format selection uses a seeded RandomEvaluator so runs are
  // reproducible, and the decoder output surface is attached via a player message.
  DataSource videoSource = new DefaultUriDataSource(host, null, userAgent);
  videoTrackSelector = new TrackSelector(AdaptationSet.TYPE_VIDEO,
      canIncludeAdditionalVideoFormats, videoFormats);
  ChunkSource videoChunkSource = new DashChunkSource(mpd, videoTrackSelector, videoSource,
      new FormatEvaluator.RandomEvaluator(0));
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, eventLogger, VIDEO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  DebugMediaCodecVideoTrackRenderer videoRenderer = new DebugMediaCodecVideoTrackRenderer(host,
      videoSampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      0, mainHandler, eventLogger, 50);
  videoCounters = videoRenderer.codecCounters;
  player.sendMessage(videoRenderer, DebugMediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);

  // Build the audio renderer. No additional formats are included (fixed selection).
  DataSource audioSource = new DefaultUriDataSource(host, null, userAgent);
  TrackSelector audioTrackSelector = new TrackSelector(AdaptationSet.TYPE_AUDIO, false,
      audioFormats);
  ChunkSource audioChunkSource = new DashChunkSource(mpd, audioTrackSelector, audioSource, null);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, eventLogger, AUDIO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource, MediaCodecSelector.DEFAULT, mainHandler, eventLogger);
  audioCounters = audioRenderer.codecCounters;

  TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
  renderers[VIDEO_RENDERER_INDEX] = videoRenderer;
  renderers[AUDIO_RENDERER_INDEX] = audioRenderer;
  return renderers;
}
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, SmoothStreamingManifest initialManifest, SmoothStreamingTrackSelector trackSelector, DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) { this.manifestFetcher = manifestFetcher; this.currentManifest = initialManifest; this.trackSelector = trackSelector; this.dataSource = dataSource; this.adaptiveFormatEvaluator = adaptiveFormatEvaluator; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; evaluation = new Evaluation(); tracks = new ArrayList<>(); extractorWrappers = new SparseArray<>(); mediaFormats = new SparseArray<>(); live = initialManifest.isLive; ProtectionElement protectionElement = initialManifest.protectionElement; if (protectionElement != null) { byte[] keyId = getProtectionElementKeyId(protectionElement.data); trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); drmInitData = new DrmInitData.Mapped(); drmInitData.put(protectionElement.uuid, new SchemeInitData(MimeTypes.VIDEO_MP4, protectionElement.data)); } else { trackEncryptionBoxes = null; drmInitData = null; } }
private DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, MediaPresentationDescription initialManifest, int adaptationSetIndex, int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyUs) { this.manifestFetcher = manifestFetcher; this.currentManifest = initialManifest; this.adaptationSetIndex = adaptationSetIndex; this.representationIndices = representationIndices; this.dataSource = dataSource; this.evaluator = formatEvaluator; this.liveEdgeLatencyUs = liveEdgeLatencyUs; this.evaluation = new Evaluation(); this.headerBuilder = new StringBuilder(); psshInfo = getPsshInfo(currentManifest, adaptationSetIndex); Representation[] representations = getFilteredRepresentations(currentManifest, adaptationSetIndex, representationIndices); long periodDurationUs = (representations[0].periodDurationMs == TrackRenderer.UNKNOWN_TIME_US) ? TrackRenderer.UNKNOWN_TIME_US : representations[0].periodDurationMs * 1000; this.trackInfo = new TrackInfo(representations[0].format.mimeType, periodDurationUs); this.formats = new Format[representations.length]; this.representationHolders = new HashMap<String, RepresentationHolder>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < representations.length; i++) { formats[i] = representations[i].format; maxWidth = Math.max(formats[i].width, maxWidth); maxHeight = Math.max(formats[i].height, maxHeight); Extractor extractor = mimeTypeIsWebm(formats[i].mimeType) ? new WebmExtractor() : new FragmentedMp4Extractor(); representationHolders.put(formats[i].id, new RepresentationHolder(representations[i], extractor)); } this.maxWidth = maxWidth; this.maxHeight = maxHeight; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
/** * @param dataSource A {@link DataSource} suitable for loading the media data. * @param evaluator Selects from the available formats. * @param representations The representations to be considered by the source. */ public DashChunkSource(DataSource dataSource, FormatEvaluator evaluator, Representation... representations) { this.dataSource = dataSource; this.evaluator = evaluator; this.formats = new Format[representations.length]; this.extractors = new HashMap<String, Extractor>(); this.segmentIndexes = new HashMap<String, DashSegmentIndex>(); this.representations = new HashMap<String, Representation>(); this.trackInfo = new TrackInfo(representations[0].format.mimeType, representations[0].periodDurationMs * 1000); this.evaluation = new Evaluation(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < representations.length; i++) { formats[i] = representations[i].format; maxWidth = Math.max(formats[i].width, maxWidth); maxHeight = Math.max(formats[i].height, maxHeight); Extractor extractor = formats[i].mimeType.startsWith(MimeTypes.VIDEO_WEBM) ? new WebmExtractor() : new FragmentedMp4Extractor(); extractors.put(formats[i].id, extractor); this.representations.put(formats[i].id, representations[i]); DashSegmentIndex segmentIndex = representations[i].getIndex(); if (segmentIndex != null) { segmentIndexes.put(formats[i].id, segmentIndex); } } this.maxWidth = maxWidth; this.maxHeight = maxHeight; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
public void testGetSeekRangeOnVod() {
  // For VOD content the seek range spans the whole duration, in both us and ms views.
  DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
      null, null, mock(FormatEvaluator.class));
  chunkSource.enable();
  TimeRange seekRange = chunkSource.getSeekRange();
  checkSeekRange(seekRange, 0, VOD_DURATION_MS * 1000);
  long[] boundsMs = seekRange.getCurrentBoundsMs(null);
  assertEquals(0, boundsMs[0]);
  assertEquals(VOD_DURATION_MS, boundsMs[1]);
}
DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher, MediaPresentationDescription initialManifest, int adaptationSetIndex, int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator, Clock systemClock, long liveEdgeLatencyUs, long elapsedRealtimeOffsetUs, boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) { this.manifestFetcher = manifestFetcher; this.currentManifest = initialManifest; this.adaptationSetIndex = adaptationSetIndex; this.representationIndices = representationIndices; this.dataSource = dataSource; this.formatEvaluator = formatEvaluator; this.systemClock = systemClock; this.liveEdgeLatencyUs = liveEdgeLatencyUs; this.elapsedRealtimeOffsetUs = elapsedRealtimeOffsetUs; this.startAtLiveEdge = startAtLiveEdge; this.eventHandler = eventHandler; this.eventListener = eventListener; this.evaluation = new Evaluation(); this.headerBuilder = new StringBuilder(); this.seekRangeValues = new long[2]; drmInitData = getDrmInitData(currentManifest, adaptationSetIndex); Representation[] representations = getFilteredRepresentations(currentManifest, adaptationSetIndex, representationIndices); long periodDurationUs = (representations[0].periodDurationMs == TrackRenderer.UNKNOWN_TIME_US) ? TrackRenderer.UNKNOWN_TIME_US : representations[0].periodDurationMs * 1000; this.trackInfo = new TrackInfo(representations[0].format.mimeType, periodDurationUs); this.formats = new Format[representations.length]; this.representationHolders = new HashMap<>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < representations.length; i++) { formats[i] = representations[i].format; maxWidth = Math.max(formats[i].width, maxWidth); maxHeight = Math.max(formats[i].height, maxHeight); Extractor extractor = mimeTypeIsWebm(formats[i].mimeType) ? 
new WebmExtractor() : new FragmentedMp4Extractor(); representationHolders.put(formats[i].id, new RepresentationHolder(representations[i], new ChunkExtractorWrapper(extractor))); } this.maxWidth = maxWidth; this.maxHeight = maxHeight; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) { this.manifestFetcher = manifestFetcher; this.streamElementIndex = streamElementIndex; this.currentManifest = initialManifest; this.dataSource = dataSource; this.formatEvaluator = formatEvaluator; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; StreamElement streamElement = getElement(initialManifest); trackInfo = new TrackInfo(streamElement.tracks[0].format.mimeType, initialManifest.durationUs); evaluation = new Evaluation(); TrackEncryptionBox[] trackEncryptionBoxes = null; ProtectionElement protectionElement = initialManifest.protectionElement; if (protectionElement != null) { byte[] keyId = getKeyId(protectionElement.data); trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4); drmInitData.put(protectionElement.uuid, protectionElement.data); this.drmInitData = drmInitData; } else { drmInitData = null; } int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length; formats = new Format[trackCount]; extractorWrappers = new SparseArray<>(); mediaFormats = new SparseArray<>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < trackCount; i++) { int trackIndex = trackIndices != null ? trackIndices[i] : i; formats[i] = streamElement.tracks[trackIndex].format; maxWidth = Math.max(maxWidth, formats[i].width); maxHeight = Math.max(maxHeight, formats[i].height); MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex); int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? 
Track.TYPE_VIDEO : Track.TYPE_AUDIO; FragmentedMp4Extractor extractor = new FragmentedMp4Extractor( FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME); extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale, initialManifest.durationUs, mediaFormat, trackEncryptionBoxes, trackType == Track.TYPE_VIDEO ? 4 : -1)); extractorWrappers.put(trackIndex, new ChunkExtractorWrapper(extractor)); mediaFormats.put(trackIndex, mediaFormat); } this.maxWidth = maxWidth; this.maxHeight = maxHeight; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
protected void buildTrackRenderers(DrmSessionManager drmSessionManager) { Handler mainHandler = player.getMainHandler(); LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE)); DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player); //Create the Sample Source to be used by the Video Renderer DataSource dataSourceVideo = createDataSource(okHttpClient, bandwidthMeter, userAgent); SmoothStreamingTrackSelector trackSelectorVideo = DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false); ChunkSource chunkSourceVideo = new SmoothStreamingChunkSource(manifestFetcher, trackSelectorVideo, dataSourceVideo, new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS); ChunkSampleSource sampleSourceVideo = new ChunkSampleSource(chunkSourceVideo, loadControl, BUFFER_SEGMENTS_VIDEO * BUFFER_SEGMENT_SIZE, mainHandler, player, EMExoPlayer.RENDER_VIDEO); //Create the Sample Source to be used by the Audio Renderer DataSource dataSourceAudio = createDataSource(okHttpClient, bandwidthMeter, userAgent); SmoothStreamingTrackSelector trackSelectorAudio = DefaultSmoothStreamingTrackSelector.newAudioInstance(); ChunkSource chunkSourceAudio = new SmoothStreamingChunkSource(manifestFetcher, trackSelectorAudio, dataSourceAudio, null, LIVE_EDGE_LATENCY_MS); ChunkSampleSource sampleSourceAudio = new ChunkSampleSource(chunkSourceAudio, loadControl, BUFFER_SEGMENTS_AUDIO * BUFFER_SEGMENT_SIZE, mainHandler, player, EMExoPlayer.RENDER_AUDIO); //Create the Sample Source to be used by the Closed Captions Renderer DataSource dataSourceCC = createDataSource(okHttpClient, bandwidthMeter, userAgent); SmoothStreamingTrackSelector trackSelectorCC = DefaultSmoothStreamingTrackSelector.newTextInstance(); ChunkSource chunkSourceCC = new SmoothStreamingChunkSource(manifestFetcher, trackSelectorCC, dataSourceCC, null, LIVE_EDGE_LATENCY_MS); ChunkSampleSource sampleSourceCC = new 
ChunkSampleSource(chunkSourceCC, loadControl, BUFFER_SEGMENTS_TEXT * BUFFER_SEGMENT_SIZE, mainHandler, player, EMExoPlayer.RENDER_CLOSED_CAPTION); // Build the renderers MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, sampleSourceVideo, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, MAX_JOIN_TIME, drmSessionManager, true, mainHandler, player, DROPPED_FRAME_NOTIFICATION_AMOUNT); EMMediaCodecAudioTrackRenderer audioRenderer = new EMMediaCodecAudioTrackRenderer(sampleSourceAudio, MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context), streamType); TextTrackRenderer captionsRenderer = new TextTrackRenderer(sampleSourceCC, player, mainHandler.getLooper()); // Invoke the callback TrackRenderer[] renderers = new TrackRenderer[EMExoPlayer.RENDER_COUNT]; renderers[EMExoPlayer.RENDER_VIDEO] = videoRenderer; renderers[EMExoPlayer.RENDER_AUDIO] = audioRenderer; renderers[EMExoPlayer.RENDER_CLOSED_CAPTION] = captionsRenderer; player.onRenderers(renderers, bandwidthMeter); }
private void buildRenderers() { Period period = manifest.getPeriod(0); Handler mainHandler = player.getMainHandler(); LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE)); DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player); boolean hasContentProtection = false; for (int i = 0; i < period.adaptationSets.size(); i++) { AdaptationSet adaptationSet = period.adaptationSets.get(i); if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) { hasContentProtection |= adaptationSet.hasContentProtection(); } } // Check drm support if necessary. boolean filterHdContent = false; StreamingDrmSessionManager drmSessionManager = null; if (hasContentProtection) { if (Util.SDK_INT < 18) { player.onRenderersError( new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME)); return; } try { drmSessionManager = StreamingDrmSessionManager.newWidevineInstance( player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player); filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1; } catch (UnsupportedDrmException e) { player.onRenderersError(e); return; } } // Build the video renderer. 
DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher, DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource, new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_VIDEO); ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl, VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoPlayerWrapper.TYPE_VIDEO); TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler, player, 50); // Build the audio renderer. DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher, DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_AUDIO); ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl, AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoPlayerWrapper.TYPE_AUDIO); TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource, MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC); // Build the text renderer. 
DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent); ChunkSource textChunkSource = new DashChunkSource(manifestFetcher, DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_TEXT); ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl, TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoPlayerWrapper.TYPE_TEXT); TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player, mainHandler.getLooper()); // Invoke the callback. TrackRenderer[] renderers = new TrackRenderer[ExoPlayerWrapper.RENDERER_COUNT]; renderers[ExoPlayerWrapper.TYPE_VIDEO] = videoRenderer; renderers[ExoPlayerWrapper.TYPE_AUDIO] = audioRenderer; renderers[ExoPlayerWrapper.TYPE_TEXT] = textRenderer; player.onRenderers(renderers, bandwidthMeter); }
@Override
public void onManifest(String contentId, SmoothStreamingManifest manifest) {
  Handler mainHandler = playerActivity.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();

  // Pick the first audio and first video stream element. For video, keep only the tracks
  // whose frame size the device's H.264 decoder can handle.
  int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
  int audioStreamElementIndex = -1;
  int videoStreamElementIndex = -1;
  ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
  for (int i = 0; i < manifest.streamElements.length; i++) {
    if (audioStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
      audioStreamElementIndex = i;
    } else if (videoStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
      videoStreamElementIndex = i;
      StreamElement streamElement = manifest.streamElements[i];
      for (int j = 0; j < streamElement.tracks.length; j++) {
        TrackElement trackElement = streamElement.tracks[j];
        if (trackElement.maxWidth * trackElement.maxHeight <= maxDecodableFrameSize) {
          videoTrackIndexList.add(j);
        }
        // Tracks exceeding the decodable frame size are silently dropped.
      }
    }
  }
  int[] videoTrackIndices = Util.toArray(videoTrackIndexList);

  // Build the video renderer (bandwidth-adaptive over the decodable tracks).
  DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      videoStreamElementIndex, videoTrackIndices, videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
      videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler,
      playerActivity, 50);

  // Build the audio renderer (fixed selection of the element's first track).
  DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      audioStreamElementIndex, new int[] {0}, audioDataSource,
      new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
  SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecAudioTrackRenderer audioRenderer =
      new MediaCodecAudioTrackRenderer(audioSampleSource);

  callback.onRenderers(videoRenderer, audioRenderer);
}
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) { this.manifestFetcher = manifestFetcher; this.streamElementIndex = streamElementIndex; this.currentManifest = initialManifest; this.dataSource = dataSource; this.formatEvaluator = formatEvaluator; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; StreamElement streamElement = getElement(initialManifest); trackInfo = new TrackInfo(streamElement.tracks[0].mimeType, initialManifest.durationUs); evaluation = new Evaluation(); TrackEncryptionBox[] trackEncryptionBoxes = null; ProtectionElement protectionElement = initialManifest.protectionElement; if (protectionElement != null) { byte[] keyId = getKeyId(protectionElement.data); trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); psshInfo = Collections.singletonMap(protectionElement.uuid, protectionElement.data); } else { psshInfo = null; } int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length; formats = new SmoothStreamingFormat[trackCount]; extractors = new SparseArray<FragmentedMp4Extractor>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < trackCount; i++) { int trackIndex = trackIndices != null ? 
trackIndices[i] : i; TrackElement trackElement = streamElement.tracks[trackIndex]; formats[i] = new SmoothStreamingFormat(String.valueOf(trackIndex), trackElement.mimeType, trackElement.maxWidth, trackElement.maxHeight, trackElement.numChannels, trackElement.sampleRate, trackElement.bitrate, trackIndex); maxWidth = Math.max(maxWidth, trackElement.maxWidth); maxHeight = Math.max(maxHeight, trackElement.maxHeight); MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex); int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_VIDEO : Track.TYPE_AUDIO; FragmentedMp4Extractor extractor = new FragmentedMp4Extractor( FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME); extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale, mediaFormat, trackEncryptionBoxes)); extractors.put(trackIndex, extractor); } this.maxHeight = maxHeight; this.maxWidth = maxWidth; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
private SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher, SmoothStreamingManifest initialManifest, int streamElementIndex, int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) { this.manifestFetcher = manifestFetcher; this.streamElementIndex = streamElementIndex; this.currentManifest = initialManifest; this.dataSource = dataSource; this.formatEvaluator = formatEvaluator; this.liveEdgeLatencyUs = liveEdgeLatencyMs * 1000; StreamElement streamElement = getElement(initialManifest); trackInfo = new TrackInfo(streamElement.tracks[0].mimeType, initialManifest.durationUs); evaluation = new Evaluation(); TrackEncryptionBox[] trackEncryptionBoxes = null; ProtectionElement protectionElement = initialManifest.protectionElement; if (protectionElement != null) { byte[] keyId = getKeyId(protectionElement.data); trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); psshInfo = Collections.singletonMap(protectionElement.uuid, protectionElement.data); } else { psshInfo = null; } int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length; formats = new SmoothStreamingFormat[trackCount]; extractors = new SparseArray<FragmentedMp4Extractor>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < trackCount; i++) { int trackIndex = trackIndices != null ? 
trackIndices[i] : i; TrackElement trackElement = streamElement.tracks[trackIndex]; formats[i] = new SmoothStreamingFormat(String.valueOf(trackIndex), trackElement.mimeType, trackElement.maxWidth, trackElement.maxHeight, trackElement.numChannels, trackElement.sampleRate, trackElement.bitrate, trackIndex); maxWidth = Math.max(maxWidth, trackElement.maxWidth); maxHeight = Math.max(maxHeight, trackElement.maxHeight); MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex); int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_VIDEO : Track.TYPE_AUDIO; FragmentedMp4Extractor extractor = new FragmentedMp4Extractor( FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME); extractor.setTrack(new Track(trackIndex, trackType, streamElement.timescale, initialManifest.durationUs, mediaFormat, trackEncryptionBoxes)); extractors.put(trackIndex, extractor); } this.maxHeight = maxHeight; this.maxWidth = maxWidth; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
/** * @param baseUrl The base URL for the streams. * @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}. * @param streamElementIndex The index of the stream element in the manifest to be provided by * the source. * @param trackIndices The indices of the tracks within the stream element to be considered by * the source. May be null if all tracks within the element should be considered. * @param dataSource A {@link DataSource} suitable for loading the media data. * @param formatEvaluator Selects from the available formats. */ public SmoothStreamingChunkSource(String baseUrl, SmoothStreamingManifest manifest, int streamElementIndex, int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator) { this.baseUrl = baseUrl; this.streamElement = manifest.streamElements[streamElementIndex]; this.trackInfo = new TrackInfo(streamElement.tracks[0].mimeType, manifest.getDurationUs()); this.dataSource = dataSource; this.formatEvaluator = formatEvaluator; this.evaluation = new Evaluation(); TrackEncryptionBox[] trackEncryptionBoxes = null; ProtectionElement protectionElement = manifest.protectionElement; if (protectionElement != null) { byte[] keyId = getKeyId(protectionElement.data); trackEncryptionBoxes = new TrackEncryptionBox[1]; trackEncryptionBoxes[0] = new TrackEncryptionBox(true, INITIALIZATION_VECTOR_SIZE, keyId); } int trackCount = trackIndices != null ? trackIndices.length : streamElement.tracks.length; formats = new SmoothStreamingFormat[trackCount]; extractors = new SparseArray<FragmentedMp4Extractor>(); int maxWidth = 0; int maxHeight = 0; for (int i = 0; i < trackCount; i++) { int trackIndex = trackIndices != null ? 
trackIndices[i] : i; TrackElement trackElement = streamElement.tracks[trackIndex]; formats[i] = new SmoothStreamingFormat(String.valueOf(trackIndex), trackElement.mimeType, trackElement.maxWidth, trackElement.maxHeight, trackElement.numChannels, trackElement.sampleRate, trackElement.bitrate, trackIndex); maxWidth = Math.max(maxWidth, trackElement.maxWidth); maxHeight = Math.max(maxHeight, trackElement.maxHeight); MediaFormat mediaFormat = getMediaFormat(streamElement, trackIndex); int trackType = streamElement.type == StreamElement.TYPE_VIDEO ? Track.TYPE_VIDEO : Track.TYPE_AUDIO; FragmentedMp4Extractor extractor = new FragmentedMp4Extractor( FragmentedMp4Extractor.WORKAROUND_EVERY_VIDEO_FRAME_IS_SYNC_FRAME); extractor.setTrack(new Track(trackIndex, trackType, streamElement.timeScale, mediaFormat, trackEncryptionBoxes)); if (protectionElement != null) { extractor.putPsshInfo(protectionElement.uuid, protectionElement.data); } extractors.put(trackIndex, extractor); } this.maxHeight = maxHeight; this.maxWidth = maxWidth; Arrays.sort(formats, new DecreasingBandwidthComparator()); }
/**
 * Constructor to use for live streaming.
 * <p>
 * May also be used for fixed duration content, in which case the call is equivalent to calling
 * the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param adaptationSetIndex The index of the adaptation set that should be used.
 * @param representationIndices The indices of the representations within the adaptation set
 *     that should be used. May be null if all representations within the adaptation set should
 *     be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
 *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
 *     as the server's unix time minus the local elapsed time. If unknown, set to 0.
 * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
    FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
    Handler eventHandler, EventListener eventListener) {
  // Delegate with ms arguments converted to us, starting playback at the live edge.
  this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
      dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
      elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener);
}
/**
 * Constructor to use for live DVR streaming.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param adaptationSetIndex The index of the adaptation set that should be used.
 * @param representationIndices The indices of the representations within the adaptation set
 *     that should be used. May be null if all representations within the adaptation set should
 *     be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
 *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
 *     as the server's unix time minus the local elapsed time. If unknown, set to 0.
 * @param startAtLiveEdge True if the stream should start at the live edge; false if it should
 *     start at the beginning of the live window.
 * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
    FormatEvaluator formatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
    boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener) {
  // Delegate with ms arguments converted to us; the caller chooses the start position.
  this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
      dataSource, formatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
      elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener);
}
/**
 * Constructor to use for live streaming.
 * <p>
 * May also be used for fixed duration content, in which case the call is equivalent to calling
 * the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param streamElementIndex The index of the stream element in the manifest to be provided by
 *     the source.
 * @param trackIndices The indices of the tracks within the stream element to be considered by
 *     the source. May be null if all tracks within the element should be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 */
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
    int streamElementIndex, int[] trackIndices, DataSource dataSource,
    FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
  this(manifestFetcher, manifestFetcher.getManifest(), streamElementIndex, trackIndices,
      dataSource, formatEvaluator, liveEdgeLatencyMs);
}
/**
 * Lightweight constructor to use for fixed duration content.
 *
 * @param trackSelector Selects tracks to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 * @param durationMs The duration of the content.
 * @param adaptationSetType The type of the adaptation set to which the representations belong.
 *     One of {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
 *     {@link AdaptationSet#TYPE_TEXT}.
 * @param representations The representations to be considered by the source.
 */
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType,
    Representation... representations) {
  // Wrap the varargs into a list and delegate to the List-based overload.
  this(trackSelector, dataSource, adaptiveFormatEvaluator, durationMs, adaptationSetType,
      Arrays.asList(representations));
}
/**
 * Lightweight constructor to use for fixed duration content.
 *
 * @param trackSelector Selects tracks to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 * @param durationMs The duration of the content.
 * @param adaptationSetType The type of the adaptation set to which the representations belong.
 *     One of {@link AdaptationSet#TYPE_AUDIO}, {@link AdaptationSet#TYPE_VIDEO} and
 *     {@link AdaptationSet#TYPE_TEXT}.
 * @param representations The representations to be considered by the source.
 */
public DashChunkSource(DashTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator, long durationMs, int adaptationSetType,
    List<Representation> representations) {
  // Synthesize a single-period manifest around the supplied representations.
  this(buildManifest(durationMs, adaptationSetType, representations), trackSelector, dataSource,
      adaptiveFormatEvaluator);
}
/**
 * Constructor to use for live streaming.
 * <p>
 * May also be used for fixed duration content, in which case the call is equivalent to calling
 * the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
 *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
 *     as the server's unix time minus the local elapsed time. If unknown, set to 0.
 * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
 */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    DashTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
    Handler eventHandler, EventListener eventListener, int eventSourceId) {
  // Convert millisecond arguments to microseconds and always start at the live edge.
  this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource,
      adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
      elapsedRealtimeOffsetMs * 1000, true, eventHandler, eventListener, eventSourceId);
}
/**
 * Constructor to use for live DVR streaming.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 * @param elapsedRealtimeOffsetMs If known, an estimate of the instantaneous difference between
 *     server-side unix time and {@link SystemClock#elapsedRealtime()} in milliseconds, specified
 *     as the server's unix time minus the local elapsed time. If unknown, set to 0.
 * @param startAtLiveEdge True if the stream should start at the live edge; false if it should
 *     start at the beginning of the live window.
 * @param eventHandler A handler to use when delivering events to {@code EventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
 */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    DashTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs, long elapsedRealtimeOffsetMs,
    boolean startAtLiveEdge, Handler eventHandler, EventListener eventListener,
    int eventSourceId) {
  // Convert millisecond arguments to microseconds; the caller chooses the start position.
  this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource,
      adaptiveFormatEvaluator, new SystemClock(), liveEdgeLatencyMs * 1000,
      elapsedRealtimeOffsetMs * 1000, startAtLiveEdge, eventHandler, eventListener,
      eventSourceId);
}
/**
 * Constructor to use for live streaming.
 * <p>
 * May also be used for fixed duration content, in which case the call is equivalent to calling
 * the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param trackSelector Selects tracks from the manifest to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 */
public SmoothStreamingChunkSource(ManifestFetcher<SmoothStreamingManifest> manifestFetcher,
    SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator, long liveEdgeLatencyMs) {
  this(manifestFetcher, manifestFetcher.getManifest(), trackSelector, dataSource,
      adaptiveFormatEvaluator, liveEdgeLatencyMs);
}
/**
 * Constructor to use for live streaming.
 * <p>
 * May also be used for fixed duration content, in which case the call is equivalent to calling
 * the other constructor, passing {@code manifestFetcher.getManifest()} as the first argument.
 *
 * @param manifestFetcher A fetcher for the manifest, which must have already successfully
 *     completed an initial load.
 * @param adaptationSetIndex The index of the adaptation set that should be used.
 * @param representationIndices The indices of the representations within the adaptation set
 *     that should be used. May be null if all representations within the adaptation set should
 *     be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param liveEdgeLatencyMs For live streams, the number of milliseconds that the playback should
 *     lag behind the "live edge" (i.e. the end of the most recently defined media in the
 *     manifest). Choosing a small value will minimize latency introduced by the player, however
 *     note that the value sets an upper bound on the length of media that the player can buffer.
 *     Hence a small value may increase the probability of rebuffering and playback failures.
 */
public DashChunkSource(ManifestFetcher<MediaPresentationDescription> manifestFetcher,
    int adaptationSetIndex, int[] representationIndices, DataSource dataSource,
    FormatEvaluator formatEvaluator, long liveEdgeLatencyMs) {
  // Convert the latency to microseconds before delegating.
  this(manifestFetcher, manifestFetcher.getManifest(), adaptationSetIndex, representationIndices,
      dataSource, formatEvaluator, liveEdgeLatencyMs * 1000);
}
/**
 * Lightweight constructor to use for fixed duration content.
 *
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param representations The representations to be considered by the source.
 */
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator,
    Representation... representations) {
  // Wrap the varargs into a list, synthesize a manifest around it, and use adaptation set 0
  // with all of its representations.
  this(buildManifest(Arrays.asList(representations)), 0, null, dataSource, formatEvaluator);
}
/**
 * Lightweight constructor to use for fixed duration content.
 *
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 * @param representations The representations to be considered by the source.
 */
public DashChunkSource(DataSource dataSource, FormatEvaluator formatEvaluator,
    List<Representation> representations) {
  // Synthesize a manifest around the representations and use adaptation set 0 with all of its
  // representations.
  this(buildManifest(representations), 0, null, dataSource, formatEvaluator);
}
/**
 * Constructor to use for fixed duration content.
 *
 * @param manifest The manifest.
 * @param adaptationSetIndex The index of the adaptation set that should be used.
 * @param representationIndices The indices of the representations within the adaptation set
 *     that should be used. May be null if all representations within the adaptation set should
 *     be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 */
public DashChunkSource(MediaPresentationDescription manifest, int adaptationSetIndex,
    int[] representationIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
  // No manifest fetcher, zero offsets, no live-edge start and no event delivery for fixed
  // duration content.
  this(null, manifest, adaptationSetIndex, representationIndices, dataSource, formatEvaluator,
      new SystemClock(), 0, 0, false, null, null);
}
/**
 * Constructor to use for fixed duration content.
 *
 * @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
 * @param streamElementIndex The index of the stream element in the manifest to be provided by
 *     the source.
 * @param trackIndices The indices of the tracks within the stream element to be considered by
 *     the source. May be null if all tracks within the element should be considered.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param formatEvaluator Selects from the available formats.
 */
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest, int streamElementIndex,
    int[] trackIndices, DataSource dataSource, FormatEvaluator formatEvaluator) {
  // No manifest fetcher and zero live edge latency for fixed duration content.
  this(null, manifest, streamElementIndex, trackIndices, dataSource, formatEvaluator, 0);
}
/**
 * Constructor to use for fixed duration content.
 *
 * @param manifest The manifest.
 * @param trackSelector Selects tracks from manifest periods to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 */
public DashChunkSource(MediaPresentationDescription manifest, DashTrackSelector trackSelector,
    DataSource dataSource, FormatEvaluator adaptiveFormatEvaluator) {
  // No manifest fetcher, zero offsets, no live-edge start and no event delivery for fixed
  // duration content.
  this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, new SystemClock(), 0,
      0, false, null, null, 0);
}
/**
 * Constructor to use for fixed duration content.
 *
 * @param manifest The manifest parsed from {@code baseUrl + "/Manifest"}.
 * @param trackSelector Selects tracks from the manifest to be exposed by this source.
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param adaptiveFormatEvaluator For adaptive tracks, selects from the available formats.
 */
public SmoothStreamingChunkSource(SmoothStreamingManifest manifest,
    SmoothStreamingTrackSelector trackSelector, DataSource dataSource,
    FormatEvaluator adaptiveFormatEvaluator) {
  // No manifest fetcher and zero live edge latency for fixed duration content.
  this(null, manifest, trackSelector, dataSource, adaptiveFormatEvaluator, 0);
}