private static DrmInitData getDrmInitData(MediaPresentationDescription manifest,
    int adaptationSetIndex) {
  AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
  String drmInitMimeType = mimeTypeIsWebm(adaptationSet.representations.get(0).format.mimeType)
      ? MimeTypes.VIDEO_WEBM : MimeTypes.VIDEO_MP4;
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  } else {
    DrmInitData.Mapped drmInitData = null;
    for (ContentProtection contentProtection : adaptationSet.contentProtections) {
      if (contentProtection.uuid != null && contentProtection.data != null) {
        if (drmInitData == null) {
          drmInitData = new DrmInitData.Mapped(drmInitMimeType);
        }
        drmInitData.put(contentProtection.uuid, contentProtection.data);
      }
    }
    return drmInitData;
  }
}
@Override
public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
    throws IOException {
  Period period = manifest.getPeriod(periodIndex);
  int adaptationSetIndex = period.getAdaptationSetIndex(adaptationSetType);
  AdaptationSet adaptationSet = period.adaptationSets.get(adaptationSetIndex);
  int[] representationIndices = getRepresentationIndices(adaptationSet, representationIds,
      canIncludeAdditionalVideoRepresentations);
  if (representationIndices.length > representationIds.length) {
    includedAdditionalVideoRepresentations = true;
  }
  if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
    output.adaptiveTrack(manifest, periodIndex, adaptationSetIndex, representationIndices);
  }
  for (int i = 0; i < representationIndices.length; i++) {
    output.fixedTrack(manifest, periodIndex, adaptationSetIndex, representationIndices[i]);
  }
}
@Override
public void fixedTrack(MediaPresentationDescription manifest, int periodIndex,
    int adaptationSetIndex, int representationIndex) {
  List<AdaptationSet> adaptationSets = manifest.getPeriod(periodIndex).adaptationSets;
  AdaptationSet adaptationSet = adaptationSets.get(adaptationSetIndex);
  Format representationFormat = adaptationSet.representations.get(representationIndex).format;
  String mediaMimeType = getMediaMimeType(representationFormat);
  if (mediaMimeType == null) {
    Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media mime type)");
    return;
  }
  MediaFormat trackFormat = getTrackFormat(adaptationSet.type, representationFormat, mediaMimeType,
      manifest.dynamic ? C.UNKNOWN_TIME_US : manifest.duration * 1000);
  if (trackFormat == null) {
    Log.w(TAG, "Skipped track " + representationFormat.id + " (unknown media format)");
    return;
  }
  tracks.add(new ExposedTrack(trackFormat, adaptationSetIndex, representationFormat));
}
private static MediaFormat getTrackFormat(int adaptationSetType, Format format,
    String mediaMimeType, long durationUs) {
  switch (adaptationSetType) {
    case AdaptationSet.TYPE_VIDEO:
      return MediaFormat.createVideoFormat(format.id, mediaMimeType, format.bitrate,
          MediaFormat.NO_VALUE, durationUs, format.width, format.height, null);
    case AdaptationSet.TYPE_AUDIO:
      return MediaFormat.createAudioFormat(format.id, mediaMimeType, format.bitrate,
          MediaFormat.NO_VALUE, durationUs, format.audioChannels, format.audioSamplingRate, null,
          format.language);
    case AdaptationSet.TYPE_TEXT:
      return MediaFormat.createTextFormat(format.id, mediaMimeType, format.bitrate, durationUs,
          format.language);
    default:
      return null;
  }
}
private static DrmInitData getDrmInitData(AdaptationSet adaptationSet) {
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  } else {
    DrmInitData.Mapped drmInitData = null;
    for (int i = 0; i < adaptationSet.contentProtections.size(); i++) {
      ContentProtection contentProtection = adaptationSet.contentProtections.get(i);
      if (contentProtection.uuid != null && contentProtection.data != null) {
        if (drmInitData == null) {
          drmInitData = new DrmInitData.Mapped();
        }
        drmInitData.put(contentProtection.uuid, contentProtection.data);
      }
    }
    return drmInitData;
  }
}
private static MediaPresentationDescription buildManifest(List<Representation> representations) {
  Representation firstRepresentation = representations.get(0);
  AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
  Period period = new Period(null, firstRepresentation.periodStartMs,
      firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
  long duration = firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
  return new MediaPresentationDescription(-1, duration, -1, false, -1, -1, null, null,
      Collections.singletonList(period));
}
@Override
public TrackRenderer[] buildRenderers(HostActivity host, ExoPlayer player, Surface surface) {
  Handler handler = new Handler();
  LogcatLogger logger = new LogcatLogger(TAG, player);
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  String userAgent = TestUtil.getUserAgent(host);
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(host, null, userAgent);
  videoTrackSelector = new TrackSelector(AdaptationSet.TYPE_VIDEO,
      canIncludeAdditionalVideoFormats, videoFormats);
  ChunkSource videoChunkSource = new DashChunkSource(mpd, videoTrackSelector, videoDataSource,
      new FormatEvaluator.RandomEvaluator(0));
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  DebugMediaCodecVideoTrackRenderer videoRenderer = new DebugMediaCodecVideoTrackRenderer(host,
      videoSampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0,
      handler, logger, 50);
  videoCounters = videoRenderer.codecCounters;
  player.sendMessage(videoRenderer, DebugMediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(host, null, userAgent);
  TrackSelector audioTrackSelector = new TrackSelector(AdaptationSet.TYPE_AUDIO, false,
      audioFormats);
  ChunkSource audioChunkSource = new DashChunkSource(mpd, audioTrackSelector, audioDataSource,
      null);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, AUDIO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource, MediaCodecSelector.DEFAULT, handler, logger);
  audioCounters = audioRenderer.codecCounters;
  TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
  renderers[VIDEO_RENDERER_INDEX] = videoRenderer;
  renderers[AUDIO_RENDERER_INDEX] = audioRenderer;
  return renderers;
}
private TrackSelector(int adaptationSetType, boolean canIncludeAdditionalVideoRepresentations,
    String[] representationIds) {
  Assertions.checkState(!canIncludeAdditionalVideoRepresentations
      || adaptationSetType == AdaptationSet.TYPE_VIDEO);
  this.adaptationSetType = adaptationSetType;
  this.canIncludeAdditionalVideoRepresentations = canIncludeAdditionalVideoRepresentations;
  this.representationIds = representationIds;
}
private static int[] getRepresentationIndices(AdaptationSet adaptationSet,
    String[] representationIds, boolean canIncludeAdditionalVideoRepresentations)
    throws IOException {
  List<Representation> availableRepresentations = adaptationSet.representations;
  List<Integer> selectedRepresentationIndices = new ArrayList<>();
  // Always select explicitly listed representations, failing if they're missing.
  for (int i = 0; i < representationIds.length; i++) {
    String representationId = representationIds[i];
    boolean foundIndex = false;
    for (int j = 0; j < availableRepresentations.size() && !foundIndex; j++) {
      if (availableRepresentations.get(j).format.id.equals(representationId)) {
        selectedRepresentationIndices.add(j);
        foundIndex = true;
      }
    }
    if (!foundIndex) {
      throw new IllegalStateException("Representation " + representationId + " not found.");
    }
  }
  // Select additional video representations, if supported by the device.
  if (canIncludeAdditionalVideoRepresentations) {
    int[] supportedVideoRepresentationIndices = VideoFormatSelectorUtil.selectVideoFormats(
        availableRepresentations, null, false, true, -1, -1);
    for (int i = 0; i < supportedVideoRepresentationIndices.length; i++) {
      int representationIndex = supportedVideoRepresentationIndices[i];
      if (!selectedRepresentationIndices.contains(representationIndex)) {
        // Log the format that is actually added (representationIndex), not the loop index.
        Log.d(TAG, "Adding video format: "
            + availableRepresentations.get(representationIndex).format.id);
        selectedRepresentationIndices.add(representationIndex);
      }
    }
  }
  return Util.toArray(selectedRepresentationIndices);
}
@Override
public void selectTracks(MediaPresentationDescription manifest, int periodIndex, Output output)
    throws IOException {
  Period period = manifest.getPeriod(periodIndex);
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type == adaptationSetType) {
      if (adaptationSetType == AdaptationSet.TYPE_VIDEO) {
        int[] representations;
        if (filterVideoRepresentations) {
          representations = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
              context, adaptationSet.representations, null,
              filterProtectedHdContent && adaptationSet.hasContentProtection());
        } else {
          representations = Util.firstIntegersArray(adaptationSet.representations.size());
        }
        int representationCount = representations.length;
        if (representationCount > 1) {
          output.adaptiveTrack(manifest, periodIndex, i, representations);
        }
        for (int j = 0; j < representationCount; j++) {
          output.fixedTrack(manifest, periodIndex, i, representations[j]);
        }
      } else {
        for (int j = 0; j < adaptationSet.representations.size(); j++) {
          output.fixedTrack(manifest, periodIndex, i, j);
        }
      }
    }
  }
}
@Override
public void adaptiveTrack(MediaPresentationDescription manifest, int periodIndex,
    int adaptationSetIndex, int[] representationIndices) {
  if (adaptiveFormatEvaluator == null) {
    Log.w(TAG, "Skipping adaptive track (missing format evaluator)");
    return;
  }
  AdaptationSet adaptationSet = manifest.getPeriod(periodIndex).adaptationSets.get(
      adaptationSetIndex);
  int maxWidth = 0;
  int maxHeight = 0;
  Format maxHeightRepresentationFormat = null;
  Format[] representationFormats = new Format[representationIndices.length];
  for (int i = 0; i < representationFormats.length; i++) {
    Format format = adaptationSet.representations.get(representationIndices[i]).format;
    if (maxHeightRepresentationFormat == null || format.height > maxHeight) {
      maxHeightRepresentationFormat = format;
    }
    maxWidth = Math.max(maxWidth, format.width);
    maxHeight = Math.max(maxHeight, format.height);
    representationFormats[i] = format;
  }
  Arrays.sort(representationFormats, new DecreasingBandwidthComparator());
  long trackDurationUs = live ? C.UNKNOWN_TIME_US : manifest.duration * 1000;
  String mediaMimeType = getMediaMimeType(maxHeightRepresentationFormat);
  if (mediaMimeType == null) {
    Log.w(TAG, "Skipped adaptive track (unknown media mime type)");
    return;
  }
  MediaFormat trackFormat = getTrackFormat(adaptationSet.type, maxHeightRepresentationFormat,
      mediaMimeType, trackDurationUs);
  if (trackFormat == null) {
    Log.w(TAG, "Skipped adaptive track (unknown media format)");
    return;
  }
  tracks.add(new ExposedTrack(trackFormat.copyAsAdaptive(null), adaptationSetIndex,
      representationFormats, maxWidth, maxHeight));
}
private static MediaPresentationDescription buildManifest(long durationMs, int adaptationSetType,
    List<Representation> representations) {
  AdaptationSet adaptationSet = new AdaptationSet(0, adaptationSetType, representations);
  Period period = new Period(null, 0, Collections.singletonList(adaptationSet));
  return new MediaPresentationDescription(-1, durationMs, -1, false, -1, -1, null, null,
      Collections.singletonList(period));
}
public PeriodHolder(int localIndex, MediaPresentationDescription manifest, int manifestIndex,
    ExposedTrack selectedTrack) {
  this.localIndex = localIndex;
  Period period = manifest.getPeriod(manifestIndex);
  long periodDurationUs = getPeriodDurationUs(manifest, manifestIndex);
  AdaptationSet adaptationSet = period.adaptationSets.get(selectedTrack.adaptationSetIndex);
  List<Representation> representations = adaptationSet.representations;
  startTimeUs = period.startMs * 1000;
  drmInitData = getDrmInitData(adaptationSet);
  if (!selectedTrack.isAdaptive()) {
    representationIndices = new int[] {
        getRepresentationIndex(representations, selectedTrack.fixedFormat.id)};
  } else {
    representationIndices = new int[selectedTrack.adaptiveFormats.length];
    for (int j = 0; j < selectedTrack.adaptiveFormats.length; j++) {
      representationIndices[j] = getRepresentationIndex(
          representations, selectedTrack.adaptiveFormats[j].id);
    }
  }
  representationHolders = new HashMap<>();
  for (int i = 0; i < representationIndices.length; i++) {
    Representation representation = representations.get(representationIndices[i]);
    RepresentationHolder representationHolder = new RepresentationHolder(startTimeUs,
        periodDurationUs, representation);
    representationHolders.put(representation.format.id, representationHolder);
  }
  updateRepresentationIndependentProperties(periodDurationUs,
      representations.get(representationIndices[0]));
}
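// A minimal sketch of the getRepresentationIndex helper assumed by the PeriodHolder constructor
// above; the exact error handling and exception message are assumptions, not library code.
private static int getRepresentationIndex(List<Representation> representations, String formatId) {
  for (int i = 0; i < representations.size(); i++) {
    if (representations.get(i).format.id.equals(formatId)) {
      return i;
    }
  }
  // Assumed behavior: fail fast if the manifest no longer contains the selected format.
  throw new IllegalStateException("Missing format id: " + formatId);
}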
private static MediaPresentationDescription buildMpd(long durationMs,
    List<Representation> representations, boolean live, boolean limitTimeshiftBuffer) {
  AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO, representations);
  Period period = new Period(null, 0, Collections.singletonList(adaptationSet));
  return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, durationMs, -1, live, -1,
      limitTimeshiftBuffer ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1, null, null,
      Collections.singletonList(period));
}
private static MediaPresentationDescription buildMultiPeriodVodMpd() {
  List<Period> periods = new ArrayList<>();
  long timeMs = 0;
  long periodDurationMs = VOD_DURATION_MS;
  for (int i = 0; i < 2; i++) {
    Representation representation = buildVodRepresentation(REGULAR_VIDEO);
    AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
        Collections.singletonList(representation));
    Period period = new Period(null, timeMs, Collections.singletonList(adaptationSet));
    periods.add(period);
    timeMs += periodDurationMs;
  }
  return buildMultiPeriodMpd(timeMs, periods, false, false);
}
private static MediaPresentationDescription buildMultiPeriodLiveMpdWithTimeline() {
  List<Period> periods = new ArrayList<>();
  long periodStartTimeMs = 0;
  long periodDurationMs = LIVE_DURATION_MS;
  for (int i = 0; i < MULTI_PERIOD_COUNT; i++) {
    Representation representation = buildSegmentTimelineRepresentation(LIVE_DURATION_MS, 0);
    AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
        Collections.singletonList(representation));
    Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet));
    periods.add(period);
    periodStartTimeMs += periodDurationMs;
  }
  return buildMultiPeriodMpd(periodDurationMs, periods, true, false);
}
private static MediaPresentationDescription buildMultiPeriodLiveMpdWithTemplate() {
  List<Period> periods = new ArrayList<>();
  long periodStartTimeMs = 0;
  long periodDurationMs = LIVE_DURATION_MS;
  for (int i = 0; i < MULTI_PERIOD_COUNT; i++) {
    Representation representation = buildSegmentTemplateRepresentation();
    AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_VIDEO,
        Collections.singletonList(representation));
    Period period = new Period(null, periodStartTimeMs, Collections.singletonList(adaptationSet));
    periods.add(period);
    periodStartTimeMs += periodDurationMs;
  }
  return buildMultiPeriodMpd(MULTI_PERIOD_LIVE_DURATION_MS, periods, true, false);
}
protected void buildRenderers() {
  boolean filterHdContent = false;
  boolean hasContentProtection = false;
  Period period = currentManifest.getPeriod(0);
  StreamingDrmSessionManager drmSessionManager = null;
  // Determine whether the media has content protection.
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check DRM support if the content is protected.
  if (hasContentProtection) {
    if (Util.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), null, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  buildTrackRenderers(drmSessionManager, filterHdContent);
}
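// A possible shape for the getWidevineSecurityLevel helper used above, assuming the session
// manager exposes the Widevine "securityLevel" property as a string; the SECURITY_LEVEL_1,
// SECURITY_LEVEL_3 and SECURITY_LEVEL_UNKNOWN constants are assumed to be defined by the
// enclosing class. This is a sketch, not the library's verified implementation.
private static int getWidevineSecurityLevel(StreamingDrmSessionManager sessionManager) {
  String securityLevelProperty = sessionManager.getPropertyString("securityLevel");
  return "L1".equals(securityLevelProperty) ? SECURITY_LEVEL_1
      : "L3".equals(securityLevelProperty) ? SECURITY_LEVEL_3 : SECURITY_LEVEL_UNKNOWN;
}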
private static Map<UUID, byte[]> getPsshInfo(MediaPresentationDescription manifest,
    int adaptationSetIndex) {
  AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  } else {
    Map<UUID, byte[]> psshInfo = new HashMap<UUID, byte[]>();
    for (ContentProtection contentProtection : adaptationSet.contentProtections) {
      if (contentProtection.uuid != null && contentProtection.data != null) {
        psshInfo.put(contentProtection.uuid, contentProtection.data);
      }
    }
    return psshInfo.isEmpty() ? null : psshInfo;
  }
}
private static MediaPresentationDescription buildManifest(List<Representation> representations) {
  Representation firstRepresentation = representations.get(0);
  AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
  Period period = new Period(null, firstRepresentation.periodStartMs,
      firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
  long duration = firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
  return new MediaPresentationDescription(-1, duration, -1, false, -1, -1, null, null,
      Collections.singletonList(period));
}
public void testMaxVideoDimensions() {
  DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
      null, null, null);
  MediaFormat out = MediaFormat.createVideoFormat("video/h264", 1, 1, 1, 1, null);
  chunkSource.getMaxVideoDimensions(out);
  assertEquals(WIDE_WIDTH, out.getMaxVideoWidth());
  assertEquals(TALL_HEIGHT, out.getMaxVideoHeight());
}
public void testGetSeekRangeOnVod() {
  DashChunkSource chunkSource = new DashChunkSource(generateVodMpd(), AdaptationSet.TYPE_VIDEO,
      null, null, mock(FormatEvaluator.class));
  chunkSource.enable();
  TimeRange seekRange = chunkSource.getSeekRange();
  checkSeekRange(seekRange, 0, VOD_DURATION_MS * 1000);
  long[] seekRangeValuesMs = seekRange.getCurrentBoundsMs(null);
  assertEquals(0, seekRangeValuesMs[0]);
  assertEquals(VOD_DURATION_MS, seekRangeValuesMs[1]);
}
private static MediaPresentationDescription generateMpd(boolean live,
    List<Representation> representations, boolean limitTimeshiftBuffer) {
  Representation firstRepresentation = representations.get(0);
  AdaptationSet adaptationSet = new AdaptationSet(0, AdaptationSet.TYPE_UNKNOWN, representations);
  Period period = new Period(null, firstRepresentation.periodStartMs,
      firstRepresentation.periodDurationMs, Collections.singletonList(adaptationSet));
  long duration = live ? TrackRenderer.UNKNOWN_TIME_US
      : firstRepresentation.periodDurationMs - firstRepresentation.periodStartMs;
  return new MediaPresentationDescription(AVAILABILITY_START_TIME_MS, duration, -1, live, -1,
      limitTimeshiftBuffer ? LIVE_TIMESHIFT_BUFFER_DEPTH_MS : -1, null, null,
      Collections.singletonList(period));
}
private DashChunkSource setupDashChunkSource(MediaPresentationDescription mpd, long periodStartMs,
    long liveEdgeLatencyMs) {
  @SuppressWarnings("unchecked")
  ManifestFetcher<MediaPresentationDescription> manifestFetcher = mock(ManifestFetcher.class);
  when(manifestFetcher.getManifest()).thenReturn(mpd);
  DashChunkSource chunkSource = new DashChunkSource(manifestFetcher, mpd,
      AdaptationSet.TYPE_VIDEO, null, mockDataSource, EVALUATOR,
      new FakeClock(AVAILABILITY_CURRENT_TIME_MS + periodStartMs), liveEdgeLatencyMs * 1000,
      AVAILABILITY_REALTIME_OFFSET_MS * 1000, false, null, null);
  chunkSource.enable();
  return chunkSource;
}
SimpleDashParser parse(String localPath) throws IOException {
  InputStream inputStream = new BufferedInputStream(new FileInputStream(localPath));
  MediaPresentationDescriptionParser mpdParser = new MediaPresentationDescriptionParser();
  MediaPresentationDescription mpd = mpdParser.parse(localPath, inputStream);
  if (mpd.getPeriodCount() < 1) {
    throw new IOException("At least one period is required");
  }
  Period period = mpd.getPeriod(0);
  List<AdaptationSet> adaptationSets = period.adaptationSets;
  AdaptationSet videoAdaptation =
      adaptationSets.get(period.getAdaptationSetIndex(AdaptationSet.TYPE_VIDEO));
  List<Representation> representations = videoAdaptation.representations;
  if (representations == null || representations.isEmpty()) {
    throw new IOException("At least one video representation is required");
  }
  Representation representation = representations.get(0);
  format = representation.format;
  hasContentProtection = videoAdaptation.hasContentProtection();
  if (hasContentProtection) {
    loadDrmInitData(representation);
  }
  return this;
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
public static DefaultDashTrackSelector newAudioInstance() {
  return new DefaultDashTrackSelector(AdaptationSet.TYPE_AUDIO, null, false, false);
}
public static DefaultDashTrackSelector newTextInstance() {
  return new DefaultDashTrackSelector(AdaptationSet.TYPE_TEXT, null, false, false);
}
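// For completeness, the newVideoInstance factory referenced by the buildRenderers methods in this
// section would plausibly look like the sketch below. The parameter order mirrors newAudioInstance
// and newTextInstance above and the filterVideoRepresentations / filterProtectedHdContent names
// come from the selectTracks snippet earlier; this is an assumption, not verified library source.
public static DefaultDashTrackSelector newVideoInstance(Context context,
    boolean filterVideoRepresentations, boolean filterProtectedHdContent) {
  return new DefaultDashTrackSelector(AdaptationSet.TYPE_VIDEO, context,
      filterVideoRepresentations, filterProtectedHdContent);
}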
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[Player.RENDERER_COUNT];
  renderers[Player.TYPE_VIDEO] = videoRenderer;
  renderers[Player.TYPE_AUDIO] = audioRenderer;
  renderers[Player.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, false), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, true,
      mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}