/**
 * @param chunkSource A {@link ChunkSource} from which chunks to load are obtained.
 * @param loadControl Controls when the source is permitted to load data.
 * @param bufferSizeContribution The contribution of this source to the media buffer, in bytes.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 * @param eventSourceId An identifier that gets passed to {@code eventListener} methods.
 * @param minLoadableRetryCount The minimum number of times that the source should retry a load
 *     before propagating an error.
 */
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
    int eventSourceId, int minLoadableRetryCount) {
  this.chunkSource = chunkSource;
  this.loadControl = loadControl;
  this.bufferSizeContribution = bufferSizeContribution;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  this.minLoadableRetryCount = minLoadableRetryCount;
  currentLoadableHolder = new ChunkOperationHolder();
  mediaChunks = new LinkedList<>();
  readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
  sampleQueue = new DefaultTrackOutput(loadControl.getAllocator());
  state = STATE_IDLE;
  pendingResetPositionUs = NO_RESET_PENDING;
}
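A minimal wiring sketch for the constructor documented above. The helper name, buffer sizes, and retry count are illustrative assumptions, not values taken from these snippets; the ChunkSource, Handler, and listener are assumed to be supplied by the caller, as in the builder methods further below.

// Hypothetical helper, not part of the snippets in this collection. Buffer sizes and
// the retry count are placeholder assumptions.
private static ChunkSampleSource buildChunkSampleSource(ChunkSource chunkSource,
    Handler eventHandler, ChunkSampleSource.EventListener eventListener, int eventSourceId) {
  // 64 KiB allocations; this source contributes 200 segments to the shared buffer.
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(64 * 1024));
  return new ChunkSampleSource(chunkSource, loadControl, 200 * 64 * 1024, eventHandler,
      eventListener, eventSourceId, 3 /* minLoadableRetryCount */);
}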
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, boolean frameAccurateSeeking, Handler eventHandler,
    EventListener eventListener, int eventSourceId, int minLoadableRetryCount) {
  this.chunkSource = chunkSource;
  this.loadControl = loadControl;
  this.bufferSizeContribution = bufferSizeContribution;
  this.frameAccurateSeeking = frameAccurateSeeking;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  this.minLoadableRetryCount = minLoadableRetryCount;
  currentLoadableHolder = new ChunkOperationHolder();
  mediaChunks = new LinkedList<MediaChunk>();
  readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
  state = STATE_UNPREPARED;
}
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
    int eventSourceId, int minLoadableRetryCount) {
  this.chunkSource = chunkSource;
  this.loadControl = loadControl;
  this.bufferSizeContribution = bufferSizeContribution;
  this.minLoadableRetryCount = minLoadableRetryCount;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  this.pendingResetPositionUs = NO_RESET_PENDING;
  extractors = new LinkedList<>();
}
@Override
public TrackRenderer[] buildRenderers(HostActivity host, ExoPlayer player, Surface surface) {
  Handler handler = new Handler();
  LogcatLogger logger = new LogcatLogger(TAG, player);
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  String userAgent = TestUtil.getUserAgent(host);
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(host, null, userAgent);
  videoTrackSelector = new TrackSelector(AdaptationSet.TYPE_VIDEO,
      canIncludeAdditionalVideoFormats, videoFormats);
  ChunkSource videoChunkSource = new DashChunkSource(mpd, videoTrackSelector, videoDataSource,
      new FormatEvaluator.RandomEvaluator(0));
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  DebugMediaCodecVideoTrackRenderer videoRenderer = new DebugMediaCodecVideoTrackRenderer(host,
      videoSampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      0, handler, logger, 50);
  videoCounters = videoRenderer.codecCounters;
  player.sendMessage(videoRenderer, DebugMediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(host, null, userAgent);
  TrackSelector audioTrackSelector = new TrackSelector(AdaptationSet.TYPE_AUDIO, false,
      audioFormats);
  ChunkSource audioChunkSource = new DashChunkSource(mpd, audioTrackSelector, audioDataSource,
      null);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, AUDIO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource, MediaCodecSelector.DEFAULT, handler, logger);
  audioCounters = audioRenderer.codecCounters;
  TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
  renderers[VIDEO_RENDERER_INDEX] = videoRenderer;
  renderers[AUDIO_RENDERER_INDEX] = audioRenderer;
  return renderers;
}
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
    int eventSourceId, int minLoadableRetryCount) {
  this.chunkSource = chunkSource;
  this.loadControl = loadControl;
  this.bufferSizeContribution = bufferSizeContribution;
  this.minLoadableRetryCount = minLoadableRetryCount;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  this.pendingResetPositionUs = NO_RESET_PENDING;
  extractors = new LinkedList<>();
  chunkOperationHolder = new ChunkOperationHolder();
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  PtsTimestampAdjusterProvider timestampAdjusterProvider = new PtsTimestampAdjusterProvider();
  // Build the video/audio/metadata renderers.
  DataSource dataSource = createDataSource(context, userAgent, bandwidthMeter);
  HlsChunkSource chunkSource = new HlsChunkSource(true /* isMaster */, dataSource, url, manifest,
      DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter,
      timestampAdjusterProvider, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      MAIN_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  final MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      sampleSource, MediaCodecSelector.DEFAULT, null, true, player.getMainHandler(), player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  final MetadataTrackRenderer<List<Id3Frame>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSource, new Id3Parser(), player, mainHandler.getLooper());
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  int[] variantIndices = null;
  if (manifest instanceof HlsMasterPlaylist) {
    HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
    try {
      variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, masterPlaylist.variants, null, false);
    } catch (DecoderQueryException e) {
      callback.onRenderersError(e);
      return;
    }
  }
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
      variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  callback.onRenderers(null, null, renderers, bandwidthMeter);
}
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, boolean frameAccurateSeeking, Handler eventHandler,
    EventListener eventListener, int eventSourceId) {
  this.chunkSource = chunkSource;
  this.loadControl = loadControl;
  this.bufferSizeContribution = bufferSizeContribution;
  this.frameAccurateSeeking = frameAccurateSeeking;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  this.eventSourceId = eventSourceId;
  currentLoadableHolder = new ChunkOperationHolder();
  mediaChunks = new LinkedList<MediaChunk>();
  readOnlyMediaChunks = Collections.unmodifiableList(mediaChunks);
  state = STATE_UNPREPARED;
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onSingleManifest(SmoothStreamingManifest manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  // Check drm support if necessary.
  DrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (manifest.protectionElement != null) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newFrameworkInstance(
          manifest.protectionElement.uuid, player.getPlaybackLooper(), drmCallback, null,
          player.getMainHandler(), player);
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false),
      videoDataSource, new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newAudioInstance(), audioDataSource, null,
      LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      DefaultSmoothStreamingTrackSelector.newTextInstance(), textDataSource, null,
      LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  int[] variantIndices = null;
  if (manifest instanceof HlsMasterPlaylist) {
    HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
    try {
      variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, masterPlaylist.variants, null, false);
    } catch (DecoderQueryException e) {
      player.onRenderersError(e);
      return;
    }
    if (variantIndices.length == 0) {
      player.onRenderersError(new IllegalStateException("No variants selected."));
      return;
    }
  }
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
      variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
  MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSource, new Id3Parser(), player, mainHandler.getLooper());
  Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
      mainHandler.getLooper());
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_METADATA] = id3Renderer;
  renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution) {
  this(chunkSource, loadControl, bufferSizeContribution, null, null, 0);
}
public HlsSampleSource(HlsChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
    int eventSourceId) {
  this(chunkSource, loadControl, bufferSizeContribution, eventHandler, eventListener,
      eventSourceId, DEFAULT_MIN_LOADABLE_RETRY_COUNT);
}
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution) {
  this(chunkSource, loadControl, bufferSizeContribution, null, null, 0);
}
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, Handler eventHandler, EventListener eventListener,
    int eventSourceId) {
  this(chunkSource, loadControl, bufferSizeContribution, eventHandler, eventListener,
      eventSourceId, DEFAULT_MIN_LOADABLE_RETRY_COUNT);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  int[] variantIndices = null;
  if (manifest instanceof HlsMasterPlaylist) {
    HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
    try {
      variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, masterPlaylist.variants, null, false);
    } catch (DecoderQueryException e) {
      player.onRenderersError(e);
      return;
    }
    if (variantIndices.length == 0) {
      player.onRenderersError(new IllegalStateException("No variants selected."));
      return;
    }
  }
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
      variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, MediaPlayer.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
  MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSource, new Id3Parser(), player, mainHandler.getLooper());
  Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
      mainHandler.getLooper());
  TrackRenderer[] renderers = new TrackRenderer[MediaPlayer.RENDERER_COUNT];
  renderers[MediaPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[MediaPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[MediaPlayer.TYPE_METADATA] = id3Renderer;
  renderers[MediaPlayer.TYPE_TEXT] = closedCaptionRenderer;
  player.onRenderers(null, null, renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[Player.RENDERER_COUNT];
  renderers[Player.TYPE_VIDEO] = videoRenderer;
  renderers[Player.TYPE_AUDIO] = audioRenderer;
  renderers[Player.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  int[] variantIndices = null;
  if (manifest instanceof HlsMasterPlaylist) {
    HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
    try {
      variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, masterPlaylist.variants, null, false);
    } catch (DecoderQueryException e) {
      player.onRenderersError(e);
      return;
    }
    if (variantIndices.length == 0) {
      player.onRenderersError(new IllegalStateException("No variants selected."));
      return;
    }
  }
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
      variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      null, true, player.getMainHandler(), player, AudioCapabilities.getCapabilities(context));
  MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSource, new Id3Parser(), player, mainHandler.getLooper());
  Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
      mainHandler.getLooper());
  TrackRenderer[] renderers = new TrackRenderer[Player.RENDERER_COUNT];
  renderers[Player.TYPE_VIDEO] = videoRenderer;
  renderers[Player.TYPE_AUDIO] = audioRenderer;
  renderers[Player.TYPE_METADATA] = id3Renderer;
  renderers[Player.TYPE_TEXT] = closedCaptionRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onSingleManifest(HlsPlaylist manifest) {
  if (canceled) {
    return;
  }
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  int[] variantIndices = null;
  if (manifest instanceof HlsMasterPlaylist) {
    HlsMasterPlaylist masterPlaylist = (HlsMasterPlaylist) manifest;
    try {
      variantIndices = VideoFormatSelectorUtil.selectVideoFormatsForDefaultDisplay(
          context, masterPlaylist.variants, null, false);
    } catch (DecoderQueryException e) {
      player.onRenderersError(e);
      return;
    }
    if (variantIndices.length == 0) {
      player.onRenderersError(new IllegalStateException("No variants selected."));
      return;
    }
  }
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSource = new HlsChunkSource(dataSource, url, manifest, bandwidthMeter,
      variantIndices, HlsChunkSource.ADAPTIVE_MODE_SPLICE, audioCapabilities);
  HlsSampleSource sampleSource = new HlsSampleSource(chunkSource, loadControl,
      BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, VideoPlayer.TYPE_VIDEO);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);
  MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSource, new Id3Parser(), player, mainHandler.getLooper());
  Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player,
      mainHandler.getLooper());
  TrackRenderer[] renderers = new TrackRenderer[VideoPlayer.RENDERER_COUNT];
  renderers[VideoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[VideoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[VideoPlayer.TYPE_METADATA] = id3Renderer;
  renderers[VideoPlayer.TYPE_TEXT] = closedCaptionRenderer;
  player.onRenderers(null, null, renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, false), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, true,
      mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
protected void buildTrackRenderers(DrmSessionManager drmSessionManager,
    boolean filterHdContent) {
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  //Create the Sample Source to be used by the Video Renderer
  DataSource dataSourceVideo = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceVideo = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), dataSourceVideo,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, EMExoPlayer.RENDER_VIDEO);
  ChunkSampleSource sampleSourceVideo = new ChunkSampleSource(chunkSourceVideo, loadControl,
      BUFFER_SEGMENTS_VIDEO * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_VIDEO);
  //Create the Sample Source to be used by the Audio Renderer
  DataSource dataSourceAudio = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceAudio = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), dataSourceAudio, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, EMExoPlayer.RENDER_AUDIO);
  ChunkSampleSource sampleSourceAudio = new ChunkSampleSource(chunkSourceAudio, loadControl,
      BUFFER_SEGMENTS_AUDIO * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_AUDIO);
  //Create the Sample Source to be used by the Closed Captions Renderer
  DataSource dataSourceCC = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceCC = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), dataSourceCC, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, EMExoPlayer.RENDER_CLOSED_CAPTION);
  ChunkSampleSource sampleSourceCC = new ChunkSampleSource(chunkSourceCC, loadControl,
      BUFFER_SEGMENTS_TEXT * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_CLOSED_CAPTION);
  //Build the renderers
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSourceVideo, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      MAX_JOIN_TIME, drmSessionManager, true, mainHandler, player,
      DROPPED_FRAME_NOTIFICATION_AMOUNT);
  EMMediaCodecAudioTrackRenderer audioRenderer = new EMMediaCodecAudioTrackRenderer(
      sampleSourceAudio, MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler,
      player, AudioCapabilities.getCapabilities(context), streamType);
  TextTrackRenderer captionsRenderer = new TextTrackRenderer(sampleSourceCC, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[EMExoPlayer.RENDER_COUNT];
  renderers[EMExoPlayer.RENDER_VIDEO] = videoRenderer;
  renderers[EMExoPlayer.RENDER_AUDIO] = audioRenderer;
  renderers[EMExoPlayer.RENDER_CLOSED_CAPTION] = captionsRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
protected void buildTrackRenderers(HlsPlaylist playlist, boolean hasClosedCaptions,
    boolean hasMultipleAudioTracks) {
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter =
      new DefaultBandwidthMeter(player.getMainHandler(), player);
  PtsTimestampAdjusterProvider timestampAdjusterProvider = new PtsTimestampAdjusterProvider();
  //Create the Sample Source to be used by the Video Renderer
  DataSource dataSourceVideo = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  HlsChunkSource chunkSourceVideo = new HlsChunkSource(true, dataSourceVideo, playlist,
      DefaultHlsTrackSelector.newDefaultInstance(context), bandwidthMeter,
      timestampAdjusterProvider);
  HlsSampleSource sampleSourceVideo = new HlsSampleSource(chunkSourceVideo, loadControl,
      BUFFER_SEGMENTS_TOTAL * BUFFER_SEGMENT_SIZE, player.getMainHandler(), player,
      EMExoPlayer.RENDER_VIDEO);
  //Create the Sample Source to be used by the Audio Renderer
  DataSource dataSourceAudio = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  HlsChunkSource chunkSourceAudio = new HlsChunkSource(false, dataSourceAudio, playlist,
      DefaultHlsTrackSelector.newAudioInstance(), bandwidthMeter, timestampAdjusterProvider);
  HlsSampleSource sampleSourceAudio = new HlsSampleSource(chunkSourceAudio, loadControl,
      BUFFER_SEGMENTS_AUDIO * BUFFER_SEGMENT_SIZE, player.getMainHandler(), player,
      EMExoPlayer.RENDER_AUDIO);
  SampleSource[] sampleSourcesAudio = hasMultipleAudioTracks
      ? new SampleSource[] {sampleSourceVideo, sampleSourceAudio}
      : new SampleSource[] {sampleSourceVideo};
  //Create the Sample Source to be used by the Closed Captions Renderer
  DataSource dataSourceCC = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  HlsChunkSource chunkSourceCC = new HlsChunkSource(false, dataSourceCC, playlist,
      DefaultHlsTrackSelector.newSubtitleInstance(), bandwidthMeter, timestampAdjusterProvider);
  HlsSampleSource sampleSourceCC = new HlsSampleSource(chunkSourceCC, loadControl,
      BUFFER_SEGMENTS_TEXT * BUFFER_SEGMENT_SIZE, player.getMainHandler(), player,
      EMExoPlayer.RENDER_CLOSED_CAPTION);
  //Build the renderers
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSourceVideo, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      MAX_JOIN_TIME, player.getMainHandler(), player, DROPPED_FRAME_NOTIFICATION_AMOUNT);
  EMMediaCodecAudioTrackRenderer audioRenderer = new EMMediaCodecAudioTrackRenderer(
      sampleSourcesAudio, MediaCodecSelector.DEFAULT, null, true, player.getMainHandler(),
      player, AudioCapabilities.getCapabilities(context), streamType);
  TrackRenderer captionsRenderer = hasClosedCaptions
      ? new TextTrackRenderer(sampleSourceCC, player, player.getMainHandler().getLooper())
      : new Eia608TrackRenderer(sampleSourceVideo, player, player.getMainHandler().getLooper());
  MetadataTrackRenderer<List<Id3Frame>> id3Renderer = new MetadataTrackRenderer<>(
      sampleSourceVideo, new Id3Parser(), player, player.getMainHandler().getLooper());
  //Populate the Render list to pass back to the callback
  TrackRenderer[] renderers = new TrackRenderer[EMExoPlayer.RENDER_COUNT];
  renderers[EMExoPlayer.RENDER_VIDEO] = videoRenderer;
  renderers[EMExoPlayer.RENDER_AUDIO] = audioRenderer;
  renderers[EMExoPlayer.RENDER_CLOSED_CAPTION] = captionsRenderer;
  renderers[EMExoPlayer.RENDER_TIMED_METADATA] = id3Renderer;
  player.onRenderers(renderers, bandwidthMeter);
}
protected void buildTrackRenderers(DrmSessionManager drmSessionManager) {
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  //Create the Sample Source to be used by the Video Renderer
  DataSource dataSourceVideo = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  SmoothStreamingTrackSelector trackSelectorVideo =
      DefaultSmoothStreamingTrackSelector.newVideoInstance(context, true, false);
  ChunkSource chunkSourceVideo = new SmoothStreamingChunkSource(manifestFetcher,
      trackSelectorVideo, dataSourceVideo, new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter),
      LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource sampleSourceVideo = new ChunkSampleSource(chunkSourceVideo, loadControl,
      BUFFER_SEGMENTS_VIDEO * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_VIDEO);
  //Create the Sample Source to be used by the Audio Renderer
  DataSource dataSourceAudio = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  SmoothStreamingTrackSelector trackSelectorAudio =
      DefaultSmoothStreamingTrackSelector.newAudioInstance();
  ChunkSource chunkSourceAudio = new SmoothStreamingChunkSource(manifestFetcher,
      trackSelectorAudio, dataSourceAudio, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource sampleSourceAudio = new ChunkSampleSource(chunkSourceAudio, loadControl,
      BUFFER_SEGMENTS_AUDIO * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_AUDIO);
  //Create the Sample Source to be used by the Closed Captions Renderer
  DataSource dataSourceCC = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  SmoothStreamingTrackSelector trackSelectorCC =
      DefaultSmoothStreamingTrackSelector.newTextInstance();
  ChunkSource chunkSourceCC = new SmoothStreamingChunkSource(manifestFetcher, trackSelectorCC,
      dataSourceCC, null, LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource sampleSourceCC = new ChunkSampleSource(chunkSourceCC, loadControl,
      BUFFER_SEGMENTS_TEXT * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_CLOSED_CAPTION);
  // Build the renderers
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSourceVideo, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      MAX_JOIN_TIME, drmSessionManager, true, mainHandler, player,
      DROPPED_FRAME_NOTIFICATION_AMOUNT);
  EMMediaCodecAudioTrackRenderer audioRenderer = new EMMediaCodecAudioTrackRenderer(
      sampleSourceAudio, MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler,
      player, AudioCapabilities.getCapabilities(context), streamType);
  TextTrackRenderer captionsRenderer = new TextTrackRenderer(sampleSourceCC, player,
      mainHandler.getLooper());
  // Invoke the callback
  TrackRenderer[] renderers = new TrackRenderer[EMExoPlayer.RENDER_COUNT];
  renderers[EMExoPlayer.RENDER_VIDEO] = videoRenderer;
  renderers[EMExoPlayer.RENDER_AUDIO] = audioRenderer;
  renderers[EMExoPlayer.RENDER_CLOSED_CAPTION] = captionsRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);
  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }
  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoPlayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoPlayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoPlayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());
  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoPlayerWrapper.RENDERER_COUNT];
  renderers[ExoPlayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoPlayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoPlayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
@Override
public void onManifest(String contentId, SmoothStreamingManifest manifest) {
  Handler mainHandler = playerActivity.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  // Obtain stream elements for playback.
  int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
  int audioStreamElementIndex = -1;
  int videoStreamElementIndex = -1;
  ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
  for (int i = 0; i < manifest.streamElements.length; i++) {
    if (audioStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
      audioStreamElementIndex = i;
    } else if (videoStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
      videoStreamElementIndex = i;
      StreamElement streamElement = manifest.streamElements[i];
      for (int j = 0; j < streamElement.tracks.length; j++) {
        TrackElement trackElement = streamElement.tracks[j];
        if (trackElement.maxWidth * trackElement.maxHeight <= maxDecodableFrameSize) {
          videoTrackIndexList.add(j);
        } else {
          // The device isn't capable of playing this stream.
        }
      }
    }
  }
  int[] videoTrackIndices = Util.toArray(videoTrackIndexList);
  // Build the video renderer.
  DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      videoStreamElementIndex, videoTrackIndices, videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
      videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler,
      playerActivity, 50);
  // Build the audio renderer.
  DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      audioStreamElementIndex, new int[] {0}, audioDataSource,
      new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
  SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource);
  callback.onRenderers(videoRenderer, audioRenderer);
}
public ChunkSampleSource(ChunkSource chunkSource, LoadControl loadControl,
    int bufferSizeContribution, boolean frameAccurateSeeking) {
  this(chunkSource, loadControl, bufferSizeContribution, frameAccurateSeeking, null, null, 0);
}