@Override
public TrackRenderer[] buildRenderers(HostActivity host, ExoPlayer player, Surface surface) {
  Handler handler = new Handler();
  LogcatLogger logger = new LogcatLogger(TAG, player);
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  String userAgent = TestUtil.getUserAgent(host);

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(host, null, userAgent);
  videoTrackSelector = new TrackSelector(AdaptationSet.TYPE_VIDEO,
      canIncludeAdditionalVideoFormats, videoFormats);
  ChunkSource videoChunkSource = new DashChunkSource(mpd, videoTrackSelector, videoDataSource,
      new FormatEvaluator.RandomEvaluator(0));
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, VIDEO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  DebugMediaCodecVideoTrackRenderer videoRenderer = new DebugMediaCodecVideoTrackRenderer(host,
      videoSampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      0, handler, logger, 50);
  videoCounters = videoRenderer.codecCounters;
  player.sendMessage(videoRenderer, DebugMediaCodecVideoTrackRenderer.MSG_SET_SURFACE, surface);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(host, null, userAgent);
  TrackSelector audioTrackSelector = new TrackSelector(AdaptationSet.TYPE_AUDIO, false,
      audioFormats);
  ChunkSource audioChunkSource = new DashChunkSource(mpd, audioTrackSelector, audioDataSource,
      null);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, handler, logger, AUDIO_EVENT_ID,
      MIN_LOADABLE_RETRY_COUNT);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource, MediaCodecSelector.DEFAULT, handler, logger);
  audioCounters = audioRenderer.codecCounters;

  TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
  renderers[VIDEO_RENDERER_INDEX] = videoRenderer;
  renderers[AUDIO_RENDERER_INDEX] = audioRenderer;
  return renderers;
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager<FrameworkMediaCrypto> drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, Player.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[Player.RENDERER_COUNT];
  renderers[Player.TYPE_VIDEO] = videoRenderer;
  renderers[Player.TYPE_AUDIO] = audioRenderer;
  renderers[Player.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, false), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, DemoPlayer.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, true,
      mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, DemoPlayer.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, DemoPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

protected void buildTrackRenderers(DrmSessionManager drmSessionManager, boolean filterHdContent) {
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  // Create the Sample Source to be used by the Video Renderer
  DataSource dataSourceVideo = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceVideo = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), dataSourceVideo,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, EMExoPlayer.RENDER_VIDEO);
  ChunkSampleSource sampleSourceVideo = new ChunkSampleSource(chunkSourceVideo, loadControl,
      BUFFER_SEGMENTS_VIDEO * BUFFER_SEGMENT_SIZE, mainHandler, player, EMExoPlayer.RENDER_VIDEO);

  // Create the Sample Source to be used by the Audio Renderer
  DataSource dataSourceAudio = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceAudio = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), dataSourceAudio, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, EMExoPlayer.RENDER_AUDIO);
  ChunkSampleSource sampleSourceAudio = new ChunkSampleSource(chunkSourceAudio, loadControl,
      BUFFER_SEGMENTS_AUDIO * BUFFER_SEGMENT_SIZE, mainHandler, player, EMExoPlayer.RENDER_AUDIO);

  // Create the Sample Source to be used by the Closed Captions Renderer
  // (text tracks, so a text track selector is used rather than an audio one).
  DataSource dataSourceCC = createDataSource(okHttpClient, bandwidthMeter, userAgent);
  ChunkSource chunkSourceCC = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), dataSourceCC, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, EMExoPlayer.RENDER_CLOSED_CAPTION);
  ChunkSampleSource sampleSourceCC = new ChunkSampleSource(chunkSourceCC, loadControl,
      BUFFER_SEGMENTS_TEXT * BUFFER_SEGMENT_SIZE, mainHandler, player,
      EMExoPlayer.RENDER_CLOSED_CAPTION);

  // Build the renderers
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSourceVideo, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT,
      MAX_JOIN_TIME, drmSessionManager, true, mainHandler, player,
      DROPPED_FRAME_NOTIFICATION_AMOUNT);
  EMMediaCodecAudioTrackRenderer audioRenderer = new EMMediaCodecAudioTrackRenderer(
      sampleSourceAudio, MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), streamType);
  TextTrackRenderer captionsRenderer = new TextTrackRenderer(sampleSourceCC, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[EMExoPlayer.RENDER_COUNT];
  renderers[EMExoPlayer.RENDER_VIDEO] = videoRenderer;
  renderers[EMExoPlayer.RENDER_AUDIO] = audioRenderer;
  renderers[EMExoPlayer.RENDER_CLOSED_CAPTION] = captionsRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new FormatEvaluator.AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoPlayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoPlayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoPlayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoPlayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoPlayerWrapper.RENDERER_COUNT];
  renderers[ExoPlayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoPlayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoPlayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, FlyingPlayer.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, drmSessionManager, true, mainHandler,
      player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, FlyingPlayer.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      drmSessionManager, true, mainHandler, player, AudioCapabilities.getCapabilities(context));

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, FlyingPlayer.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[FlyingPlayer.RENDERER_COUNT];
  renderers[FlyingPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[FlyingPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[FlyingPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  StreamingDrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
          player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
      filterHdContent = getWidevineSecurityLevel(drmSessionManager) != SECURITY_LEVEL_1;
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, ExoplayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoplayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoplayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoplayerWrapper.RENDERER_COUNT];
  renderers[ExoplayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoplayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoplayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}

private void buildRenderers() {
  Period period = manifest.getPeriod(0);
  Handler mainHandler = player.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new DefaultAllocator(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, player);

  boolean hasContentProtection = false;
  for (int i = 0; i < period.adaptationSets.size(); i++) {
    AdaptationSet adaptationSet = period.adaptationSets.get(i);
    if (adaptationSet.type != AdaptationSet.TYPE_UNKNOWN) {
      hasContentProtection |= adaptationSet.hasContentProtection();
    }
  }

  // Check drm support if necessary.
  boolean filterHdContent = false;
  DrmSessionManager drmSessionManager = null;
  if (hasContentProtection) {
    if (Util.SDK_INT < 18) {
      player.onRenderersError(
          new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME));
      return;
    }
    try {
      if (drmCallback instanceof ExtendedMediaDrmCallback) {
        drmSessionManager = ((ExtendedMediaDrmCallback) drmCallback).getSessionManager();
      }
      if (drmSessionManager == null) {
        drmSessionManager = StreamingDrmSessionManager.newWidevineInstance(
            player.getPlaybackLooper(), drmCallback, null, player.getMainHandler(), player);
        filterHdContent =
            getWidevineSecurityLevel((StreamingDrmSessionManager) drmSessionManager)
                != SECURITY_LEVEL_1;
      }
    } catch (UnsupportedDrmException e) {
      player.onRenderersError(e);
      return;
    }
  }

  // Build the video renderer.
  DataSource videoDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource videoChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newVideoInstance(context, true, filterHdContent), videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS, elapsedRealtimeOffset,
      mainHandler, player, ExoplayerWrapper.TYPE_VIDEO);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoplayerWrapper.TYPE_VIDEO);
  TrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context, videoSampleSource,
      MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      drmSessionManager, true, mainHandler, player, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource audioChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newAudioInstance(), audioDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_AUDIO);
  ChunkSampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player,
      ExoplayerWrapper.TYPE_AUDIO);
  TrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(audioSampleSource,
      MediaCodecSelector.DEFAULT, drmSessionManager, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);

  // Build the text renderer.
  DataSource textDataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ChunkSource textChunkSource = new DashChunkSource(manifestFetcher,
      DefaultDashTrackSelector.newTextInstance(), textDataSource, null, LIVE_EDGE_LATENCY_MS,
      elapsedRealtimeOffset, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  ChunkSampleSource textSampleSource = new ChunkSampleSource(textChunkSource, loadControl,
      TEXT_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, mainHandler, player, ExoplayerWrapper.TYPE_TEXT);
  TrackRenderer textRenderer = new TextTrackRenderer(textSampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[ExoplayerWrapper.RENDERER_COUNT];
  renderers[ExoplayerWrapper.TYPE_VIDEO] = videoRenderer;
  renderers[ExoplayerWrapper.TYPE_AUDIO] = audioRenderer;
  renderers[ExoplayerWrapper.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
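
Each builder above hands its TrackRenderer[] to a wrapper via an onRenderers callback rather than to the core player directly. As a minimal sketch of the consuming side, assuming the ExoPlayer 1.x API and using RENDERER_COUNT, TYPE_VIDEO, and surface as stand-ins for the wrapper-specific constants and state, the callback typically forwards the renderers like this:

// Minimal sketch, assuming ExoPlayer 1.x (r1.5.x) APIs; not any specific wrapper's implementation.
public void onRenderers(TrackRenderer[] renderers, BandwidthMeter bandwidthMeter) {
  // Create the core player with one slot per renderer and example buffer durations.
  ExoPlayer player = ExoPlayer.Factory.newInstance(RENDERER_COUNT, 1000, 5000);
  // Route decoded video frames to the output surface before playback starts.
  player.sendMessage(renderers[TYPE_VIDEO], MediaCodecVideoTrackRenderer.MSG_SET_SURFACE,
      surface);
  // Hand the renderers to the player and start playback once buffering completes.
  player.prepare(renderers);
  player.setPlayWhenReady(true);
}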