@Override public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) { // Build the video and audio renderers. Log.d(TAG, "buildRenderers(): uri=" + uri.toString()); Handler mainHandler = player.getMainHandler(); RawExtractor extractor = null; if( playerType == DemoUtil.TYPE_RAW_HTTP_TS) { BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH); extractor = new TsExtractor(false, 0, bufferPool); } DataSource videoDataSource = new RawHttpDataSource(userAgent, RawHttpDataSource.REJECT_PAYWALL_TYPES); DataSource rawSource = new RawBufferedSource(videoDataSource); SampleSource sampleSource = new RawSampleSource(rawSource, this.uri, this.context, extractor); MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50); MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource); // Build the debug renderer. TrackRenderer debugRenderer = debugTextView != null ? new DebugTrackRenderer(debugTextView, videoRenderer) : null; // Invoke the callback. TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT]; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; // audioRenderer; renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer; callback.onRenderers(null, null, renderers); }
@Override public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) { Log.d(TAG, "******************buildRenderers(): uri=" + uri.toString()); DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(); Handler mainHandler = player.getMainHandler(); RawExtractor extractor = null; BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH); extractor = new TsExtractor(false, 0, bufferPool); DataSource videoDataSource = new UdpMulticastDataSource(); DataSource rawSource = new UdpSampleSource(videoDataSource); SampleSource sampleSource = new RawSampleSource(rawSource, this.uri, this.context, extractor); MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource, null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player, 50); MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource); MetadataTrackRenderer<Map<String, Object>> id3Renderer = new MetadataTrackRenderer<Map<String, Object>>(sampleSource, new Id3Parser(), player.getId3MetadataRenderer(), player.getMainHandler().getLooper()); Eia608TrackRenderer closedCaptionRenderer = new Eia608TrackRenderer(sampleSource, player, player.getMainHandler().getLooper()); // Build the debug renderer. TrackRenderer debugRenderer = debugTextView != null ? new DebugTrackRenderer(debugTextView, videoRenderer) : null; // Invoke the callback. TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT]; renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer; renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer; renderers[DemoPlayer.TYPE_TIMED_METADATA] = id3Renderer; renderers[DemoPlayer.TYPE_TEXT] = closedCaptionRenderer; renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer; callback.onRenderers(null, null, renderers); }
@Override
public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) {
  // Build the video and audio renderers.
  Log.d(TAG, "buildRenderers(): uri=" + uri.toString());
  Handler mainHandler = player.getMainHandler();

  BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH);
  RawExtractor extractor = new TsExtractor(false, 0, bufferPool);

  DataSource videoDataSource = new UdpMulticastDataSource();
  DataSource rawSource = new RtpSampleSource(videoDataSource);
  SampleSource sampleSource = new RawSampleSource(rawSource, this.uri, this.context, extractor);

  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(sampleSource,
      null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null, mainHandler, player,
      50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource);

  // Build the debug renderer.
  TrackRenderer debugRenderer = debugTextView != null
      ? new DebugTrackRenderer(debugTextView, videoRenderer) : null;

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
  callback.onRenderers(null, null, renderers);
}
@Override
public void buildRenderers(DemoPlayer player, DemoPlayer.RendererBuilderCallback callback) {
  // Build the video renderer. The audio renderer is disabled in this builder.
  Log.d(TAG, "buildRenderers(): uri=" + uri.toString());
  Handler mainHandler = player.getMainHandler();

  BufferPool bufferPool = new BufferPool(this.BUFFER_POOL_LENGTH);
  RawExtractor extractor = new TsExtractor(false, 0, bufferPool);

  DataSource videoDataSource = new UdpUnicastDataSource();
  DataSource rawSource;
  if (this.uri.getScheme().equals("rtp")) {
    rawSource = new RtpSampleSource(videoDataSource);
  } else {
    rawSource = new RawBufferedSource(videoDataSource);
  }
  SampleSource videoSampleSource =
      new RawSampleSource(rawSource, this.uri, this.context, extractor);

  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
      videoSampleSource, null, true, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, null,
      mainHandler, player, 50);

  // Build the debug renderer.
  TrackRenderer debugRenderer = debugTextView != null
      ? new DebugTrackRenderer(debugTextView, videoRenderer) : null;

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = null; // audioRenderer;
  renderers[DemoPlayer.TYPE_DEBUG] = debugRenderer;
  callback.onRenderers(null, null, renderers);
}
protected SampleQueue(BufferPool bufferPool) {
  rollingBuffer = new RollingSampleBuffer(bufferPool);
  sampleInfoHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
  needKeyframe = true;
  lastReadTimeUs = Long.MIN_VALUE;
  spliceOutTimeUs = Long.MIN_VALUE;
  largestParsedTimestampUs = Long.MIN_VALUE;
}
public H264Reader(BufferPool bufferPool, SeiReader seiReader) {
  super(bufferPool);
  this.seiReader = seiReader;
  prefixFlags = new boolean[3];
  sps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SPS, 128);
  pps = new NalUnitTargetBuffer(NAL_UNIT_TYPE_PPS, 128);
  sei = new NalUnitTargetBuffer(NAL_UNIT_TYPE_SEI, 128);
  scratchEscapePositions = new int[10];
}
public RollingSampleBuffer(BufferPool bufferPool) {
  this.fragmentPool = bufferPool;
  fragmentLength = bufferPool.bufferLength;
  infoQueue = new InfoQueue();
  dataQueue = new ConcurrentLinkedQueue<byte[]>();
  dataOffsetHolder = new long[1];
}
public AdtsExtractor(boolean shouldSpliceIn, long firstSampleTimestamp, BufferPool bufferPool) {
  super(shouldSpliceIn);
  this.firstSampleTimestamp = firstSampleTimestamp;
  packetBuffer = new ParsableByteArray(MAX_PACKET_SIZE);
  adtsReader = new AdtsReader(bufferPool);
  firstPacket = true;
}
public TsExtractor(boolean shouldSpliceIn, long firstSampleTimestamp, BufferPool bufferPool) {
  super(shouldSpliceIn);
  this.firstSampleTimestamp = firstSampleTimestamp;
  this.bufferPool = bufferPool;
  tsScratch = new ParsableBitArray(new byte[3]);
  tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
  sampleQueues = new SparseArray<SampleQueue>();
  tsPayloadReaders = new SparseArray<TsPayloadReader>();
  tsPayloadReaders.put(TS_PAT_PID, new PatReader());
  lastPts = Long.MIN_VALUE;
}
public TsExtractor(boolean shouldSpliceIn, long firstSampleTimestamp, BufferPool bufferPool) {
  super(shouldSpliceIn);
  this.firstSampleTimestamp = firstSampleTimestamp;
  this.bufferPool = bufferPool;
  tsScratch = new ParsableBitArray(new byte[3]);
  tsPacketBuffer = new ParsableByteArray(TS_PACKET_SIZE);
  sampleQueues = new SparseArray<SampleQueue>();
  tsPayloadReaders = new SparseArray<TsPayloadReader>();
  tsPayloadReaders.put(TS_PAT_PID, new PatReader());
  lastPts = Long.MIN_VALUE;
  tsStreamTypes = new SparseArray<Integer>();
}
@Override
public void onManifest(String contentId, SmoothStreamingManifest manifest) {
  Handler mainHandler = playerActivity.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();

  // Obtain stream elements for playback.
  int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
  int audioStreamElementIndex = -1;
  int videoStreamElementIndex = -1;
  ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
  for (int i = 0; i < manifest.streamElements.length; i++) {
    if (audioStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_AUDIO) {
      audioStreamElementIndex = i;
    } else if (videoStreamElementIndex == -1
        && manifest.streamElements[i].type == StreamElement.TYPE_VIDEO) {
      videoStreamElementIndex = i;
      StreamElement streamElement = manifest.streamElements[i];
      for (int j = 0; j < streamElement.tracks.length; j++) {
        TrackElement trackElement = streamElement.tracks[j];
        if (trackElement.maxWidth * trackElement.maxHeight <= maxDecodableFrameSize) {
          videoTrackIndexList.add(j);
        } else {
          // The device isn't capable of playing this stream.
        }
      }
    }
  }
  int[] videoTrackIndices = Util.toArray(videoTrackIndexList);

  // Build the video renderer.
  DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      videoStreamElementIndex, videoTrackIndices, videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(
      videoSampleSource, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler,
      playerActivity, 50);

  // Build the audio renderer.
  DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      audioStreamElementIndex, new int[] {0}, audioDataSource,
      new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
  SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource);

  callback.onRenderers(videoRenderer, audioRenderer);
}
public SeiReader(BufferPool bufferPool) {
  super(bufferPool);
  setMediaFormat(MediaFormat.createEia608Format());
  seiBuffer = new ParsableByteArray();
}

public AdtsReader(BufferPool bufferPool) {
  super(bufferPool);
  adtsScratch = new ParsableBitArray(new byte[HEADER_SIZE + CRC_SIZE]);
  state = STATE_FINDING_SYNC;
}

protected ElementaryStreamReader(BufferPool bufferPool) {
  super(bufferPool);
}

public Id3Reader(BufferPool bufferPool) {
  super(bufferPool);
  setMediaFormat(MediaFormat.createId3Format());
}
/**
 * @param dataSource A {@link DataSource} suitable for loading the media data.
 * @param playlistUrl The playlist URL.
 * @param playlist The HLS playlist.
 * @param bandwidthMeter Provides an estimate of the currently available bandwidth.
 * @param variantIndices A subset of variant indices to consider, or null to consider all of the
 *     variants in the master playlist.
 * @param adaptiveMode The mode for switching from one variant to another. One of
 *     {@link #ADAPTIVE_MODE_NONE}, {@link #ADAPTIVE_MODE_ABRUPT} and
 *     {@link #ADAPTIVE_MODE_SPLICE}.
 * @param targetBufferSize The targeted buffer size in bytes. The buffer will not be filled more
 *     than one chunk beyond this amount of data.
 * @param targetBufferDurationMs The targeted duration of media to buffer ahead of the current
 *     playback position. The buffer will not be filled more than one chunk beyond this position.
 * @param minBufferDurationToSwitchUpMs The minimum duration of media that needs to be buffered
 *     for a switch to a higher quality variant to be considered.
 * @param maxBufferDurationToSwitchDownMs The maximum duration of media that needs to be buffered
 *     for a switch to a lower quality variant to be considered.
 */
public HlsChunkSource(DataSource dataSource, String playlistUrl, HlsPlaylist playlist,
    BandwidthMeter bandwidthMeter, int[] variantIndices, int adaptiveMode, int targetBufferSize,
    long targetBufferDurationMs, long minBufferDurationToSwitchUpMs,
    long maxBufferDurationToSwitchDownMs) {
  this.upstreamDataSource = dataSource;
  this.bandwidthMeter = bandwidthMeter;
  this.adaptiveMode = adaptiveMode;
  this.targetBufferSize = targetBufferSize;
  targetBufferDurationUs = targetBufferDurationMs * 1000;
  minBufferDurationToSwitchUpUs = minBufferDurationToSwitchUpMs * 1000;
  maxBufferDurationToSwitchDownUs = maxBufferDurationToSwitchDownMs * 1000;
  baseUri = playlist.baseUri;
  playlistParser = new HlsPlaylistParser();
  bufferPool = new BufferPool(256 * 1024);
  if (playlist.type == HlsPlaylist.TYPE_MEDIA) {
    enabledVariants = new Variant[] {new Variant(0, playlistUrl, 0, null, -1, -1)};
    mediaPlaylists = new HlsMediaPlaylist[1];
    mediaPlaylistBlacklistFlags = new boolean[1];
    lastMediaPlaylistLoadTimesMs = new long[1];
    setMediaPlaylist(0, (HlsMediaPlaylist) playlist);
  } else {
    Assertions.checkState(playlist.type == HlsPlaylist.TYPE_MASTER);
    enabledVariants = filterVariants((HlsMasterPlaylist) playlist, variantIndices);
    mediaPlaylists = new HlsMediaPlaylist[enabledVariants.length];
    mediaPlaylistBlacklistFlags = new boolean[enabledVariants.length];
    lastMediaPlaylistLoadTimesMs = new long[enabledVariants.length];
  }
  int maxWidth = -1;
  int maxHeight = -1;
  // Select the first variant from the master playlist that's enabled.
  long minOriginalVariantIndex = Integer.MAX_VALUE;
  for (int i = 0; i < enabledVariants.length; i++) {
    if (enabledVariants[i].index < minOriginalVariantIndex) {
      minOriginalVariantIndex = enabledVariants[i].index;
      variantIndex = i;
    }
    maxWidth = Math.max(enabledVariants[i].width, maxWidth);
    maxHeight = Math.max(enabledVariants[i].height, maxHeight);
  }
  // TODO: We should allow the default values to be passed through the constructor.
  this.maxWidth = maxWidth > 0 ? maxWidth : 1920;
  this.maxHeight = maxHeight > 0 ? maxHeight : 1080;
}
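// A minimal usage sketch (an assumption, not taken from this codebase) showing one plausible way
// to construct an HlsChunkSource from the parameters documented above. The variables userAgent,
// playlistUrl, playlist, and bandwidthMeter are assumed to be in scope, and the numeric
// buffer-size and duration values are illustrative only.
DataSource hlsDataSource = new UriDataSource(userAgent, bandwidthMeter);
HlsChunkSource hlsChunkSource = new HlsChunkSource(hlsDataSource, playlistUrl, playlist,
    bandwidthMeter, null /* consider all variants */, HlsChunkSource.ADAPTIVE_MODE_SPLICE,
    16 * 1024 * 1024 /* targetBufferSize */, 30000 /* targetBufferDurationMs */,
    20000 /* minBufferDurationToSwitchUpMs */, 50000 /* maxBufferDurationToSwitchDownMs */);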
public AC3ReaderATSC(BufferPool bufferPool) {
  super(bufferPool);
  bitparser = new ParsableBitArray(new byte[5]);
}

public DVBTxtReader(BufferPool bufferPool) {
  super(bufferPool);
}

public Mpeg2vReader(BufferPool bufferPool) {
  super(bufferPool);
  setMediaFormat(MediaFormat.createMp2vFormat());
}

public MP2Reader(BufferPool bufferPool) {
  super(bufferPool);
}