/**
 * Builds a WebM stream containing a VP9 video track, an unsupported track and an Opus audio
 * track, feeds it to the extractor, and checks that only the two supported tracks are reported.
 */
public void testPrepareThreeTracks() throws IOException, InterruptedException {
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addUnsupportedTrack(UNSUPPORTED_TRACK_NUMBER)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  // Even though the input stream has 3 tracks, only 2 of them are supported and will be reported.
  assertEquals(2, extractorOutput.numberOfTracks);
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertIndex(DEFAULT_TIMECODE_SCALE, 1);
}
/**
 * Feeds a simple block using fixed-size lacing (20 equally sized frames out of 100 media bytes)
 * and checks that each laced frame is output as its own 5-byte sample with a timestamp derived
 * from the track's default frame duration.
 */
public void testReadSampleWithFixedSizeLacing() throws IOException, InterruptedException {
  byte[] media = createFrameData(100);
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE, TEST_DEFAULT_DURATION_NS)
      .addSimpleBlockMediaWithFixedSizeLacing(2 /* trackNumber */, 0 /* clusterTimecode */,
          0 /* blockTimecode */, 20 /* lacingFrameCount */, media)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  for (int i = 0; i < 20; i++) {
    // Each laced frame is offset from the block time by i default durations (ns -> us).
    long expectedTimeUs = i * TEST_DEFAULT_DURATION_NS / 1000;
    assertSample(i, Arrays.copyOfRange(media, i * 5, i * 5 + 5), expectedTimeUs, true, false,
        null, getTrackOutput(AUDIO_TRACK_NUMBER));
  }
}
/**
 * Handles a fully read moov container atom: publishes any DRM initialization data found in pssh
 * child atoms, parses track defaults from mvex/trex, and outputs the format of the (single)
 * track parsed from trak/mvhd.
 */
private void onMoovContainerAtomRead(ContainerAtom moov) {
  List<Atom.LeafAtom> moovChildren = moov.leafChildren;
  int moovChildrenSize = moovChildren.size();
  // Collect every pssh atom into a single Mapped DrmInitData, keyed by scheme UUID.
  DrmInitData.Mapped drmInitData = null;
  for (int i = 0; i < moovChildrenSize; i++) {
    LeafAtom child = moovChildren.get(i);
    if (child.type == Atom.TYPE_pssh) {
      if (drmInitData == null) {
        drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4);
      }
      byte[] psshData = child.data.data;
      drmInitData.put(PsshAtomUtil.parseUuid(psshData), psshData);
    }
  }
  if (drmInitData != null) {
    extractorOutput.drmInitData(drmInitData);
  }
  // NOTE(review): assumes mvex and its trex child are always present — a malformed stream
  // without them would NPE here; confirm upstream guarantees.
  ContainerAtom mvex = moov.getContainerAtomOfType(Atom.TYPE_mvex);
  extendsDefaults = parseTrex(mvex.getLeafAtomOfType(Atom.TYPE_trex).data);
  track = AtomParsers.parseTrak(moov.getContainerAtomOfType(Atom.TYPE_trak),
      moov.getLeafAtomOfType(Atom.TYPE_mvhd));
  Assertions.checkState(track != null);
  trackOutput.format(track.mediaFormat);
}
/**
 * Parses a valid MP4 file whose video track uses the MP4V format, and checks the seek map,
 * the reported video/audio formats, and every video sample's data, flags and timestamp.
 */
public void testParsesValidMp4vFile() throws Exception {
  TestUtil.consumeTestData(extractor,
      getTestInputData(true /* includeStss */, true /* mp4vFormat */));
  // The seek map is correct.
  assertSeekMap(extractorOutput.seekMap, true);
  // The video and audio formats are set correctly.
  assertEquals(2, extractorOutput.trackOutputs.size());
  MediaFormat videoFormat = extractorOutput.trackOutputs.get(0).format;
  MediaFormat audioFormat = extractorOutput.trackOutputs.get(1).format;
  assertEquals(MimeTypes.VIDEO_MP4V, videoFormat.mimeType);
  assertEquals(VIDEO_MP4V_WIDTH, videoFormat.width);
  assertEquals(VIDEO_MP4V_HEIGHT, videoFormat.height);
  assertEquals(MimeTypes.AUDIO_AAC, audioFormat.mimeType);
  // The timestamps and sizes are set correctly.
  FakeTrackOutput videoTrackOutput = extractorOutput.trackOutputs.get(0);
  videoTrackOutput.assertSampleCount(SAMPLE_TIMESTAMPS.length);
  for (int i = 0; i < SAMPLE_TIMESTAMPS.length; i++) {
    byte[] sampleData = getOutputSampleData(i, false);
    int sampleFlags = SAMPLE_IS_SYNC[i] ? C.SAMPLE_FLAG_SYNC : 0;
    long sampleTimestampUs = getVideoTimestampUs(SAMPLE_TIMESTAMPS[i]);
    videoTrackOutput.assertSample(i, sampleData, sampleTimestampUs, sampleFlags, null);
  }
}
/**
 * Builds a WebM stream whose VP9 track carries ContentEncoding encryption settings and checks
 * that the extractor reports DRM init data for both the Widevine and the zero (common) UUIDs,
 * each carrying the test encryption key id.
 */
public void testPrepareContentEncodingEncryption() throws IOException, InterruptedException {
  ContentEncodingSettings settings = new StreamBuilder.ContentEncodingSettings(0, 1, 5, 1);
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, settings)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertIndex(DEFAULT_TIMECODE_SCALE, 1);
  DrmInitData drmInitData = extractorOutput.drmInitData;
  assertNotNull(drmInitData);
  SchemeInitData widevineInitData = drmInitData.get(WIDEVINE_UUID);
  assertEquals(MimeTypes.VIDEO_WEBM, widevineInitData.mimeType);
  android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, widevineInitData.data);
  SchemeInitData zeroInitData = drmInitData.get(ZERO_UUID);
  assertEquals(MimeTypes.VIDEO_WEBM, zeroInitData.mimeType);
  android.test.MoreAsserts.assertEquals(TEST_ENCRYPTION_KEY_ID, zeroInitData.data);
}
/**
 * Builds a WebM stream with one VP9 video track and one Opus audio track and checks that both
 * tracks and their formats are reported, along with the index (cues).
 */
public void testPrepareTwoTracks() throws IOException, InterruptedException {
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertEquals(2, extractorOutput.numberOfTracks);
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertIndex(DEFAULT_TIMECODE_SCALE, 1);
}
/**
 * Extracts DRM initialization data for an adaptation set of the given manifest.
 *
 * <p>The init data MIME type is chosen from the container of the first representation (WebM vs
 * MP4). Returns null when the adaptation set declares no content protection, or when none of the
 * declared protections carries both a UUID and data.
 */
private static DrmInitData getDrmInitData(MediaPresentationDescription manifest,
    int adaptationSetIndex) {
  AdaptationSet adaptationSet = manifest.periods.get(0).adaptationSets.get(adaptationSetIndex);
  if (adaptationSet.contentProtections.isEmpty()) {
    return null;
  }
  boolean isWebm = mimeTypeIsWebm(adaptationSet.representations.get(0).format.mimeType);
  String initDataMimeType = isWebm ? MimeTypes.VIDEO_WEBM : MimeTypes.VIDEO_MP4;
  DrmInitData.Mapped result = null;
  for (ContentProtection protection : adaptationSet.contentProtections) {
    if (protection.uuid == null || protection.data == null) {
      // Skip schemes we can't key or that carry no init data.
      continue;
    }
    if (result == null) {
      result = new DrmInitData.Mapped(initDataMimeType);
    }
    result.put(protection.uuid, protection.data);
  }
  return result;
}
/**
 * Queries the platform H264 decoder for profile/level support.
 *
 * @param profile An AVC profile constant from {@link CodecProfileLevel}.
 * @param level An AVC profile level from {@link CodecProfileLevel}.
 * @return Whether the specified profile is supported at the specified level.
 * @throws DecoderQueryException If the decoder query fails.
 */
public static boolean isH264ProfileSupported(int profile, int level)
    throws DecoderQueryException {
  Pair<String, CodecCapabilities> info = getMediaCodecInfo(MimeTypes.VIDEO_H264, false);
  if (info == null) {
    // No H264 decoder available at all.
    return false;
  }
  // A decoder supporting the profile at a level >= the requested one is sufficient.
  for (CodecProfileLevel profileLevel : info.second.profileLevels) {
    if (profileLevel.profile == profile && profileLevel.level >= level) {
      return true;
    }
  }
  return false;
}
/**
 * Builds a human-readable track name from a format.
 *
 * <p>Adaptive formats are named "auto". Video tracks combine resolution, bitrate and track id;
 * audio tracks combine language, audio properties, bitrate and track id; other tracks combine
 * language, bitrate and track id. Falls back to "unknown" when nothing is available.
 */
private static String buildTrackName(MediaFormat format) {
  if (format.adaptive) {
    return "auto";
  }
  String name;
  if (MimeTypes.isVideo(format.mimeType)) {
    name = buildResolutionString(format);
    name = joinWithSeparator(name, buildBitrateString(format));
  } else if (MimeTypes.isAudio(format.mimeType)) {
    name = buildLanguageString(format);
    name = joinWithSeparator(name, buildAudioPropertyString(format));
    name = joinWithSeparator(name, buildBitrateString(format));
  } else {
    name = buildLanguageString(format);
    name = joinWithSeparator(name, buildBitrateString(format));
  }
  name = joinWithSeparator(name, buildTrackIdString(format));
  return name.isEmpty() ? "unknown" : name;
}
/**
 * Builds a stream with an unsupported track plus VP9 and Opus tracks, writes a sample block for
 * all three track numbers, and checks that samples are only emitted for the two supported tracks
 * (the block for track 17 is silently skipped).
 */
public void testReadTwoTrackSamplesWithSkippedTrack() throws IOException, InterruptedException {
  byte[] media = createFrameData(100);
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addUnsupportedTrack(UNSUPPORTED_TRACK_NUMBER)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .addSimpleBlockMedia(1 /* trackNumber */, 0 /* clusterTimecode */, 0 /* blockTimecode */,
          true /* keyframe */, false /* invisible */, media)
      .addSimpleBlockMedia(2 /* trackNumber */, 0 /* clusterTimecode */, 0 /* blockTimecode */,
          true /* keyframe */, false /* invisible */, media)
      .addSimpleBlockMedia(17 /* trackNumber */, 0 /* clusterTimecode */, 0 /* blockTimecode */,
          true /* keyframe */, false /* invisible */, media)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertEquals(2, extractorOutput.numberOfTracks);
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertSample(0, media, 0, true, false, null, getTrackOutput(VIDEO_TRACK_NUMBER));
  assertSample(0, media, 0, true, false, null, getTrackOutput(AUDIO_TRACK_NUMBER));
}
/**
 * Whether the extractor is prepared.
 *
 * <p>Lazily flips to prepared once tracks have been built and every sample queue has a format.
 * On the transition it also snapshots the per-queue formats, applying the adaptive max video
 * dimensions to video formats when either dimension is set.
 *
 * @return True if the extractor is prepared. False otherwise.
 */
public boolean isPrepared() {
  if (!prepared && tracksBuilt) {
    // All queues must have received a format before we can report prepared.
    for (int i = 0; i < sampleQueues.size(); i++) {
      if (!sampleQueues.valueAt(i).hasFormat()) {
        return false;
      }
    }
    prepared = true;
    sampleQueueFormats = new MediaFormat[sampleQueues.size()];
    for (int i = 0; i < sampleQueueFormats.length; i++) {
      MediaFormat format = sampleQueues.valueAt(i).getFormat();
      if (MimeTypes.isVideo(format.mimeType)
          && (adaptiveMaxWidth != MediaFormat.NO_VALUE
              || adaptiveMaxHeight != MediaFormat.NO_VALUE)) {
        // Propagate adaptive playback bounds so codecs can be configured for the max dimensions.
        format = format.copyWithMaxVideoDimensions(adaptiveMaxWidth, adaptiveMaxHeight);
      }
      sampleQueueFormats[i] = format;
    }
  }
  return prepared;
}
/**
 * Parses a valid MP4 file with an H264 video track and checks the seek map, the reported
 * video/audio formats, and every video sample's data, flags and timestamp.
 */
public void testParsesValidMp4File() throws Exception {
  TestUtil.consumeTestData(extractor,
      getTestInputData(true /* includeStss */, false /* mp4vFormat */));
  // The seek map is correct.
  assertSeekMap(extractorOutput.seekMap, true);
  // The video and audio formats are set correctly.
  assertEquals(2, extractorOutput.trackOutputs.size());
  MediaFormat videoFormat = extractorOutput.trackOutputs.get(0).format;
  MediaFormat audioFormat = extractorOutput.trackOutputs.get(1).format;
  assertEquals(MimeTypes.VIDEO_H264, videoFormat.mimeType);
  assertEquals(VIDEO_WIDTH, videoFormat.width);
  assertEquals(VIDEO_HEIGHT, videoFormat.height);
  assertEquals(MimeTypes.AUDIO_AAC, audioFormat.mimeType);
  // The timestamps and sizes are set correctly.
  FakeTrackOutput videoTrackOutput = extractorOutput.trackOutputs.get(0);
  videoTrackOutput.assertSampleCount(SAMPLE_TIMESTAMPS.length);
  for (int i = 0; i < SAMPLE_TIMESTAMPS.length; i++) {
    byte[] sampleData = getOutputSampleData(i, true);
    int sampleFlags = SAMPLE_IS_SYNC[i] ? C.SAMPLE_FLAG_SYNC : 0;
    long sampleTimestampUs = getVideoTimestampUs(SAMPLE_TIMESTAMPS[i]);
    videoTrackOutput.assertSample(i, sampleData, sampleTimestampUs, sampleFlags, null);
  }
}
@Override protected void parsePayload(ParsableByteArray data, long timeUs) { int packetType = data.readUnsignedByte(); // Parse sequence header just in case it was not done before. if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) { byte[] audioSpecifiConfig = new byte[data.bytesLeft()]; data.readBytes(audioSpecifiConfig, 0, audioSpecifiConfig.length); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecifiConfig); MediaFormat mediaFormat = MediaFormat.createAudioFormat(null, MimeTypes.AUDIO_AAC, MediaFormat.NO_VALUE, MediaFormat.NO_VALUE, getDurationUs(), audioParams.second, audioParams.first, Collections.singletonList(audioSpecifiConfig), null); output.format(mediaFormat); hasOutputFormat = true; } else if (packetType == AAC_PACKET_TYPE_AAC_RAW) { // Sample audio AAC frames int bytesToWrite = data.bytesLeft(); output.sampleData(data, bytesToWrite); output.sampleMetadata(timeUs, C.SAMPLE_FLAG_SYNC, bytesToWrite, 0, null); } }
/**
 * Asserts that the given track's audio format matches the test constants: duration scaled by the
 * timecode scale, channel count, sample rate and MIME type. For Opus it also checks the three
 * initialization-data entries (codec private, codec delay, seek pre-roll); for Vorbis, the sizes
 * of the info and books setup headers.
 */
private void assertAudioFormat(int trackNumber, int timecodeScale, String expectedMimeType) {
  MediaFormat format = getTrackOutput(trackNumber).format;
  assertEquals(Util.scaleLargeTimestamp(TEST_DURATION_TIMECODE, timecodeScale, 1000),
      format.durationUs);
  assertEquals(TEST_CHANNEL_COUNT, format.channelCount);
  assertEquals(TEST_SAMPLE_RATE, format.sampleRate);
  assertEquals(expectedMimeType, format.mimeType);
  if (MimeTypes.AUDIO_OPUS.equals(expectedMimeType)) {
    assertEquals(3, format.initializationData.size());
    android.test.MoreAsserts.assertEquals(TEST_OPUS_CODEC_PRIVATE,
        format.initializationData.get(0));
    // Delay and pre-roll are encoded as native-order longs in the init data.
    assertEquals(TEST_CODEC_DELAY, ByteBuffer.wrap(format.initializationData.get(1))
        .order(ByteOrder.nativeOrder()).getLong());
    assertEquals(TEST_SEEK_PRE_ROLL, ByteBuffer.wrap(format.initializationData.get(2))
        .order(ByteOrder.nativeOrder()).getLong());
  } else if (MimeTypes.AUDIO_VORBIS.equals(expectedMimeType)) {
    assertEquals(2, format.initializationData.size());
    assertEquals(TEST_VORBIS_INFO_SIZE, format.initializationData.get(0).length);
    assertEquals(TEST_VORBIS_BOOKS_SIZE, format.initializationData.get(1).length);
  }
}
/**
 * Builds a WebM stream with two video (VP9) and two audio (Vorbis + Opus) tracks and checks that
 * all four tracks and their formats are reported.
 */
public void testPrepareFourTracks() throws IOException, InterruptedException {
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addVorbisTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE,
          getVorbisCodecPrivate())
      .addVp9Track(SECOND_VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addOpusTrack(SECOND_AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE,
          TEST_CODEC_DELAY, TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertEquals(4, extractorOutput.numberOfTracks);
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_VORBIS);
  assertVp9VideoFormat(SECOND_VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(SECOND_AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertIndex(DEFAULT_TIMECODE_SCALE, 1);
}
protected int getContentType(Representation representation) { String mimeType = representation.format.mimeType; if (TextUtils.isEmpty(mimeType)) { return AdaptationSet.TYPE_UNKNOWN; } else if (MimeTypes.isVideo(mimeType)) { return AdaptationSet.TYPE_VIDEO; } else if (MimeTypes.isAudio(mimeType)) { return AdaptationSet.TYPE_AUDIO; } else if (MimeTypes.isText(mimeType) || MimeTypes.APPLICATION_TTML.equals(mimeType)) { return AdaptationSet.TYPE_TEXT; } else if (MimeTypes.APPLICATION_MP4.equals(mimeType)) { // The representation uses mp4 but does not contain video or audio. Use codecs to determine // whether the container holds text. String codecs = representation.format.codecs; if ("stpp".equals(codecs) || "wvtt".equals(codecs)) { return AdaptationSet.TYPE_TEXT; } } return AdaptationSet.TYPE_UNKNOWN; }
/**
 * Parses a {@link ContentProtection} element.
 *
 * <p>Consumes events until the closing ContentProtection tag. If a cenc:pssh child is found, its
 * Base64 text is decoded into scheme init data and the scheme UUID is extracted from the pssh
 * atom. A pssh whose UUID cannot be parsed marks the whole element unsupported.
 *
 * @throws XmlPullParserException If an error occurs parsing the element.
 * @throws IOException If an error occurs reading the element.
 * @return The parsed {@link ContentProtection} element, or null if the element is unsupported.
 **/
protected ContentProtection parseContentProtection(XmlPullParser xpp)
    throws XmlPullParserException, IOException {
  String schemeIdUri = xpp.getAttributeValue(null, "schemeIdUri");
  UUID uuid = null;
  SchemeInitData data = null;
  boolean seenPsshElement = false;
  do {
    xpp.next();
    // The cenc:pssh element is defined in 23001-7:2015.
    if (ParserUtil.isStartTag(xpp, "cenc:pssh") && xpp.next() == XmlPullParser.TEXT) {
      seenPsshElement = true;
      data = new SchemeInitData(MimeTypes.VIDEO_MP4,
          Base64.decode(xpp.getText(), Base64.DEFAULT));
      uuid = PsshAtomUtil.parseUuid(data.data);
    }
  } while (!ParserUtil.isEndTag(xpp, "ContentProtection"));
  if (seenPsshElement && uuid == null) {
    // A pssh was present but its UUID could not be parsed; treat the element as unsupported.
    Log.w(TAG, "Skipped unsupported ContentProtection element");
    return null;
  }
  return buildContentProtection(schemeIdUri, uuid, data);
}
/**
 * Builds a WebM stream with VP9 and Opus tracks, one keyframe sample on each, and checks that
 * both formats and both samples are output.
 */
public void testReadTwoTrackSamples() throws IOException, InterruptedException {
  byte[] media = createFrameData(100);
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addVp9Track(VIDEO_TRACK_NUMBER, TEST_WIDTH, TEST_HEIGHT, null)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .addSimpleBlockMedia(1 /* trackNumber */, 0 /* clusterTimecode */, 0 /* blockTimecode */,
          true /* keyframe */, false /* invisible */, media)
      .addSimpleBlockMedia(2 /* trackNumber */, 0 /* clusterTimecode */, 0 /* blockTimecode */,
          true /* keyframe */, false /* invisible */, media)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertEquals(2, extractorOutput.numberOfTracks);
  assertVp9VideoFormat(VIDEO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE);
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertSample(0, media, 0, true, false, null, getTrackOutput(VIDEO_TRACK_NUMBER));
  assertSample(0, media, 0, true, false, null, getTrackOutput(AUDIO_TRACK_NUMBER));
}
/**
 * Feeds a simple block using Xiph lacing (frame sizes 256, 1 and the remaining 43 bytes of the
 * 300-byte payload) and checks each laced frame is emitted as its own sample with a timestamp
 * derived from the track's default frame duration.
 */
public void testReadSampleWithXiphLacing() throws IOException, InterruptedException {
  byte[] media = createFrameData(300);
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE, TEST_DEFAULT_DURATION_NS)
      .addSimpleBlockMediaWithXiphLacing(2 /* trackNumber */, 0 /* clusterTimecode */,
          0 /* blockTimecode */, media, 256, 1, 243)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  // Timestamps step by the default duration, converted from ns to us.
  assertSample(0, Arrays.copyOfRange(media, 0, 256), 0 * TEST_DEFAULT_DURATION_NS / 1000, true,
      false, null, getTrackOutput(AUDIO_TRACK_NUMBER));
  assertSample(1, Arrays.copyOfRange(media, 256, 257), 1 * TEST_DEFAULT_DURATION_NS / 1000, true,
      false, null, getTrackOutput(AUDIO_TRACK_NUMBER));
  assertSample(2, Arrays.copyOfRange(media, 257, 300), 2 * TEST_DEFAULT_DURATION_NS / 1000, true,
      false, null, getTrackOutput(AUDIO_TRACK_NUMBER));
}
/** * @return the maximum frame size for an H264 stream that can be decoded on the device. */ public static int maxH264DecodableFrameSize() throws DecoderQueryException { if (maxH264DecodableFrameSize == -1) { int result = 0; DecoderInfo decoderInfo = getDecoderInfo(MimeTypes.VIDEO_H264, false); if (decoderInfo != null) { for (CodecProfileLevel profileLevel : decoderInfo.capabilities.profileLevels) { result = Math.max(avcLevelToMaxFrameSize(profileLevel.level), result); } // We assume support for at least 360p. result = Math.max(result, 480 * 360); } maxH264DecodableFrameSize = result; } return maxH264DecodableFrameSize; }
/**
 * Maps a Smooth Streaming FourCC codec identifier (case-insensitive) to its MIME type, or null
 * if the FourCC is not recognized.
 */
private static String fourCCToMimeType(String fourCC) {
  if (isAnyOf(fourCC, "H264", "X264", "AVC1", "DAVC")) {
    return MimeTypes.VIDEO_H264;
  } else if (isAnyOf(fourCC, "AAC", "AACL", "AACH", "AACP")) {
    return MimeTypes.AUDIO_AAC;
  } else if (isAnyOf(fourCC, "TTML")) {
    return MimeTypes.APPLICATION_TTML;
  } else if (isAnyOf(fourCC, "ac-3", "dac3")) {
    return MimeTypes.AUDIO_AC3;
  } else if (isAnyOf(fourCC, "ec-3", "dec3")) {
    return MimeTypes.AUDIO_E_AC3;
  } else if (isAnyOf(fourCC, "dtsc")) {
    return MimeTypes.AUDIO_DTS;
  } else if (isAnyOf(fourCC, "dtsh", "dtsl")) {
    return MimeTypes.AUDIO_DTS_HD;
  } else if (isAnyOf(fourCC, "dtse")) {
    return MimeTypes.AUDIO_DTS_EXPRESS;
  } else if (isAnyOf(fourCC, "opus")) {
    return MimeTypes.AUDIO_OPUS;
  }
  return null;
}

/** Returns whether {@code value} case-insensitively equals any of {@code candidates}. */
private static boolean isAnyOf(String value, String... candidates) {
  for (String candidate : candidates) {
    if (value.equalsIgnoreCase(candidate)) {
      return true;
    }
  }
  return false;
}
/**
 * Shows a popup menu listing the selectable video qualities for renderer 0.
 *
 * <p>Item id 0 is a non-selectable "Bitrate" header; track i is added with menu id i + 1, so the
 * click listener maps back with (itemId - 1). Adaptive formats are listed as "Auto"; fixed
 * formats are listed by bitrate in kbps, de-duplicated by bitrate value.
 */
@Override
public void setQuality(View v) {
  PopupMenu popup = new PopupMenu(activity, v);
  popup.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
    @Override
    public boolean onMenuItemClick(MenuItem item) {
      // Menu ids are offset by 1 relative to track indices.
      player.setSelectedTrack(0, (item.getItemId() - 1));
      return false;
    }
  });
  // Bitrates already added, used to skip duplicate entries.
  ArrayList<Integer> formats = new ArrayList<>();
  Menu menu = popup.getMenu();
  menu.add(Menu.NONE, 0, 0, "Bitrate");
  for (int i = 0; i < player.getTrackCount(0); i++) {
    MediaFormat format = player.getTrackFormat(0, i);
    if (MimeTypes.isVideo(format.mimeType)) {
      // NOTE(review): debug logging with tag "dsa" — consider removing or using a proper tag.
      Log.e("dsa", format.bitrate + "");
      if (format.adaptive) {
        menu.add(1, (i + 1), (i + 1), "Auto");
      } else {
        if (!formats.contains(format.bitrate)) {
          menu.add(1, (i + 1), (i + 1), (format.bitrate) / 1000 + " kbps");
          formats.add(format.bitrate);
        }
      }
    }
  }
  menu.setGroupCheckable(1, true, true);
  menu.findItem((player.getSelectedTrack(0) + 1)).setChecked(true);
  popup.show();
}
/**
 * Prepares the underlying sample extractor and caches its track formats.
 *
 * <p>Also records the first video track index, installs an MPEG2 or H264 closed-caption parser
 * based on that track's MIME type, and appends a synthetic CEA-708 text track after the real
 * tracks when a video track exists.
 *
 * @return True if preparation succeeded. False otherwise.
 */
@Override
public boolean prepare() throws IOException {
  if(!mSampleExtractor.prepare()) {
    return false;
  }
  List<MediaFormat> formats = mSampleExtractor.getTrackFormats();
  int trackCount = formats.size();
  mTrackFormats.clear();
  mReachedEos.clear();
  for (int i = 0; i < trackCount; ++i) {
    mTrackFormats.add(formats.get(i));
    mReachedEos.add(false);
    String mime = formats.get(i).mimeType;
    // Remember the first video track and pick the matching CC parser for it.
    if (MimeTypes.isVideo(mime) && mVideoTrackIndex == -1) {
      mVideoTrackIndex = i;
      if (android.media.MediaFormat.MIMETYPE_VIDEO_MPEG2.equals(mime)) {
        mCcParser = new Mpeg2CcParser();
      } else if (android.media.MediaFormat.MIMETYPE_VIDEO_AVC.equals(mime)) {
        mCcParser = new H264CcParser();
      }
    }
  }
  if (mVideoTrackIndex != -1) {
    // The CEA-708 text track is appended after all real tracks.
    mCea708TextTrackIndex = trackCount;
  }
  if (mCea708TextTrackIndex >= 0) {
    // NOTE(review): duration is taken from track 0 — assumes track 0's duration represents the
    // stream duration; confirm.
    mTrackFormats.add(MediaFormat.createTextFormat(null, MIMETYPE_TEXT_CEA_708, 0,
        mTrackFormats.get(0).durationUs, ""));
  }
  return true;
}
/**
 * Finishes I/O operations and releases all the resources.
 *
 * <p>Blocks until the I/O handler has processed a release message, unregisters chunk-eviction
 * listeners, and — when recording — writes audio/video meta files (the first audio and first
 * video track found) before releasing the buffer manager and quitting the I/O looper.
 *
 * @throws IOException If writing the recording meta files fails.
 */
public void release() throws IOException {
  if (mIoHandler == null) {
    // Never initialized (or already torn down); nothing to release.
    return;
  }
  // Finishes all I/O operations.
  ConditionVariable conditionVariable = new ConditionVariable();
  mIoHandler.sendMessage(mIoHandler.obtainMessage(MSG_RELEASE, conditionVariable));
  conditionVariable.block();

  for (int i = 0; i < mTrackCount; ++i) {
    mBufferManager.unregisterChunkEvictedListener(mIds.get(i));
  }
  try {
    if (mBufferReason == RecordingSampleBuffer.BUFFER_REASON_RECORDING && mTrackCount > 0) {
      // Saves meta information for recording.
      Pair<String, android.media.MediaFormat> audio = null, video = null;
      for (int i = 0; i < mTrackCount; ++i) {
        android.media.MediaFormat format =
            mMediaFormats.get(i).getFrameworkMediaFormatV16();
        format.setLong(android.media.MediaFormat.KEY_DURATION, mBufferDurationUs);
        if (audio == null && MimeTypes.isAudio(mMediaFormats.get(i).mimeType)) {
          audio = new Pair<>(mIds.get(i), format);
        } else if (video == null && MimeTypes.isVideo(mMediaFormats.get(i).mimeType)) {
          video = new Pair<>(mIds.get(i), format);
        }
        if (audio != null && video != null) {
          // One of each is enough; stop scanning.
          break;
        }
      }
      mBufferManager.writeMetaFiles(audio, video);
    }
  } finally {
    // Always release the buffer manager and stop the I/O thread, even if writing meta fails.
    mBufferManager.release();
    mIoHandler.getLooper().quitSafely();
  }
}
/**
 * Creates {@link MediaFormat} from {@link android.media.MediaFormat}.
 * Since {@link com.google.android.exoplayer.TrackRenderer} uses {@link MediaFormat},
 * {@link android.media.MediaFormat} should be converted to be used with ExoPlayer.
 *
 * <p>Optional keys absent from the framework format map to the ExoPlayer "no value" sentinel
 * via the getOptional* helpers. Codec-specific data is copied from the csd-0, csd-1, ...
 * buffers, and audio/raw formats get 16-bit PCM encoding.
 */
public static MediaFormat createMediaFormat(android.media.MediaFormat format) {
  String mimeType = format.getString(android.media.MediaFormat.KEY_MIME);
  String language = getOptionalStringV16(format, android.media.MediaFormat.KEY_LANGUAGE);
  int maxInputSize =
      getOptionalIntegerV16(format, android.media.MediaFormat.KEY_MAX_INPUT_SIZE);
  int width = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_WIDTH);
  int height = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_HEIGHT);
  int rotationDegrees = getOptionalIntegerV16(format, "rotation-degrees");
  int channelCount =
      getOptionalIntegerV16(format, android.media.MediaFormat.KEY_CHANNEL_COUNT);
  int sampleRate = getOptionalIntegerV16(format, android.media.MediaFormat.KEY_SAMPLE_RATE);
  int encoderDelay = getOptionalIntegerV16(format, "encoder-delay");
  int encoderPadding = getOptionalIntegerV16(format, "encoder-padding");
  ArrayList<byte[]> initializationData = new ArrayList<>();
  // Copy every codec-specific-data buffer (csd-0, csd-1, ...) into the init data list.
  for (int i = 0; format.containsKey("csd-" + i); i++) {
    ByteBuffer buffer = format.getByteBuffer("csd-" + i);
    byte[] data = new byte[buffer.limit()];
    buffer.get(data);
    initializationData.add(data);
    // Reset the buffer so the framework format remains readable by other consumers.
    buffer.flip();
  }
  long durationUs = format.containsKey(android.media.MediaFormat.KEY_DURATION)
      ? format.getLong(android.media.MediaFormat.KEY_DURATION) : C.UNKNOWN_TIME_US;
  int pcmEncoding = MimeTypes.AUDIO_RAW.equals(mimeType) ?
      C.ENCODING_PCM_16BIT : MediaFormat.NO_VALUE;
  MediaFormat mediaFormat = new MediaFormat(null, mimeType, MediaFormat.NO_VALUE, maxInputSize,
      durationUs, width, height, rotationDegrees, MediaFormat.NO_VALUE, channelCount,
      sampleRate, language, MediaFormat.OFFSET_SAMPLE_RELATIVE, initializationData, false,
      MediaFormat.NO_VALUE, MediaFormat.NO_VALUE, pcmEncoding, encoderDelay, encoderPadding,
      null, MediaFormat.NO_VALUE);
  // Keep a reference to the original framework format for later round-tripping.
  mediaFormat.setFrameworkFormatV16(format);
  return mediaFormat;
}
@Override public boolean isSecurePlaybackSupported(String mimeType, MediaCodecInfo.CodecCapabilities capabilities) { // Secure decoders weren't explicitly listed prior to API level 21. We assume that // a secure H264 decoder exists. return MimeTypes.VIDEO_H264.equals(mimeType); }
@Override protected boolean handlesTrack(MediaFormat mediaFormat) throws DecoderQueryException { // TODO: Use MediaCodecList.findDecoderForFormat on API 23. String mimeType = mediaFormat.mimeType; return MimeTypes.isVideo(mimeType) && (MimeTypes.VIDEO_UNKNOWN.equals(mimeType) || MediaCodecUtil.getDecoderInfo(mimeType, false) != null); }
@SuppressLint("InlinedApi") private void maybeSetMaxInputSize(android.media.MediaFormat format, boolean codecIsAdaptive) { if (!MimeTypes.VIDEO_H264.equals(format.getString(android.media.MediaFormat.KEY_MIME))) { // Only set a max input size for H264 for now. return; } if (format.containsKey(android.media.MediaFormat.KEY_MAX_INPUT_SIZE)) { // Already set. The source of the format may know better, so do nothing. return; } if ("BRAVIA 4K 2015".equals(Util.MODEL)) { // The Sony BRAVIA 4k TV has input buffers that are too small for the calculated 4k video // maximum input size, so use the default value. return; } int maxHeight = format.getInteger(android.media.MediaFormat.KEY_HEIGHT); if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_HEIGHT)) { maxHeight = Math.max(maxHeight, format.getInteger(android.media.MediaFormat.KEY_MAX_HEIGHT)); } int maxWidth = format.getInteger(android.media.MediaFormat.KEY_WIDTH); if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_WIDTH)) { maxWidth = Math.max(maxHeight, format.getInteger(android.media.MediaFormat.KEY_MAX_WIDTH)); } // H264 requires compression ratio of at least 2, and uses macroblocks. int maxInputSize = ((maxWidth + 15) / 16) * ((maxHeight + 15) / 16) * 192; format.setInteger(android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize); }
@TargetApi(18) private DrmInitData getDrmInitDataV18() { // MediaExtractor only supports psshInfo for MP4, so it's ok to hard code the mimeType here. Map<UUID, byte[]> psshInfo = extractor.getPsshInfo(); if (psshInfo == null || psshInfo.isEmpty()) { return null; } DrmInitData.Mapped drmInitData = new DrmInitData.Mapped(MimeTypes.VIDEO_MP4); for (UUID uuid : psshInfo.keySet()) { byte[] psshAtom = PsshAtomUtil.buildPsshAtom(uuid, psshInfo.get(uuid)); drmInitData.put(uuid, psshAtom); } return drmInitData; }
/**
 * Parses the sample header.
 *
 * <p>Reads the remainder of an ADTS header from {@code adtsScratch}. On the first header it
 * reconstructs the AudioSpecificConfig (object type, sample-rate index, channel config) and
 * outputs the AAC track format; on later headers those fields are skipped. In both cases the
 * frame length field is read to derive {@code sampleSize}, net of the sync word, header and
 * optional CRC.
 */
private void parseHeader() {
  adtsScratch.setPosition(0);

  if (!hasOutputFormat) {
    // profile (2 bits, stored as value - 1), sample-rate index (4), private bit (1),
    // channel config (3) — exactly the fields needed to rebuild the AudioSpecificConfig.
    int audioObjectType = adtsScratch.readBits(2) + 1;
    int sampleRateIndex = adtsScratch.readBits(4);
    adtsScratch.skipBits(1);
    int channelConfig = adtsScratch.readBits(3);

    byte[] audioSpecificConfig = CodecSpecificDataUtil.buildAacAudioSpecificConfig(
        audioObjectType, sampleRateIndex, channelConfig);
    Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig(
        audioSpecificConfig);

    MediaFormat mediaFormat = MediaFormat.createAudioFormat(MimeTypes.AUDIO_AAC,
        MediaFormat.NO_VALUE, audioParams.second, audioParams.first,
        Collections.singletonList(audioSpecificConfig));
    // An AAC frame always decodes to 1024 PCM samples.
    frameDurationUs = (C.MICROS_PER_SECOND * 1024L) / mediaFormat.sampleRate;
    output.format(mediaFormat);
    hasOutputFormat = true;
  } else {
    // Skip the 10 config bits parsed above on the first frame.
    adtsScratch.skipBits(10);
  }

  adtsScratch.skipBits(4);
  // frame_length (13 bits) includes the whole ADTS frame; subtract sync word and header.
  sampleSize = adtsScratch.readBits(13) - 2 /* the sync word */ - HEADER_SIZE;
  if (hasCrc) {
    sampleSize -= CRC_SIZE;
  }
}
/**
 * Parses an stsd (sample description) atom, dispatching each child sample entry to the video or
 * audio parser by atom type, or creating a text format directly for TTML/tx3g entries.
 * Unrecognized children are skipped by seeking to the next child boundary.
 */
private static StsdDataHolder parseStsd(ParsableByteArray stsd, long durationUs) {
  stsd.setPosition(Atom.FULL_HEADER_SIZE);
  int numberOfEntries = stsd.readInt();
  StsdDataHolder holder = new StsdDataHolder(numberOfEntries);
  for (int i = 0; i < numberOfEntries; i++) {
    int childStartPosition = stsd.getPosition();
    int childAtomSize = stsd.readInt();
    Assertions.checkArgument(childAtomSize > 0, "childAtomSize should be positive");
    int childAtomType = stsd.readInt();
    if (childAtomType == Atom.TYPE_avc1 || childAtomType == Atom.TYPE_avc3
        || childAtomType == Atom.TYPE_encv || childAtomType == Atom.TYPE_mp4v
        || childAtomType == Atom.TYPE_hvc1 || childAtomType == Atom.TYPE_hev1
        || childAtomType == Atom.TYPE_s263) {
      parseVideoSampleEntry(stsd, childStartPosition, childAtomSize, durationUs, holder, i);
    } else if (childAtomType == Atom.TYPE_mp4a || childAtomType == Atom.TYPE_enca
        || childAtomType == Atom.TYPE_ac_3) {
      parseAudioSampleEntry(stsd, childAtomType, childStartPosition, childAtomSize, durationUs,
          holder, i);
    } else if (childAtomType == Atom.TYPE_TTML) {
      holder.mediaFormat = MediaFormat.createTextFormat(MimeTypes.APPLICATION_TTML, durationUs);
    } else if (childAtomType == Atom.TYPE_tx3g) {
      holder.mediaFormat = MediaFormat.createTextFormat(MimeTypes.APPLICATION_TX3G, durationUs);
    }
    // Always realign to the next child, regardless of how much the entry parser consumed.
    stsd.setPosition(childStartPosition + childAtomSize);
  }
  return holder;
}
/**
 * Asserts that the given track's format is VP9 video with the test dimensions and a duration
 * equal to the test duration timecode scaled by the timecode scale.
 */
private void assertVp9VideoFormat(int trackNumber, int timecodeScale) {
  MediaFormat format = getTrackOutput(trackNumber).format;
  long expectedDurationUs =
      Util.scaleLargeTimestamp(TEST_DURATION_TIMECODE, timecodeScale, 1000);
  assertEquals(expectedDurationUs, format.durationUs);
  assertEquals(TEST_WIDTH, format.width);
  assertEquals(TEST_HEIGHT, format.height);
  assertEquals(MimeTypes.VIDEO_VP9, format.mimeType);
}
/**
 * Returns the decoder info for the MIME type, substituting the raw passthrough decoder for
 * passthrough audio formats.
 */
@Override
protected DecoderInfo getDecoderInfo(String mimeType, boolean requiresSecureDecoder)
    throws DecoderQueryException {
  boolean passthrough = MimeTypes.isPassthroughAudio(mimeType);
  return passthrough
      ? new DecoderInfo(RAW_DECODER_NAME, true)
      : super.getDecoderInfo(mimeType, requiresSecureDecoder);
}
@Override protected void configureCodec(MediaCodec codec, String codecName, android.media.MediaFormat format, android.media.MediaCrypto crypto) { if (RAW_DECODER_NAME.equals(codecName)) { // Override the MIME type used to configure the codec if we are using a passthrough decoder. String mimeType = format.getString(android.media.MediaFormat.KEY_MIME); format.setString(android.media.MediaFormat.KEY_MIME, MimeTypes.AUDIO_RAW); codec.configure(format, null, crypto, 0); format.setString(android.media.MediaFormat.KEY_MIME, mimeType); } else { codec.configure(format, null, crypto, 0); } }
/**
 * Reconfigures the audio track on an output format change. Passthrough audio keeps the input
 * format's framework representation; decoded audio uses the codec's output format.
 */
@Override
protected void onOutputFormatChanged(MediaFormat inputFormat,
    android.media.MediaFormat outputFormat) {
  boolean passthrough = MimeTypes.isPassthroughAudio(inputFormat.mimeType);
  android.media.MediaFormat trackFormat =
      passthrough ? inputFormat.getFrameworkMediaFormatV16() : outputFormat;
  audioTrack.reconfigure(trackFormat);
}
/**
 * Maps a DASH contentType attribute value ("audio", "video" or "text" base types) to an
 * {@link AdaptationSet} type constant, defaulting to {@code TYPE_UNKNOWN}.
 */
protected int parseAdaptationSetType(String contentType) {
  if (TextUtils.isEmpty(contentType)) {
    return AdaptationSet.TYPE_UNKNOWN;
  }
  if (MimeTypes.BASE_TYPE_AUDIO.equals(contentType)) {
    return AdaptationSet.TYPE_AUDIO;
  }
  if (MimeTypes.BASE_TYPE_VIDEO.equals(contentType)) {
    return AdaptationSet.TYPE_VIDEO;
  }
  if (MimeTypes.BASE_TYPE_TEXT.equals(contentType)) {
    return AdaptationSet.TYPE_TEXT;
  }
  return AdaptationSet.TYPE_UNKNOWN;
}
/**
 * Maps a MIME type to an {@link AdaptationSet} type constant (audio, video, or text — including
 * TTML), defaulting to {@code TYPE_UNKNOWN} for empty or unrecognized types.
 */
protected int parseAdaptationSetTypeFromMimeType(String mimeType) {
  if (TextUtils.isEmpty(mimeType)) {
    return AdaptationSet.TYPE_UNKNOWN;
  }
  if (MimeTypes.isAudio(mimeType)) {
    return AdaptationSet.TYPE_AUDIO;
  }
  if (MimeTypes.isVideo(mimeType)) {
    return AdaptationSet.TYPE_VIDEO;
  }
  if (MimeTypes.isText(mimeType) || MimeTypes.isTtml(mimeType)) {
    return AdaptationSet.TYPE_TEXT;
  }
  return AdaptationSet.TYPE_UNKNOWN;
}
/**
 * Builds a WebM stream with a single Opus audio track and checks that its format and the index
 * are reported.
 */
public void testPrepareOpus() throws IOException, InterruptedException {
  byte[] data = new StreamBuilder()
      .setHeader(WEBM_DOC_TYPE)
      .setInfo(DEFAULT_TIMECODE_SCALE, TEST_DURATION_TIMECODE)
      .addOpusTrack(AUDIO_TRACK_NUMBER, TEST_CHANNEL_COUNT, TEST_SAMPLE_RATE, TEST_CODEC_DELAY,
          TEST_SEEK_PRE_ROLL, TEST_OPUS_CODEC_PRIVATE)
      .build(1);
  TestUtil.consumeTestData(extractor, data);
  assertTracksEnded();
  assertAudioFormat(AUDIO_TRACK_NUMBER, DEFAULT_TIMECODE_SCALE, MimeTypes.AUDIO_OPUS);
  assertIndex(DEFAULT_TIMECODE_SCALE, 1);
}
/**
 * Maps a FourCC codec identifier (case-insensitive) to its MIME type: H264 variants, AAC
 * variants, or TTML. Returns null for anything else.
 */
private static String fourCCToMimeType(String fourCC) {
  if (matchesAny(fourCC, "H264", "X264", "AVC1", "DAVC")) {
    return MimeTypes.VIDEO_H264;
  }
  if (matchesAny(fourCC, "AAC", "AACL", "AACH", "AACP")) {
    return MimeTypes.AUDIO_AAC;
  }
  if (matchesAny(fourCC, "TTML")) {
    return MimeTypes.APPLICATION_TTML;
  }
  return null;
}

/** Returns whether {@code value} case-insensitively equals any of {@code candidates}. */
private static boolean matchesAny(String value, String... candidates) {
  for (String candidate : candidates) {
    if (value.equalsIgnoreCase(candidate)) {
      return true;
    }
  }
  return false;
}