@Override protected void parsePayload(ParsableByteArray data, long timeUs) { int packetType = data.readUnsignedByte(); if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) { // Parse the sequence header. byte[] audioSpecificConfig = new byte[data.bytesLeft()]; data.readBytes(audioSpecificConfig, 0, audioSpecificConfig.length); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecificConfig); Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null, Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, Collections.singletonList(audioSpecificConfig), null, 0, null); output.format(format); hasOutputFormat = true; } else if (audioFormat != AUDIO_FORMAT_AAC || packetType == AAC_PACKET_TYPE_AAC_RAW) { int sampleSize = data.bytesLeft(); output.sampleData(data, sampleSize); output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, sampleSize, 0, null); } }
@Override protected void parsePayload(ParsableByteArray data, long timeUs) { int packetType = data.readUnsignedByte(); // Parse sequence header just in case it was not done before. if (packetType == AAC_PACKET_TYPE_SEQUENCE_HEADER && !hasOutputFormat) { byte[] audioSpecifiConfig = new byte[data.bytesLeft()]; data.readBytes(audioSpecifiConfig, 0, audioSpecifiConfig.length); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecifiConfig); Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null, Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, Collections.singletonList(audioSpecifiConfig), null, 0, null); output.format(format); hasOutputFormat = true; } else if (packetType == AAC_PACKET_TYPE_AAC_RAW) { // Sample audio AAC frames int bytesToWrite = data.bytesLeft(); output.sampleData(data, bytesToWrite); output.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, bytesToWrite, 0, null); } }
/**
 * Builds codec-specific data from a hex string, splitting it into NAL units when possible.
 * Returns an empty list for a null/empty input string.
 */
private static List<byte[]> buildCodecSpecificData(String codecSpecificDataString) {
  ArrayList<byte[]> csd = new ArrayList<>();
  if (TextUtils.isEmpty(codecSpecificDataString)) {
    return csd;
  }
  byte[] codecPrivateData = Util.getBytesFromHexString(codecSpecificDataString);
  byte[][] nalUnits = CodecSpecificDataUtil.splitNalUnits(codecPrivateData);
  if (nalUnits != null) {
    Collections.addAll(csd, nalUnits);
  } else {
    // Not NAL-delimited data: pass the raw bytes through as a single entry.
    csd.add(codecPrivateData);
  }
  return csd;
}
/**
 * Parses an AudioSpecificConfig, storing the sample rate and channel count, and returns the
 * number of bits consumed.
 */
private int parseAudioSpecificConfig(ParsableBitArray data) throws ParserException {
  int initialBitsLeft = data.bitsLeft();
  Pair<Integer, Integer> config = CodecSpecificDataUtil.parseAacAudioSpecificConfig(data, true);
  sampleRateHz = config.first;
  channelCount = config.second;
  return initialBitsLeft - data.bitsLeft();
}
/** * Parses the sample header. */ private void parseAdtsHeader() { adtsScratch.setPosition(0); if (!hasOutputFormat) { int audioObjectType = adtsScratch.readBits(2) + 1; if (audioObjectType != 2) { // The stream indicates AAC-Main (1), AAC-SSR (3) or AAC-LTP (4). When the stream indicates // AAC-Main it's more likely that the stream contains HE-AAC (5), which cannot be // represented correctly in the 2 bit audio_object_type field in the ADTS header. In // practice when the stream indicates AAC-SSR or AAC-LTP it more commonly contains AAC-LC or // HE-AAC. Since most Android devices don't support AAC-Main, AAC-SSR or AAC-LTP, and since // indicating AAC-LC works for HE-AAC streams, we pretend that we're dealing with AAC-LC and // hope for the best. In practice this often works. // See: https://github.com/google/ExoPlayer/issues/774 // See: https://github.com/google/ExoPlayer/issues/1383 Log.w(TAG, "Detected audio object type: " + audioObjectType + ", but assuming AAC LC."); audioObjectType = 2; } int sampleRateIndex = adtsScratch.readBits(4); adtsScratch.skipBits(1); int channelConfig = adtsScratch.readBits(3); byte[] audioSpecificConfig = CodecSpecificDataUtil.buildAacAudioSpecificConfig( audioObjectType, sampleRateIndex, channelConfig); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecificConfig); Format format = Format.createAudioSampleFormat(formatId, MimeTypes.AUDIO_AAC, null, Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, Collections.singletonList(audioSpecificConfig), null, 0, language); // In this class a sample is an access unit, but the MediaFormat sample rate specifies the // number of PCM audio samples per second. 
sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate; output.format(format); hasOutputFormat = true; } else { adtsScratch.skipBits(10); } adtsScratch.skipBits(4); int sampleSize = adtsScratch.readBits(13) - 2 /* the sync word */ - HEADER_SIZE; if (hasCrc) { sampleSize -= CRC_SIZE; } setReadingSampleState(output, sampleDurationUs, 0, sampleSize); }
/**
 * Reads a length-prefixed unit from {@code data} and returns it framed as a NAL unit, leaving
 * the read position just past the unit.
 */
private static byte[] buildNalUnitForChild(ParsableByteArray data) {
  int unitLength = data.readUnsignedShort();
  int unitOffset = data.getPosition();
  data.skipBytes(unitLength);
  return CodecSpecificDataUtil.buildNalUnit(data.data, unitOffset, unitLength);
}
/** * Parses the sample header. */ private void parseAdtsHeader() { adtsScratch.setPosition(0); if (!hasOutputFormat) { int audioObjectType = adtsScratch.readBits(2) + 1; if (audioObjectType != 2) { // The stream indicates AAC-Main (1), AAC-SSR (3) or AAC-LTP (4). When the stream indicates // AAC-Main it's more likely that the stream contains HE-AAC (5), which cannot be // represented correctly in the 2 bit audio_object_type field in the ADTS header. In // practice when the stream indicates AAC-SSR or AAC-LTP it more commonly contains AAC-LC or // HE-AAC. Since most Android devices don't support AAC-Main, AAC-SSR or AAC-LTP, and since // indicating AAC-LC works for HE-AAC streams, we pretend that we're dealing with AAC-LC and // hope for the best. In practice this often works. // See: https://github.com/google/ExoPlayer/issues/774 // See: https://github.com/google/ExoPlayer/issues/1383 Log.w(TAG, "Detected audio object type: " + audioObjectType + ", but assuming AAC LC."); audioObjectType = 2; } int sampleRateIndex = adtsScratch.readBits(4); adtsScratch.skipBits(1); int channelConfig = adtsScratch.readBits(3); byte[] audioSpecificConfig = CodecSpecificDataUtil.buildAacAudioSpecificConfig( audioObjectType, sampleRateIndex, channelConfig); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecificConfig); Format format = Format.createAudioSampleFormat(null, MimeTypes.AUDIO_AAC, null, Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, Collections.singletonList(audioSpecificConfig), null, 0, language); // In this class a sample is an access unit, but the MediaFormat sample rate specifies the // number of PCM audio samples per second. 
sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate; output.format(format); hasOutputFormat = true; } else { adtsScratch.skipBits(10); } adtsScratch.skipBits(4); int sampleSize = adtsScratch.readBits(13) - 2 /* the sync word */ - HEADER_SIZE; if (hasCrc) { sampleSize -= CRC_SIZE; } setReadingSampleState(output, sampleDurationUs, 0, sampleSize); }
/** * Parses the sample header. */ private void parseAdtsHeader() throws ParserException { adtsScratch.setPosition(0); if (!hasOutputFormat) { int audioObjectType = adtsScratch.readBits(2) + 1; if (audioObjectType != 2) { // The stream indicates AAC-Main (1), AAC-SSR (3) or AAC-LTP (4). When the stream indicates // AAC-Main it's more likely that the stream contains HE-AAC (5), which cannot be // represented correctly in the 2 bit audio_object_type field in the ADTS header. In // practice when the stream indicates AAC-SSR or AAC-LTP it more commonly contains AAC-LC or // HE-AAC. Since most Android devices don't support AAC-Main, AAC-SSR or AAC-LTP, and since // indicating AAC-LC works for HE-AAC streams, we pretend that we're dealing with AAC-LC and // hope for the best. In practice this often works. // See: https://github.com/google/ExoPlayer/issues/774 // See: https://github.com/google/ExoPlayer/issues/1383 Log.w(TAG, "Detected audio object type: " + audioObjectType + ", but assuming AAC LC."); audioObjectType = 2; } int sampleRateIndex = adtsScratch.readBits(4); adtsScratch.skipBits(1); int channelConfig = adtsScratch.readBits(3); byte[] audioSpecificConfig = CodecSpecificDataUtil.buildAacAudioSpecificConfig( audioObjectType, sampleRateIndex, channelConfig); Pair<Integer, Integer> audioParams = CodecSpecificDataUtil.parseAacAudioSpecificConfig( audioSpecificConfig); Format format = Format.createAudioSampleFormat(formatId, MimeTypes.AUDIO_AAC, null, Format.NO_VALUE, Format.NO_VALUE, audioParams.second, audioParams.first, Collections.singletonList(audioSpecificConfig), null, 0, language); // In this class a sample is an access unit, but the MediaFormat sample rate specifies the // number of PCM audio samples per second. 
sampleDurationUs = (C.MICROS_PER_SECOND * 1024) / format.sampleRate; output.format(format); hasOutputFormat = true; } else { adtsScratch.skipBits(10); } adtsScratch.skipBits(4); int sampleSize = adtsScratch.readBits(13) - 2 /* the sync word */ - HEADER_SIZE; if (hasCrc) { sampleSize -= CRC_SIZE; } setReadingSampleState(output, sampleDurationUs, 0, sampleSize); }