/**
 * Synchronizes with the MP3 stream if not yet synchronized, lazily publishes the seek map and
 * audio format on the first successful pass, then reads one sample.
 *
 * @param input The {@link ExtractorInput} from which data should be read.
 * @param seekPosition Not used by this implementation.
 * @return An extractor result code as produced by {@link #readSample}.
 * @throws IOException If an error occurs reading from the input.
 * @throws InterruptedException If the thread is interrupted while reading.
 */
@Override public int read(ExtractorInput input, PositionHolder seekPosition)
    throws IOException, InterruptedException {
  if (synchronizedHeaderData == 0) {
    try {
      synchronize(input, false);
    } catch (EOFException e) {
      // End of stream reached before synchronization completed.
      return RESULT_END_OF_INPUT;
    }
  }
  if (seeker == null) {
    // First read after synchronization: publish the seek map and the track format.
    seeker = setupSeeker(input);
    extractorOutput.seekMap(seeker);
    trackOutput.format(Format.createAudioSampleFormat(null, synchronizedHeader.mimeType, null,
        Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, synchronizedHeader.channels,
        synchronizedHeader.sampleRate, Format.NO_VALUE, gaplessInfoHolder.encoderDelay,
        gaplessInfoHolder.encoderPadding, null, null, 0, null,
        // ID3 metadata is suppressed when the corresponding flag is set.
        (flags & FLAG_DISABLE_ID3_METADATA) != 0 ? null : metadata));
  }
  return readSample(input);
}
/**
 * Synchronizes with the MP3 stream if not yet synchronized, lazily publishes the seek map and
 * audio format on the first successful pass, then reads one sample.
 *
 * @param input The {@link ExtractorInput} from which data should be read.
 * @param seekPosition Not used by this implementation.
 * @return An extractor result code as produced by {@link #readSample}.
 * @throws IOException If an error occurs reading from the input.
 * @throws InterruptedException If the thread is interrupted while reading.
 */
@Override public int read(ExtractorInput input, PositionHolder seekPosition)
    throws IOException, InterruptedException {
  if (synchronizedHeaderData == 0) {
    try {
      synchronize(input, false);
    } catch (EOFException e) {
      // End of stream reached before synchronization completed.
      return RESULT_END_OF_INPUT;
    }
  }
  if (seeker == null) {
    // First read after synchronization: publish the seek map and the track format.
    seeker = setupSeeker(input);
    extractorOutput.seekMap(seeker);
    trackOutput.format(Format.createAudioSampleFormat(null, synchronizedHeader.mimeType, null,
        Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, synchronizedHeader.channels,
        synchronizedHeader.sampleRate, Format.NO_VALUE, gaplessInfoHolder.encoderDelay,
        gaplessInfoHolder.encoderPadding, null, null, 0, null));
  }
  return readSample(input);
}
/**
 * Synchronizes with the MP3 stream if not yet synchronized, lazily sets up seeking (preferring
 * in-stream seeking metadata, with an optional constant-bitrate fallback) and publishes the
 * audio format on the first successful pass, then reads one sample.
 *
 * @param input The {@link ExtractorInput} from which data should be read.
 * @param seekPosition Not used by this implementation.
 * @return An extractor result code as produced by {@link #readSample}.
 * @throws IOException If an error occurs reading from the input.
 * @throws InterruptedException If the thread is interrupted while reading.
 */
@Override public int read(ExtractorInput input, PositionHolder seekPosition)
    throws IOException, InterruptedException {
  if (synchronizedHeaderData == 0) {
    try {
      synchronize(input, false);
    } catch (EOFException e) {
      // End of stream reached before synchronization completed.
      return RESULT_END_OF_INPUT;
    }
  }
  if (seeker == null) {
    // Prefer seeking metadata embedded in the stream. Fall back to constant-bitrate seeking
    // when metadata is absent, or present but unseekable and CBR seeking is enabled.
    seeker = maybeReadSeekFrame(input);
    if (seeker == null
        || (!seeker.isSeekable() && (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0)) {
      seeker = getConstantBitrateSeeker(input);
    }
    extractorOutput.seekMap(seeker);
    trackOutput.format(Format.createAudioSampleFormat(null, synchronizedHeader.mimeType, null,
        Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, synchronizedHeader.channels,
        synchronizedHeader.sampleRate, Format.NO_VALUE, gaplessInfoHolder.encoderDelay,
        gaplessInfoHolder.encoderPadding, null, null, 0, null,
        // ID3 metadata is suppressed when the corresponding flag is set.
        (flags & FLAG_DISABLE_ID3_METADATA) != 0 ? null : metadata));
  }
  return readSample(input);
}
/**
 * Creates a reader for MPEG audio elementary streams.
 *
 * @param language The language of the track, used when the output format is created.
 */
public MpegAudioReader(String language) {
  this.language = language;
  header = new MpegAudioHeader();
  // The first byte of an MPEG Audio frame header is always 0xFF, so pre-fill it in the scratch
  // buffer; only the remaining three header bytes need to be collected from the stream.
  headerScratch = new ParsableByteArray(4);
  headerScratch.data[0] = (byte) 0xFF;
  state = STATE_FINDING_HEADER;
}
/**
 * Attempts to read the remaining two bytes of the frame header.
 * <p>
 * If a frame header is read in full then the state is changed to {@link #STATE_READING_FRAME},
 * the media format is output if this has not previously occurred, the four header bytes are
 * output as sample data, and the position of the source is advanced to the byte that immediately
 * follows the header.
 * <p>
 * If a frame header is read in full but cannot be parsed then the state is changed to
 * {@link #STATE_READING_HEADER}.
 * <p>
 * If a frame header is not read in full then the position of the source is advanced to the limit,
 * and the method should be called again with the next source to continue the read.
 *
 * @param source The source from which to read.
 */
private void readHeaderRemainder(ParsableByteArray source) {
  int bytesToRead = Math.min(source.bytesLeft(), HEADER_SIZE - frameBytesRead);
  source.readBytes(headerScratch.data, frameBytesRead, bytesToRead);
  frameBytesRead += bytesToRead;
  if (frameBytesRead < HEADER_SIZE) {
    // We haven't read the whole header yet.
    return;
  }
  headerScratch.setPosition(0);
  boolean parsedHeader = MpegAudioHeader.populateHeader(headerScratch.readInt(), header);
  if (!parsedHeader) {
    // We thought we'd located a frame header, but we hadn't.
    frameBytesRead = 0;
    state = STATE_READING_HEADER;
    return;
  }
  frameSize = header.frameSize;
  if (!hasOutputFormat) {
    // Derive the per-frame duration from the parsed header and output the format exactly once.
    frameDurationUs = (C.MICROS_PER_SECOND * header.samplesPerFrame) / header.sampleRate;
    Format format = Format.createAudioSampleFormat(formatId, header.mimeType, null,
        Format.NO_VALUE, MpegAudioHeader.MAX_FRAME_SIZE_BYTES, header.channels,
        header.sampleRate, null, null, 0, language);
    output.format(format);
    hasOutputFormat = true;
  }
  // Output the four header bytes as the start of this sample's data.
  headerScratch.setPosition(0);
  output.sampleData(headerScratch, HEADER_SIZE);
  state = STATE_READING_FRAME;
}
/**
 * Creates an extractor configured with the given behavior flags and an optional forced
 * timestamp for the first output sample.
 *
 * @param flags Flags that control the extractor's behavior.
 * @param forcedFirstSampleTimestampUs A timestamp to force for the first sample, or
 *     {@link C#TIME_UNSET} if forcing is not required.
 */
public Mp3Extractor(@Flags int flags, long forcedFirstSampleTimestampUs) {
  this.flags = flags;
  this.forcedFirstSampleTimestampUs = forcedFirstSampleTimestampUs;
  // The timestamp basis is only established once the first sample is read.
  basisTimeUs = C.TIME_UNSET;
  gaplessInfoHolder = new GaplessInfoHolder();
  synchronizedHeader = new MpegAudioHeader();
  scratch = new ParsableByteArray(SCRATCH_LENGTH);
}
/**
 * Reads (part of) one MPEG audio frame from the input and outputs it as a sample, deriving its
 * timestamp from the samples read so far. If the peeked frame header no longer matches the
 * synchronized stream, drops synchronization and retries from the next byte.
 *
 * @param extractorInput The input from which to read.
 * @return RESULT_CONTINUE or RESULT_END_OF_INPUT.
 */
private int readSample(ExtractorInput extractorInput) throws IOException, InterruptedException {
  if (sampleBytesRemaining == 0) {
    // Starting a new frame: peek and validate its 4-byte header.
    extractorInput.resetPeekPosition();
    if (!extractorInput.peekFully(scratch.data, 0, 4, true)) {
      return RESULT_END_OF_INPUT;
    }
    scratch.setPosition(0);
    int sampleHeaderData = scratch.readInt();
    if ((sampleHeaderData & HEADER_MASK) != (synchronizedHeaderData & HEADER_MASK)
        || MpegAudioHeader.getFrameSize(sampleHeaderData) == C.LENGTH_UNSET) {
      // We have lost synchronization, so attempt to resynchronize starting at the next byte.
      extractorInput.skipFully(1);
      synchronizedHeaderData = 0;
      return RESULT_CONTINUE;
    }
    MpegAudioHeader.populateHeader(sampleHeaderData, synchronizedHeader);
    if (basisTimeUs == C.TIME_UNSET) {
      // Establish the timestamp basis from the seeker, re-anchoring it when a forced first
      // sample timestamp was requested.
      basisTimeUs = seeker.getTimeUs(extractorInput.getPosition());
      if (forcedFirstSampleTimestampUs != C.TIME_UNSET) {
        long embeddedFirstSampleTimestampUs = seeker.getTimeUs(0);
        basisTimeUs += forcedFirstSampleTimestampUs - embeddedFirstSampleTimestampUs;
      }
    }
    sampleBytesRemaining = synchronizedHeader.frameSize;
  }
  int bytesAppended = trackOutput.sampleData(extractorInput, sampleBytesRemaining, true);
  if (bytesAppended == C.RESULT_END_OF_INPUT) {
    return RESULT_END_OF_INPUT;
  }
  sampleBytesRemaining -= bytesAppended;
  if (sampleBytesRemaining > 0) {
    // The frame is not complete yet; finish it on a subsequent call.
    return RESULT_CONTINUE;
  }
  long timeUs = basisTimeUs + (samplesRead * C.MICROS_PER_SECOND / synchronizedHeader.sampleRate);
  trackOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, synchronizedHeader.frameSize, 0,
      null);
  samplesRead += synchronizedHeader.samplesPerFrame;
  sampleBytesRemaining = 0;
  return RESULT_CONTINUE;
}
/**
 * Returns a {@link XingSeeker} for seeking in the stream, if required information is present.
 * Returns {@code null} if not. On returning, {@code frame}'s position is not specified so the
 * caller should reset it.
 *
 * @param mpegAudioHeader The MPEG audio header associated with the frame.
 * @param frame The data in this audio frame, with its position set to immediately after the
 *     'Xing' or 'Info' tag.
 * @param position The position (byte offset) of the start of this frame in the stream.
 * @param inputLength The length of the stream in bytes.
 * @return A {@link XingSeeker} for seeking in the stream, or {@code null} if the required
 *     information is not present.
 */
public static XingSeeker create(MpegAudioHeader mpegAudioHeader, ParsableByteArray frame,
    long position, long inputLength) {
  int samplesPerFrame = mpegAudioHeader.samplesPerFrame;
  int sampleRate = mpegAudioHeader.sampleRate;
  long firstFramePosition = position + mpegAudioHeader.frameSize;
  // Xing header flags: bit 0 = frame count, bit 1 = size in bytes, bit 2 = table of contents.
  int flags = frame.readInt();
  int frameCount;
  if ((flags & 0x01) != 0x01 || (frameCount = frame.readUnsignedIntToInt()) == 0) {
    // If the frame count is missing/invalid, the header can't be used to determine the duration.
    return null;
  }
  long durationUs = Util.scaleLargeTimestamp(frameCount, samplesPerFrame * C.MICROS_PER_SECOND,
      sampleRate);
  if ((flags & 0x06) != 0x06) {
    // If the size in bytes or table of contents is missing, the stream is not seekable.
    return new XingSeeker(firstFramePosition, durationUs, inputLength);
  }
  long sizeBytes = frame.readUnsignedIntToInt();
  frame.skipBytes(1);
  // NOTE(review): the Xing TOC field is 100 bytes; this skips one byte then reads 99 entries —
  // confirm the skipped first entry is intended.
  long[] tableOfContents = new long[99];
  for (int i = 0; i < 99; i++) {
    tableOfContents[i] = frame.readUnsignedByte();
  }
  // TODO: Handle encoder delay and padding in 3 bytes offset by xingBase + 213 bytes:
  //   delay = (frame.readUnsignedByte() << 4) + (frame.readUnsignedByte() >> 4);
  //   padding = ((frame.readUnsignedByte() & 0x0F) << 8) + frame.readUnsignedByte();
  return new XingSeeker(firstFramePosition, durationUs, inputLength, tableOfContents, sizeBytes,
      mpegAudioHeader.frameSize);
}
/**
 * Attempts to read the remaining two bytes of the frame header.
 * <p>
 * If a frame header is read in full then the state is changed to {@link #STATE_READING_FRAME},
 * the media format is output if this has not previously occurred, the four header bytes are
 * output as sample data, and the position of the source is advanced to the byte that immediately
 * follows the header.
 * <p>
 * If a frame header is read in full but cannot be parsed then the state is changed to
 * {@link #STATE_READING_HEADER}.
 * <p>
 * If a frame header is not read in full then the position of the source is advanced to the limit,
 * and the method should be called again with the next source to continue the read.
 *
 * @param source The source from which to read.
 */
private void readHeaderRemainder(ParsableByteArray source) {
  int bytesToRead = Math.min(source.bytesLeft(), HEADER_SIZE - frameBytesRead);
  source.readBytes(headerScratch.data, frameBytesRead, bytesToRead);
  frameBytesRead += bytesToRead;
  if (frameBytesRead < HEADER_SIZE) {
    // We haven't read the whole header yet.
    return;
  }
  headerScratch.setPosition(0);
  boolean parsedHeader = MpegAudioHeader.populateHeader(headerScratch.readInt(), header);
  if (!parsedHeader) {
    // We thought we'd located a frame header, but we hadn't.
    frameBytesRead = 0;
    state = STATE_READING_HEADER;
    return;
  }
  frameSize = header.frameSize;
  if (!hasOutputFormat) {
    // Derive the per-frame duration from the parsed header and output the format exactly once.
    frameDurationUs = (C.MICROS_PER_SECOND * header.samplesPerFrame) / header.sampleRate;
    Format format = Format.createAudioSampleFormat(null, header.mimeType, null, Format.NO_VALUE,
        MpegAudioHeader.MAX_FRAME_SIZE_BYTES, header.channels, header.sampleRate, null, null, 0,
        language);
    output.format(format);
    hasOutputFormat = true;
  }
  // Output the four header bytes as the start of this sample's data.
  headerScratch.setPosition(0);
  output.sampleData(headerScratch, HEADER_SIZE);
  state = STATE_READING_FRAME;
}
/**
 * Creates an extractor, optionally forcing the timestamp of the first output sample.
 *
 * @param forcedFirstSampleTimestampUs A timestamp to force for the first sample, or
 *     {@link C#TIME_UNSET} if forcing is not required.
 */
public Mp3Extractor(long forcedFirstSampleTimestampUs) {
  this.forcedFirstSampleTimestampUs = forcedFirstSampleTimestampUs;
  // The timestamp basis is only established once the first sample is read.
  basisTimeUs = C.TIME_UNSET;
  gaplessInfoHolder = new GaplessInfoHolder();
  synchronizedHeader = new MpegAudioHeader();
  scratch = new ParsableByteArray(4);
}
/** Parses the test Xing frame header once and builds both seeker variants from it. */
@Override public void setUp() throws Exception {
  MpegAudioHeader header = new MpegAudioHeader();
  MpegAudioHeader.populateHeader(XING_FRAME_HEADER_DATA, header);
  xingFrameSize = header.frameSize;
  // One seeker with an unknown input length, one with a known length.
  seeker = XingSeeker.create(header, new ParsableByteArray(XING_FRAME_PAYLOAD),
      XING_FRAME_POSITION, C.LENGTH_UNSET);
  seekerWithInputLength = XingSeeker.create(header, new ParsableByteArray(XING_FRAME_PAYLOAD),
      XING_FRAME_POSITION, INPUT_LENGTH);
}
/**
 * Constructs a new {@link Mp3Extractor}.
 *
 * @param flags Flags that control the extractor's behavior.
 * @param forcedFirstSampleTimestampUs A timestamp to force for the first sample, or
 *     {@link C#TIME_UNSET} if forcing is not required.
 */
public Mp3Extractor(@Flags int flags, long forcedFirstSampleTimestampUs) {
  this.flags = flags;
  this.forcedFirstSampleTimestampUs = forcedFirstSampleTimestampUs;
  scratch = new ParsableByteArray(SCRATCH_LENGTH);
  synchronizedHeader = new MpegAudioHeader();
  gaplessInfoHolder = new GaplessInfoHolder();
  // The timestamp basis is only established once the first sample is read.
  basisTimeUs = C.TIME_UNSET;
}
/**
 * Reads (part of) one MPEG audio frame from the input and outputs it as a sample, deriving its
 * timestamp from the samples read so far. If the peeked frame header no longer matches the
 * synchronized stream, drops synchronization and retries from the next byte.
 *
 * @param extractorInput The input from which to read.
 * @return RESULT_CONTINUE or RESULT_END_OF_INPUT.
 */
private int readSample(ExtractorInput extractorInput) throws IOException, InterruptedException {
  if (sampleBytesRemaining == 0) {
    // Starting a new frame: peek and validate its 4-byte header.
    extractorInput.resetPeekPosition();
    if (!extractorInput.peekFully(scratch.data, 0, 4, true)) {
      return RESULT_END_OF_INPUT;
    }
    scratch.setPosition(0);
    int sampleHeaderData = scratch.readInt();
    if (!headersMatch(sampleHeaderData, synchronizedHeaderData)
        || MpegAudioHeader.getFrameSize(sampleHeaderData) == C.LENGTH_UNSET) {
      // We have lost synchronization, so attempt to resynchronize starting at the next byte.
      extractorInput.skipFully(1);
      synchronizedHeaderData = 0;
      return RESULT_CONTINUE;
    }
    MpegAudioHeader.populateHeader(sampleHeaderData, synchronizedHeader);
    if (basisTimeUs == C.TIME_UNSET) {
      // Establish the timestamp basis from the seeker, re-anchoring it when a forced first
      // sample timestamp was requested.
      basisTimeUs = seeker.getTimeUs(extractorInput.getPosition());
      if (forcedFirstSampleTimestampUs != C.TIME_UNSET) {
        long embeddedFirstSampleTimestampUs = seeker.getTimeUs(0);
        basisTimeUs += forcedFirstSampleTimestampUs - embeddedFirstSampleTimestampUs;
      }
    }
    sampleBytesRemaining = synchronizedHeader.frameSize;
  }
  int bytesAppended = trackOutput.sampleData(extractorInput, sampleBytesRemaining, true);
  if (bytesAppended == C.RESULT_END_OF_INPUT) {
    return RESULT_END_OF_INPUT;
  }
  sampleBytesRemaining -= bytesAppended;
  if (sampleBytesRemaining > 0) {
    // The frame is not complete yet; finish it on a subsequent call.
    return RESULT_CONTINUE;
  }
  long timeUs = basisTimeUs + (samplesRead * C.MICROS_PER_SECOND / synchronizedHeader.sampleRate);
  trackOutput.sampleMetadata(timeUs, C.BUFFER_FLAG_KEY_FRAME, synchronizedHeader.frameSize, 0,
      null);
  samplesRead += synchronizedHeader.samplesPerFrame;
  sampleBytesRemaining = 0;
  return RESULT_CONTINUE;
}
/**
 * Peeks the 4-byte header of the next frame, repopulates {@code synchronizedHeader} from it, and
 * returns a {@link ConstantBitrateSeeker} derived from the resulting bitrate.
 */
private Seeker getConstantBitrateSeeker(ExtractorInput input)
    throws IOException, InterruptedException {
  input.peekFully(scratch.data, 0, 4);
  scratch.setPosition(0);
  int headerData = scratch.readInt();
  MpegAudioHeader.populateHeader(headerData, synchronizedHeader);
  return new ConstantBitrateSeeker(input.getLength(), input.getPosition(), synchronizedHeader);
}
/** * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown. * @param firstFramePosition The position of the first frame in the stream. * @param mpegAudioHeader The MPEG audio header associated with the first frame. */ public ConstantBitrateSeeker(long inputLength, long firstFramePosition, MpegAudioHeader mpegAudioHeader) { this.firstFramePosition = firstFramePosition; this.frameSize = mpegAudioHeader.frameSize; this.bitrate = mpegAudioHeader.bitrate; if (inputLength == C.LENGTH_UNSET) { dataSize = C.LENGTH_UNSET; durationUs = C.TIME_UNSET; } else { dataSize = inputLength - firstFramePosition; durationUs = getTimeUs(inputLength); } }
/**
 * Returns a {@link XingSeeker} for seeking in the stream, if required information is present.
 * Returns {@code null} if not. On returning, {@code frame}'s position is not specified so the
 * caller should reset it.
 *
 * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown.
 * @param position The position of the start of this frame in the stream.
 * @param mpegAudioHeader The MPEG audio header associated with the frame.
 * @param frame The data in this audio frame, with its position set to immediately after the
 *     'Xing' or 'Info' tag.
 * @return A {@link XingSeeker} for seeking in the stream, or {@code null} if the required
 *     information is not present.
 */
public static XingSeeker create(long inputLength, long position, MpegAudioHeader mpegAudioHeader,
    ParsableByteArray frame) {
  int samplesPerFrame = mpegAudioHeader.samplesPerFrame;
  int sampleRate = mpegAudioHeader.sampleRate;
  // Xing header flags: bit 0 = frame count, bit 1 = size in bytes, bit 2 = table of contents.
  int flags = frame.readInt();
  int frameCount;
  if ((flags & 0x01) != 0x01 || (frameCount = frame.readUnsignedIntToInt()) == 0) {
    // If the frame count is missing/invalid, the header can't be used to determine the duration.
    return null;
  }
  long durationUs = Util.scaleLargeTimestamp(frameCount, samplesPerFrame * C.MICROS_PER_SECOND,
      sampleRate);
  if ((flags & 0x06) != 0x06) {
    // If the size in bytes or table of contents is missing, the stream is not seekable.
    return new XingSeeker(position, mpegAudioHeader.frameSize, durationUs);
  }
  long dataSize = frame.readUnsignedIntToInt();
  // Read all 100 table-of-contents entries.
  long[] tableOfContents = new long[100];
  for (int i = 0; i < 100; i++) {
    tableOfContents[i] = frame.readUnsignedByte();
  }
  // TODO: Handle encoder delay and padding in 3 bytes offset by xingBase + 213 bytes:
  //   delay = (frame.readUnsignedByte() << 4) + (frame.readUnsignedByte() >> 4);
  //   padding = ((frame.readUnsignedByte() & 0x0F) << 8) + frame.readUnsignedByte();
  // Warn (but still proceed) when the declared data size disagrees with the stream length.
  if (inputLength != C.LENGTH_UNSET && inputLength != position + dataSize) {
    Log.w(TAG, "XING data size mismatch: " + inputLength + ", " + (position + dataSize));
  }
  return new XingSeeker(position, mpegAudioHeader.frameSize, durationUs, dataSize,
      tableOfContents);
}
/** Parses the test Xing frame header once and builds both seeker variants from it. */
@Before public void setUp() throws Exception {
  MpegAudioHeader header = new MpegAudioHeader();
  MpegAudioHeader.populateHeader(XING_FRAME_HEADER_DATA, header);
  xingFrameSize = header.frameSize;
  // One seeker with an unknown input length, one with a known length.
  seeker = XingSeeker.create(C.LENGTH_UNSET, XING_FRAME_POSITION, header,
      new ParsableByteArray(XING_FRAME_PAYLOAD));
  seekerWithInputLength = XingSeeker.create(STREAM_LENGTH, XING_FRAME_POSITION, header,
      new ParsableByteArray(XING_FRAME_PAYLOAD));
}
/**
 * Attempts to locate the start of the MPEG audio stream by finding up to four consecutive frame
 * headers that agree on the masked header bits. Any leading ID3 data is peeked over when
 * starting from position 0.
 *
 * @param input The input to synchronize with.
 * @param sniffing True for a peek-only sniff (bounded by MAX_SNIFF_BYTES, returns false on
 *     failure); false for a read-time synchronization (throws ParserException on failure).
 * @return Whether synchronization succeeded.
 */
private boolean synchronize(ExtractorInput input, boolean sniffing)
    throws IOException, InterruptedException {
  int validFrameCount = 0;
  int candidateSynchronizedHeaderData = 0;
  int peekedId3Bytes = 0;
  int searchedBytes = 0;
  int searchLimitBytes = sniffing ? MAX_SNIFF_BYTES : MAX_SYNC_BYTES;
  input.resetPeekPosition();
  if (input.getPosition() == 0) {
    // Peek over any leading ID3 tag; only consume it when not sniffing.
    peekId3Data(input);
    peekedId3Bytes = (int) input.getPeekPosition();
    if (!sniffing) {
      input.skipFully(peekedId3Bytes);
    }
  }
  while (true) {
    if (!input.peekFully(scratch.data, 0, 4, validFrameCount > 0)) {
      // We reached the end of the stream but found at least one valid frame.
      break;
    }
    scratch.setPosition(0);
    int headerData = scratch.readInt();
    int frameSize;
    if ((candidateSynchronizedHeaderData != 0
        && (headerData & HEADER_MASK) != (candidateSynchronizedHeaderData & HEADER_MASK))
        || (frameSize = MpegAudioHeader.getFrameSize(headerData)) == C.LENGTH_UNSET) {
      // The header doesn't match the candidate header or is invalid. Try the next byte offset.
      if (searchedBytes++ == searchLimitBytes) {
        if (!sniffing) {
          throw new ParserException("Searched too many bytes.");
        }
        return false;
      }
      validFrameCount = 0;
      candidateSynchronizedHeaderData = 0;
      if (sniffing) {
        // Sniffing must not consume input, so re-peek from the next candidate offset.
        input.resetPeekPosition();
        input.advancePeekPosition(peekedId3Bytes + searchedBytes);
      } else {
        input.skipFully(1);
      }
    } else {
      // The header matches the candidate header and/or is valid.
      validFrameCount++;
      if (validFrameCount == 1) {
        MpegAudioHeader.populateHeader(headerData, synchronizedHeader);
        candidateSynchronizedHeaderData = headerData;
      } else if (validFrameCount == 4) {
        break;
      }
      input.advancePeekPosition(frameSize - 4);
    }
  }
  // Prepare to read the synchronized frame.
  if (sniffing) {
    input.skipFully(peekedId3Bytes + searchedBytes);
  } else {
    input.resetPeekPosition();
  }
  synchronizedHeaderData = candidateSynchronizedHeaderData;
  return true;
}
/**
 * Returns a {@link Seeker} to seek using metadata read from {@code input}, which should provide
 * data from the start of the first frame in the stream. On returning, the input's position will
 * be set to the start of the first frame of audio.
 *
 * @param input The {@link ExtractorInput} from which to read.
 * @throws IOException Thrown if there was an error reading from the stream. Not expected if the
 *     next two frames were already peeked during synchronization.
 * @throws InterruptedException Thrown if reading from the stream was interrupted. Not expected if
 *     the next two frames were already peeked during synchronization.
 * @return a {@link Seeker}.
 */
private Seeker setupSeeker(ExtractorInput input) throws IOException, InterruptedException {
  // Read the first frame which may contain a Xing or VBRI header with seeking metadata.
  ParsableByteArray frame = new ParsableByteArray(synchronizedHeader.frameSize);
  input.peekFully(frame.data, 0, synchronizedHeader.frameSize);
  long position = input.getPosition();
  long length = input.getLength();
  int headerData = 0;
  Seeker seeker = null;
  // Check if there is a Xing header. Its offset within the frame depends on the MPEG version
  // and channel count.
  int xingBase = (synchronizedHeader.version & 1) != 0
      ? (synchronizedHeader.channels != 1 ? 36 : 21) // MPEG 1
      : (synchronizedHeader.channels != 1 ? 21 : 13); // MPEG 2 or 2.5
  if (frame.limit() >= xingBase + 4) {
    frame.setPosition(xingBase);
    headerData = frame.readInt();
  }
  if (headerData == XING_HEADER || headerData == INFO_HEADER) {
    seeker = XingSeeker.create(synchronizedHeader, frame, position, length);
    if (seeker != null && !gaplessInfoHolder.hasGaplessInfo()) {
      // If there is a Xing header, read gapless playback metadata at a fixed offset.
      input.resetPeekPosition();
      input.advancePeekPosition(xingBase + 141);
      input.peekFully(scratch.data, 0, 3);
      scratch.setPosition(0);
      gaplessInfoHolder.setFromXingHeaderValue(scratch.readUnsignedInt24());
    }
    input.skipFully(synchronizedHeader.frameSize);
  } else if (frame.limit() >= 40) {
    // Check if there is a VBRI header.
    frame.setPosition(36); // MPEG audio header (4 bytes) + 32 bytes.
    headerData = frame.readInt();
    if (headerData == VBRI_HEADER) {
      seeker = VbriSeeker.create(synchronizedHeader, frame, position, length);
      input.skipFully(synchronizedHeader.frameSize);
    }
  }
  if (seeker == null
      || (!seeker.isSeekable() && (flags & FLAG_ENABLE_CONSTANT_BITRATE_SEEKING) != 0)) {
    // Repopulate the synchronized header in case we had to skip an invalid seeking header, which
    // would give an invalid CBR bitrate.
    input.resetPeekPosition();
    input.peekFully(scratch.data, 0, 4);
    scratch.setPosition(0);
    MpegAudioHeader.populateHeader(scratch.readInt(), synchronizedHeader);
    seeker = new ConstantBitrateSeeker(input.getPosition(), synchronizedHeader.bitrate, length);
  }
  return seeker;
}
/** * Returns a {@link VbriSeeker} for seeking in the stream, if required information is present. * Returns {@code null} if not. On returning, {@code frame}'s position is not specified so the * caller should reset it. * * @param mpegAudioHeader The MPEG audio header associated with the frame. * @param frame The data in this audio frame, with its position set to immediately after the * 'VBRI' tag. * @param position The position (byte offset) of the start of this frame in the stream. * @param inputLength The length of the stream in bytes. * @return A {@link VbriSeeker} for seeking in the stream, or {@code null} if the required * information is not present. */ public static VbriSeeker create(MpegAudioHeader mpegAudioHeader, ParsableByteArray frame, long position, long inputLength) { frame.skipBytes(10); int numFrames = frame.readInt(); if (numFrames <= 0) { return null; } int sampleRate = mpegAudioHeader.sampleRate; long durationUs = Util.scaleLargeTimestamp(numFrames, C.MICROS_PER_SECOND * (sampleRate >= 32000 ? 1152 : 576), sampleRate); int entryCount = frame.readUnsignedShort(); int scale = frame.readUnsignedShort(); int entrySize = frame.readUnsignedShort(); frame.skipBytes(2); // Skip the frame containing the VBRI header. position += mpegAudioHeader.frameSize; // Read table of contents entries. long[] timesUs = new long[entryCount + 1]; long[] positions = new long[entryCount + 1]; timesUs[0] = 0L; positions[0] = position; for (int index = 1; index < timesUs.length; index++) { int segmentSize; switch (entrySize) { case 1: segmentSize = frame.readUnsignedByte(); break; case 2: segmentSize = frame.readUnsignedShort(); break; case 3: segmentSize = frame.readUnsignedInt24(); break; case 4: segmentSize = frame.readUnsignedIntToInt(); break; default: return null; } position += segmentSize * scale; timesUs[index] = index * durationUs / entryCount; positions[index] = inputLength == C.LENGTH_UNSET ? 
position : Math.min(inputLength, position); } return new VbriSeeker(timesUs, positions, durationUs); }
/**
 * Attempts to locate the start of the MPEG audio stream by finding up to four consecutive frame
 * headers that agree on the masked header bits. Any leading ID3 tag is parsed (populating
 * gapless playback info) when starting from position 0.
 *
 * @param input The input to synchronize with.
 * @param sniffing True for a peek-only sniff (bounded by MAX_SNIFF_BYTES, returns false on
 *     failure); false for a read-time synchronization (throws ParserException on failure).
 * @return Whether synchronization succeeded.
 */
private boolean synchronize(ExtractorInput input, boolean sniffing)
    throws IOException, InterruptedException {
  int validFrameCount = 0;
  int candidateSynchronizedHeaderData = 0;
  int peekedId3Bytes = 0;
  int searchedBytes = 0;
  int searchLimitBytes = sniffing ? MAX_SNIFF_BYTES : MAX_SYNC_BYTES;
  input.resetPeekPosition();
  if (input.getPosition() == 0) {
    // Parse any leading ID3 tag (also extracting gapless info); only consume it when not
    // sniffing.
    Id3Util.parseId3(input, gaplessInfoHolder);
    peekedId3Bytes = (int) input.getPeekPosition();
    if (!sniffing) {
      input.skipFully(peekedId3Bytes);
    }
  }
  while (true) {
    if (!input.peekFully(scratch.data, 0, 4, validFrameCount > 0)) {
      // We reached the end of the stream but found at least one valid frame.
      break;
    }
    scratch.setPosition(0);
    int headerData = scratch.readInt();
    int frameSize;
    if ((candidateSynchronizedHeaderData != 0
        && (headerData & HEADER_MASK) != (candidateSynchronizedHeaderData & HEADER_MASK))
        || (frameSize = MpegAudioHeader.getFrameSize(headerData)) == C.LENGTH_UNSET) {
      // The header doesn't match the candidate header or is invalid. Try the next byte offset.
      if (searchedBytes++ == searchLimitBytes) {
        if (!sniffing) {
          throw new ParserException("Searched too many bytes.");
        }
        return false;
      }
      validFrameCount = 0;
      candidateSynchronizedHeaderData = 0;
      if (sniffing) {
        // Sniffing must not consume input, so re-peek from the next candidate offset.
        input.resetPeekPosition();
        input.advancePeekPosition(peekedId3Bytes + searchedBytes);
      } else {
        input.skipFully(1);
      }
    } else {
      // The header matches the candidate header and/or is valid.
      validFrameCount++;
      if (validFrameCount == 1) {
        MpegAudioHeader.populateHeader(headerData, synchronizedHeader);
        candidateSynchronizedHeaderData = headerData;
      } else if (validFrameCount == 4) {
        break;
      }
      input.advancePeekPosition(frameSize - 4);
    }
  }
  // Prepare to read the synchronized frame.
  if (sniffing) {
    input.skipFully(peekedId3Bytes + searchedBytes);
  } else {
    input.resetPeekPosition();
  }
  synchronizedHeaderData = candidateSynchronizedHeaderData;
  return true;
}
/**
 * Returns a {@link Seeker} to seek using metadata read from {@code input}, which should provide
 * data from the start of the first frame in the stream. On returning, the input's position will
 * be set to the start of the first frame of audio.
 *
 * @param input The {@link ExtractorInput} from which to read.
 * @throws IOException Thrown if there was an error reading from the stream. Not expected if the
 *     next two frames were already peeked during synchronization.
 * @throws InterruptedException Thrown if reading from the stream was interrupted. Not expected if
 *     the next two frames were already peeked during synchronization.
 * @return a {@link Seeker}.
 */
private Seeker setupSeeker(ExtractorInput input) throws IOException, InterruptedException {
  // Read the first frame which may contain a Xing or VBRI header with seeking metadata.
  ParsableByteArray frame = new ParsableByteArray(synchronizedHeader.frameSize);
  input.peekFully(frame.data, 0, synchronizedHeader.frameSize);
  long position = input.getPosition();
  long length = input.getLength();
  int headerData = 0;
  Seeker seeker = null;
  // Check if there is a Xing header. Its offset within the frame depends on the MPEG version
  // and channel count.
  int xingBase = (synchronizedHeader.version & 1) != 0
      ? (synchronizedHeader.channels != 1 ? 36 : 21) // MPEG 1
      : (synchronizedHeader.channels != 1 ? 21 : 13); // MPEG 2 or 2.5
  if (frame.limit() >= xingBase + 4) {
    frame.setPosition(xingBase);
    headerData = frame.readInt();
  }
  if (headerData == XING_HEADER || headerData == INFO_HEADER) {
    seeker = XingSeeker.create(synchronizedHeader, frame, position, length);
    if (seeker != null && !gaplessInfoHolder.hasGaplessInfo()) {
      // If there is a Xing header, read gapless playback metadata at a fixed offset.
      input.resetPeekPosition();
      input.advancePeekPosition(xingBase + 141);
      input.peekFully(scratch.data, 0, 3);
      scratch.setPosition(0);
      gaplessInfoHolder.setFromXingHeaderValue(scratch.readUnsignedInt24());
    }
    input.skipFully(synchronizedHeader.frameSize);
  } else if (frame.limit() >= 40) {
    // Check if there is a VBRI header.
    frame.setPosition(36); // MPEG audio header (4 bytes) + 32 bytes.
    headerData = frame.readInt();
    if (headerData == VBRI_HEADER) {
      seeker = VbriSeeker.create(synchronizedHeader, frame, position, length);
      input.skipFully(synchronizedHeader.frameSize);
    }
  }
  if (seeker == null) {
    // Repopulate the synchronized header in case we had to skip an invalid seeking header, which
    // would give an invalid CBR bitrate.
    input.resetPeekPosition();
    input.peekFully(scratch.data, 0, 4);
    scratch.setPosition(0);
    MpegAudioHeader.populateHeader(scratch.readInt(), synchronizedHeader);
    seeker = new ConstantBitrateSeeker(input.getPosition(), synchronizedHeader.bitrate, length);
  }
  return seeker;
}
/**
 * Attempts to locate the start of the MPEG audio stream by finding up to four consecutive frame
 * headers that match (per {@code headersMatch}). Any leading ID3 data is peeked over when
 * starting from position 0.
 *
 * @param input The input to synchronize with.
 * @param sniffing True for a peek-only sniff (bounded by MAX_SNIFF_BYTES, returns false on
 *     failure); false for a read-time synchronization (throws ParserException on failure).
 * @return Whether synchronization succeeded.
 */
private boolean synchronize(ExtractorInput input, boolean sniffing)
    throws IOException, InterruptedException {
  int validFrameCount = 0;
  int candidateSynchronizedHeaderData = 0;
  int peekedId3Bytes = 0;
  int searchedBytes = 0;
  int searchLimitBytes = sniffing ? MAX_SNIFF_BYTES : MAX_SYNC_BYTES;
  input.resetPeekPosition();
  if (input.getPosition() == 0) {
    // Peek over any leading ID3 tag; only consume it when not sniffing.
    peekId3Data(input);
    peekedId3Bytes = (int) input.getPeekPosition();
    if (!sniffing) {
      input.skipFully(peekedId3Bytes);
    }
  }
  while (true) {
    if (!input.peekFully(scratch.data, 0, 4, validFrameCount > 0)) {
      // We reached the end of the stream but found at least one valid frame.
      break;
    }
    scratch.setPosition(0);
    int headerData = scratch.readInt();
    int frameSize;
    if ((candidateSynchronizedHeaderData != 0
        && !headersMatch(headerData, candidateSynchronizedHeaderData))
        || (frameSize = MpegAudioHeader.getFrameSize(headerData)) == C.LENGTH_UNSET) {
      // The header doesn't match the candidate header or is invalid. Try the next byte offset.
      if (searchedBytes++ == searchLimitBytes) {
        if (!sniffing) {
          throw new ParserException("Searched too many bytes.");
        }
        return false;
      }
      validFrameCount = 0;
      candidateSynchronizedHeaderData = 0;
      if (sniffing) {
        // Sniffing must not consume input, so re-peek from the next candidate offset.
        input.resetPeekPosition();
        input.advancePeekPosition(peekedId3Bytes + searchedBytes);
      } else {
        input.skipFully(1);
      }
    } else {
      // The header matches the candidate header and/or is valid.
      validFrameCount++;
      if (validFrameCount == 1) {
        MpegAudioHeader.populateHeader(headerData, synchronizedHeader);
        candidateSynchronizedHeaderData = headerData;
      } else if (validFrameCount == 4) {
        break;
      }
      input.advancePeekPosition(frameSize - 4);
    }
  }
  // Prepare to read the synchronized frame.
  if (sniffing) {
    input.skipFully(peekedId3Bytes + searchedBytes);
  } else {
    input.resetPeekPosition();
  }
  synchronizedHeaderData = candidateSynchronizedHeaderData;
  return true;
}
/** * Returns a {@link VbriSeeker} for seeking in the stream, if required information is present. * Returns {@code null} if not. On returning, {@code frame}'s position is not specified so the * caller should reset it. * * @param inputLength The length of the stream in bytes, or {@link C#LENGTH_UNSET} if unknown. * @param position The position of the start of this frame in the stream. * @param mpegAudioHeader The MPEG audio header associated with the frame. * @param frame The data in this audio frame, with its position set to immediately after the * 'VBRI' tag. * @return A {@link VbriSeeker} for seeking in the stream, or {@code null} if the required * information is not present. */ public static VbriSeeker create(long inputLength, long position, MpegAudioHeader mpegAudioHeader, ParsableByteArray frame) { frame.skipBytes(10); int numFrames = frame.readInt(); if (numFrames <= 0) { return null; } int sampleRate = mpegAudioHeader.sampleRate; long durationUs = Util.scaleLargeTimestamp(numFrames, C.MICROS_PER_SECOND * (sampleRate >= 32000 ? 1152 : 576), sampleRate); int entryCount = frame.readUnsignedShort(); int scale = frame.readUnsignedShort(); int entrySize = frame.readUnsignedShort(); frame.skipBytes(2); long minPosition = position + mpegAudioHeader.frameSize; // Read table of contents entries. long[] timesUs = new long[entryCount]; long[] positions = new long[entryCount]; for (int index = 0; index < entryCount; index++) { timesUs[index] = (index * durationUs) / entryCount; // Ensure positions do not fall within the frame containing the VBRI header. This constraint // will normally only apply to the first entry in the table. 
positions[index] = Math.max(position, minPosition); int segmentSize; switch (entrySize) { case 1: segmentSize = frame.readUnsignedByte(); break; case 2: segmentSize = frame.readUnsignedShort(); break; case 3: segmentSize = frame.readUnsignedInt24(); break; case 4: segmentSize = frame.readUnsignedIntToInt(); break; default: return null; } position += segmentSize * scale; } if (inputLength != C.LENGTH_UNSET && inputLength != position) { Log.w(TAG, "VBRI data size mismatch: " + inputLength + ", " + position); } return new VbriSeeker(timesUs, positions, durationUs); }