/**
 * Constructs a JSBufferedSampleRecorder that expects audio in the given AudioFormat and
 * which will save to a file with the given name.
 *
 * @param sys        the JSMinim instance used for system-level services
 * @param fileName   the name of the file to save to (not including the extension)
 * @param fileType   the type of audio file to write
 * @param fileFormat the AudioFormat you want to record in
 * @param bufferSize the size, in sample frames, of the recording buffers
 */
JSBufferedSampleRecorder(JSMinim sys, String fileName, AudioFileFormat.Type fileType, AudioFormat fileFormat, int bufferSize)
{
  name = fileName;
  type = fileType;
  format = fileFormat;
  buffers = new ArrayList<FloatBuffer>(20);
  // pre-allocate room for ten buffers' worth of samples per channel
  left = FloatBuffer.allocate(bufferSize*10);
  if ( format.getChannels() == Minim.STEREO )
  {
    right = FloatBuffer.allocate(bufferSize*10);
  }
  else
  {
    // mono recording: no right-channel buffer is needed
    right = null;
  }
  system = sys;
}
public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) { AudioFileFormat.Type[] filetypes = new AudioFileFormat.Type[types.length]; System.arraycopy(types, 0, filetypes, 0, types.length); // make sure we can write this stream AudioFormat format = stream.getFormat(); AudioFormat.Encoding encoding = format.getEncoding(); if( (AudioFormat.Encoding.ALAW.equals(encoding)) || (AudioFormat.Encoding.ULAW.equals(encoding)) || (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) || (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) ) { return filetypes; } return new AudioFileFormat.Type[0]; }
/**
 * Round-trips a one-frame stream through the AIFF writer and reader and
 * checks that the frame rate read back matches the requested sample rate.
 */
private static boolean testSampleRate(float sampleRate) {
    boolean result = true;
    try {
        // build a 1-frame AudioInputStream at the requested sample rate
        // (8-bit, mono, signed, big-endian)
        ByteArrayInputStream data = new ByteArrayInputStream(new byte[1]);
        AudioFormat format = new AudioFormat(sampleRate, 8, 1, true, true);
        AudioInputStream stream = new AudioInputStream(data, format, 1);
        // write to AIFF file
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        AudioSystem.write(stream, AudioFileFormat.Type.AIFF, outputStream);
        byte[] fileData = outputStream.toByteArray();
        InputStream inputStream = new ByteArrayInputStream(fileData);
        // parse the header back and compare the stored frame rate
        AudioFileFormat aff = AudioSystem.getAudioFileFormat(inputStream);
        if (! equals(sampleRate, aff.getFormat().getFrameRate())) {
            out("error for sample rate " + sampleRate);
            result = false;
        }
    } catch (Exception e) {
        // NOTE(review): an exception leaves result == true, i.e. a crash
        // here does NOT fail this sample rate -- presumably deliberate
        // (unsupported rates are skipped); confirm against the harness.
        out(e);
        out("Test NOT FAILED");
    }
    return result;
}
/**
 * Builds a WaveFileFormat, mapping the AudioFormat's encoding onto the
 * corresponding WAVE format-tag constant (unknown encodings fall through
 * to WAVE_FORMAT_UNKNOWN).
 */
WaveFileFormat(AudioFileFormat.Type type, int lengthInBytes, AudioFormat format, int lengthInFrames) {
    super(type, lengthInBytes, format, lengthInFrames);
    final AudioFormat.Encoding enc = format.getEncoding();
    if (AudioFormat.Encoding.ALAW.equals(enc)) {
        waveType = WAVE_FORMAT_ALAW;
    } else if (AudioFormat.Encoding.ULAW.equals(enc)) {
        waveType = WAVE_FORMAT_MULAW;
    } else if (AudioFormat.Encoding.PCM_SIGNED.equals(enc)
            || AudioFormat.Encoding.PCM_UNSIGNED.equals(enc)) {
        waveType = WAVE_FORMAT_PCM;
    } else {
        waveType = WAVE_FORMAT_UNKNOWN;
    }
}
/** * Obtains an audio stream from the File provided. The File must * point to valid audio file data. * @param file the File for which the <code>AudioInputStream</code> should be * constructed * @return an <code>AudioInputStream</code> object based on the audio file data pointed * to by the File * @throws UnsupportedAudioFileException if the File does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioInputStream getAudioInputStream(File file) throws UnsupportedAudioFileException, IOException { FileInputStream fis = new FileInputStream(file); // throws IOException AudioFileFormat fileFormat = null; // part of fix for 4325421 try { fileFormat = getCOMM(fis, false); } finally { if (fileFormat == null) { fis.close(); } } return new AudioInputStream(fis, fileFormat.getFormat(), fileFormat.getFrameLength()); }
@Override public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { Objects.requireNonNull(stream); Objects.requireNonNull(fileType); Objects.requireNonNull(out); //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported WaveFileFormat waveFileFormat = (WaveFileFormat)getAudioFileFormat(fileType, stream); //$$fb when we got this far, we are committed to write this file // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } return writeWaveFile(stream, waveFileFormat, out); }
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported WaveFileFormat waveFileFormat = (WaveFileFormat)getAudioFileFormat(fileType, stream); //$$fb when we got this far, we are committed to write this file // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } int bytesWritten = writeWaveFile(stream, waveFileFormat, out); return bytesWritten; }
/**
 * Opens the WAVE file carried by the stream and returns an
 * AudioInputStream positioned at its "data" chunk.
 */
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = getAudioFileFormat(stream);
    RIFFReader riff = new RIFFReader(stream);
    // the container must be a RIFF of type WAVE
    if (!riff.getFormat().equals("RIFF")) {
        throw new UnsupportedAudioFileException();
    }
    if (!riff.getType().equals("WAVE")) {
        throw new UnsupportedAudioFileException();
    }
    // scan the chunks for "data", which carries the sample bytes
    while (riff.hasNextChunk()) {
        RIFFReader chunk = riff.nextChunk();
        if (!chunk.getFormat().equals("data")) {
            continue;
        }
        return new AudioInputStream(chunk, fileFormat.getFormat(), chunk.getSize());
    }
    throw new UnsupportedAudioFileException();
}
/**
 * Parses the RIFF/WAVE container on the stream and returns an
 * AudioInputStream backed by its "data" chunk.
 */
public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException {
    AudioFileFormat header = getAudioFileFormat(stream);
    RIFFReader reader = new RIFFReader(stream);
    // reject anything that is not a RIFF container of type WAVE
    if (!reader.getFormat().equals("RIFF")) {
        throw new UnsupportedAudioFileException();
    }
    if (!reader.getType().equals("WAVE")) {
        throw new UnsupportedAudioFileException();
    }
    // walk the chunk list until the sample data is found
    while (reader.hasNextChunk()) {
        RIFFReader c = reader.nextChunk();
        if (c.getFormat().equals("data")) {
            return new AudioInputStream(c, header.getFormat(), c.getSize());
        }
    }
    // no "data" chunk present -- not a usable WAVE file
    throw new UnsupportedAudioFileException();
}
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported AiffFileFormat aiffFileFormat = (AiffFileFormat)getAudioFileFormat(fileType, stream); // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } int bytesWritten = writeAiffFile(stream, aiffFileFormat, out); return bytesWritten; }
/**
 * Verifies the frame length after the stream was saved/read to/from file.
 * Unsupported writer/type combinations are ignored rather than failed.
 */
private static void testAfterSaveToFile(final AudioFileWriter afw,
                                        final AudioFileFormat.Type type,
                                        AudioInputStream ais) throws IOException {
    final File temp = File.createTempFile("sound", ".tmp");
    try {
        afw.write(ais, type, temp);
        ais = AudioSystem.getAudioInputStream(temp);
        final long frameLength = ais.getFrameLength();
        ais.close();
        validate(frameLength);
    } catch (IllegalArgumentException | UnsupportedAudioFileException ignored) {
        // the writer does not support this type/stream -- not a failure
    } finally {
        // File.toPath() replaces the roundabout Paths.get(getAbsolutePath())
        Files.delete(temp.toPath());
    }
}
/**
 * Checks that AudioSystem.getAudioFileTypes() reports no duplicate types.
 * Throws an Exception (test failure) if any two entries are equal.
 */
public static void main(String[] args) throws Exception {
    boolean foundDuplicates = false;
    AudioFileFormat.Type[] aTypes = AudioSystem.getAudioFileTypes();
    // Compare each unordered pair exactly once. The original scanned every
    // ordered pair (O(n^2) with a redundant i != j guard) and kept going
    // after the first hit; starting j at i + 1 and breaking early is both
    // simpler and cheaper, with identical pass/fail behavior.
    for (int i = 0; i < aTypes.length && !foundDuplicates; i++) {
        for (int j = i + 1; j < aTypes.length; j++) {
            if (aTypes[i].equals(aTypes[j])) {
                foundDuplicates = true;
                break;
            }
        }
    }
    if (foundDuplicates) {
        throw new Exception("Test failed");
    } else {
        System.out.println("Test passed");
    }
}
public static void main(final String[] args) { final AudioFileFormat.Type type; try { type = new AudioFileFormat.Type(null, null); } catch (final Exception ignored) { // behaviour of null is not specified so ignore possible exceptions return; } final Object stub = new Object() { @Override public String toString() { return null; } }; if (stub.equals(type) || type.equals(stub)) { throw new RuntimeException("Should not be equal"); } }
public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) { AudioFileFormat.Type[] filetypes = new AudioFileFormat.Type[types.length]; System.arraycopy(types, 0, filetypes, 0, types.length); // make sure we can write this stream AudioFormat format = stream.getFormat(); AudioFormat.Encoding encoding = format.getEncoding(); if( AudioFormat.Encoding.ALAW.equals(encoding) || AudioFormat.Encoding.ULAW.equals(encoding) || AudioFormat.Encoding.PCM_SIGNED.equals(encoding) || AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding) ) { return filetypes; } return new AudioFileFormat.Type[0]; }
@Override public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { Objects.requireNonNull(stream); Objects.requireNonNull(fileType); Objects.requireNonNull(out); //$$fb the following check must come first ! Otherwise // the next frame length check may throw an IOException and // interrupt iterating File Writers. (see bug 4351296) // throws IllegalArgumentException if not supported AiffFileFormat aiffFileFormat = (AiffFileFormat)getAudioFileFormat(fileType, stream); // we must know the total data length to calculate the file length if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { throw new IOException("stream length not specified"); } return writeAiffFile(stream, aiffFileFormat, out); }
/** * Saves the double array as an audio file (using .wav or .au format). * * @param filename * the name of the audio file * @param samples * the array of samples * @throws IllegalArgumentException * if unable to save {@code filename} * @throws IllegalArgumentException * if {@code samples} is {@code null} */ public static void save(String filename, double[] samples) { if (samples == null) { throw new IllegalArgumentException("samples[] is null"); } // assumes 44,100 samples per second // use 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat(SAMPLE_RATE, 16, 1, true, false); byte[] data = new byte[2 * samples.length]; for (int i = 0; i < samples.length; i++) { int temp = (short) (samples[i] * MAX_16_BIT); data[2 * i + 0] = (byte) temp; data[2 * i + 1] = (byte) (temp >> 8); } // now save the file try { ByteArrayInputStream bais = new ByteArrayInputStream(data); AudioInputStream ais = new AudioInputStream(bais, format, samples.length); if (filename.endsWith(".wav") || filename.endsWith(".WAV")) { AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File(filename)); } else if (filename.endsWith(".au") || filename.endsWith(".AU")) { AudioSystem.write(ais, AudioFileFormat.Type.AU, new File(filename)); } else { throw new IllegalArgumentException("unsupported audio format: '" + filename + "'"); } } catch (IOException ioe) { throw new IllegalArgumentException("unable to save file '" + filename + "'", ioe); } }
/**
 * Indicates whether an audio file of the type specified can be written
 * from the audio input stream indicated.
 *
 * @param fileType file type for which write capabilities are queried
 * @param stream for which file writing support is queried
 * @return <code>true</code> if the file type is supported for this audio
 *         input stream, otherwise <code>false</code>
 */
public boolean isFileTypeSupported(AudioFileFormat.Type fileType, AudioInputStream stream) {
    // simply check membership in the list of writable types for this stream
    for (AudioFileFormat.Type supported : getAudioFileTypes(stream)) {
        if (fileType.equals(supported)) {
            return true;
        }
    }
    return false;
}
/** * Returns AudioFileFormat from URL. */ public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException { if (TDebug.TraceAudioFileReader) { TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): begin"); } long lFileLengthInBytes = AudioSystem.NOT_SPECIFIED; URLConnection conn = url.openConnection(); // Tell shoucast server (if any) that SPI support shoutcast stream. conn.setRequestProperty("Icy-Metadata", "1"); InputStream inputStream = conn.getInputStream(); AudioFileFormat audioFileFormat = null; try { audioFileFormat = getAudioFileFormat(inputStream, lFileLengthInBytes); } finally { inputStream.close(); } if (TDebug.TraceAudioFileReader) { TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): end"); } return audioFileFormat; }
/**
 * Constructs a streaming sample recorder that writes incoming samples
 * directly to an audio output stream on disk as they arrive.
 *
 * @param sys        the JSMinim instance used for error reporting
 * @param fileName   the name of the file to write to
 * @param fileType   the type of audio file to write
 * @param fileFormat the AudioFormat the samples will be recorded in
 * @param bufferSize the size, in sample frames, of the working buffer
 */
JSStreamingSampleRecorder(JSMinim sys, String fileName, AudioFileFormat.Type fileType, AudioFormat fileFormat, int bufferSize)
{
  name = fileName;
  type = fileType;
  format = fileFormat;
  system = sys;
  try
  {
    // open the output stream immediately; length is unknown while streaming
    aos = AudioSystemShadow.getAudioOutputStream( type,
                                                  format,
                                                  AudioSystem.NOT_SPECIFIED,
                                                  new File(name) );
  }
  catch (IOException e)
  {
    system.error("Error obtaining new output stream: " + e.getMessage());
  }
  catch (IllegalArgumentException badarg)
  {
    // the type/format combination is not writable by the shadow system
    system.error("Error obtaining new output stream for " + fileName
                 + " with type " + type.toString()
                 + " format " + format.toString()
                 + " and bufferSize " + bufferSize + ".\n"
                 + "The reason is " + badarg.getMessage());
  }
  // scratch buffer used to convert incoming samples before writing
  fsb = new FloatSampleBuffer(format.getChannels(), bufferSize, format.getSampleRate());
  recording = false;
}
/** * Obtains the audio file format of the URL provided. The URL must * point to valid audio file data. * @param url the URL from which file format information should be * extracted * @return an <code>AudioFileFormat</code> object describing the audio file format * @throws UnsupportedAudioFileException if the URL does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException { InputStream urlStream = url.openStream(); // throws IOException AudioFileFormat fileFormat = null; try { fileFormat = getFMT(urlStream, false); } finally { urlStream.close(); } return fileFormat; }
/** * Obtains the audio file format of the File provided. The File must * point to valid audio file data. * @param file the File from which file format information should be * extracted * @return an <code>AudioFileFormat</code> object describing the audio file format * @throws UnsupportedAudioFileException if the File does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioFileFormat getAudioFileFormat(File file) throws UnsupportedAudioFileException, IOException { AudioFileFormat fileFormat = null; FileInputStream fis = new FileInputStream(file); // throws IOException // part of fix for 4325421 try { fileFormat = getFMT(fis, false); } finally { fis.close(); } return fileFormat; }
/**
 * Parses the given file image and checks that the reported sample size,
 * frame size, and frame length match the expected fixture values.
 */
public static void test(byte[] file) throws Exception {
    InputStream inputStream = new ByteArrayInputStream(file);
    AudioFileFormat aff = AudioSystem.getAudioFileFormat(inputStream);
    AudioFormat fmt = aff.getFormat();
    // the fixture encodes 12-bit samples packed into 2-byte frames
    if (fmt.getSampleSizeInBits() != 12) {
        throw new Exception("Wrong sample size. test FAILED");
    }
    if (fmt.getFrameSize() != 2) {
        throw new Exception("Wrong frame size. test FAILED");
    }
    if (aff.getFrameLength() != 100) {
        throw new Exception("Wrong file length. test FAILED");
    }
}
/**
 * Tests the {@code AudioFileFormat} fetched from the fake header.
 * <p>
 * Note that the frameLength and byteLength are stored as int which means
 * that {@code AudioFileFormat} will store the data above {@code MAX_INT} as
 * NOT_SPECIFIED.
 */
private static void testAFF(final byte bits, final int rate, final int channel,
                            final long frameLength) throws Exception {
    final byte[] header = createHeader(bits, rate, channel, frameLength);
    final ByteArrayInputStream fake = new ByteArrayInputStream(header);
    final AudioFileFormat aff = AudioSystem.getAudioFileFormat(fake);
    // Compare types with equals(): AudioFileFormat.Type defines value
    // equality, and the original reference comparison (!=) only worked
    // because the JDK happens to return the shared constant.
    if (!AudioFileFormat.Type.AIFF.equals(aff.getType())) {
        throw new RuntimeException("Wrong file type: " + aff.getType());
    }
    if (frameLength <= Integer.MAX_VALUE) {
        // lengths that fit in an int must round-trip exactly
        if (aff.getFrameLength() != frameLength) {
            System.err.println("Expected: " + frameLength);
            System.err.println("Actual: " + aff.getFrameLength());
            throw new RuntimeException("Unexpected frame length");
        }
    } else {
        // lengths above MAX_INT collapse to NOT_SPECIFIED
        if (aff.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
            System.err.println("Expected: " + AudioSystem.NOT_SPECIFIED);
            System.err.println("Actual: " + aff.getFrameLength());
            throw new RuntimeException("Unexpected frame length");
        }
    }
    validateFormat(bits, rate, channel, aff.getFormat());
}
/** * Obtains the audio file format of the URL provided. The URL must * point to valid audio file data. * @param url the URL from which file format information should be * extracted * @return an <code>AudioFileFormat</code> object describing the audio file format * @throws UnsupportedAudioFileException if the URL does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException { AudioFileFormat fileFormat = null; InputStream urlStream = url.openStream(); // throws IOException try { fileFormat = getCOMM(urlStream, false); } finally { urlStream.close(); } return fileFormat; }
/** * Obtains the audio file format of the File provided. The File must * point to valid audio file data. * @param file the File from which file format information should be * extracted * @return an <code>AudioFileFormat</code> object describing the audio file format * @throws UnsupportedAudioFileException if the File does not point to valid audio * file data recognized by the system * @throws IOException if an I/O exception occurs */ public AudioFileFormat getAudioFileFormat(File file) throws UnsupportedAudioFileException, IOException { AudioFileFormat fileFormat = null; FileInputStream fis = new FileInputStream(file); // throws IOException // part of fix for 4325421 try { fileFormat = getCOMM(fis, false); } finally { fis.close(); } return fileFormat; }
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, File out) throws IOException { // throws IllegalArgumentException if not supported WaveFileFormat waveFileFormat = (WaveFileFormat)getAudioFileFormat(fileType, stream); // first write the file without worrying about length fields FileOutputStream fos = new FileOutputStream( out ); // throws IOException BufferedOutputStream bos = new BufferedOutputStream( fos, bisBufferSize ); int bytesWritten = writeWaveFile(stream, waveFileFormat, bos ); bos.close(); // now, if length fields were not specified, calculate them, // open as a random access file, write the appropriate fields, // close again.... if( waveFileFormat.getByteLength()== AudioSystem.NOT_SPECIFIED ) { int dataLength=bytesWritten-waveFileFormat.getHeaderSize(); int riffLength=dataLength + waveFileFormat.getHeaderSize() - 8; RandomAccessFile raf=new RandomAccessFile(out, "rw"); // skip RIFF magic raf.skipBytes(4); raf.writeInt(big2little( riffLength )); // skip WAVE magic, fmt_ magic, fmt_ length, fmt_ chunk, data magic raf.skipBytes(4+4+4+WaveFileFormat.getFmtChunkSize(waveFileFormat.getWaveType())+4); raf.writeInt(big2little( dataLength )); // that's all raf.close(); } return bytesWritten; }
/**
 * Describes the audio rendering of the given MIDI sequence as an
 * AudioFileFormat whose frame length is estimated from the sequence
 * duration.
 */
public AudioFileFormat getAudioFileFormat(Sequence seq) throws UnsupportedAudioFileException, IOException {
    // sequence length in whole seconds (microseconds / 1000000)
    long totallen = seq.getMicrosecondLength() / 1000000;
    // estimated frame count; the extra 4 seconds presumably leaves room
    // for notes/decay sounding past the end of the sequence -- TODO confirm
    long len = (long) (format.getFrameRate() * (totallen + 4));
    // NOTE(review): the long-to-int narrowing mirrors AudioFileFormat's
    // int-based frame length
    return new AudioFileFormat(MIDI, format, (int) len);
}
public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { // we must know the total data length to calculate the file length //$$fb 2001-07-13: fix for bug 4351296: do not throw an exception //if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) { // throw new IOException("stream length not specified"); //} // throws IllegalArgumentException if not supported AuFileFormat auFileFormat = (AuFileFormat)getAudioFileFormat(fileType, stream); int bytesWritten = writeAuFile(stream, auFileFormat, out); return bytesWritten; }
/**
 * Builds an AuFileFormat, deriving the AU header type code from the
 * encoding and sample size; unsupported combinations leave auType at -1.
 */
AuFileFormat(final AudioFileFormat.Type type, final long byteLength,
             final AudioFormat format, final long frameLength) {
    super(type, byteLength, format, frameLength);
    final AudioFormat.Encoding encoding = format.getEncoding();
    final int bits = format.getSampleSizeInBits();
    auType = -1; // sentinel: unknown/unsupported encoding+size combination
    if (AudioFormat.Encoding.ALAW.equals(encoding)) {
        if (bits == 8) {
            auType = AU_ALAW_8;
        }
    } else if (AudioFormat.Encoding.ULAW.equals(encoding)) {
        if (bits == 8) {
            auType = AU_ULAW_8;
        }
    } else if (AudioFormat.Encoding.PCM_SIGNED.equals(encoding)) {
        switch (bits) {
            case 8:  auType = AU_LINEAR_8;  break;
            case 16: auType = AU_LINEAR_16; break;
            case 24: auType = AU_LINEAR_24; break;
            case 32: auType = AU_LINEAR_32; break;
            default: break; // other widths stay unsupported
        }
    } else if (AudioFormat.Encoding.PCM_FLOAT.equals(encoding)) {
        if (bits == 32) {
            auType = AU_FLOAT;
        }
    }
}
/**
 * Builds an AuFileFormat, mapping the encoding plus sample size onto the
 * AU header type code; combinations with no mapping leave auType at -1.
 */
AuFileFormat(AudioFileFormat.Type type, int lengthInBytes, AudioFormat format, int lengthInFrames) {
    super(type, lengthInBytes, format, lengthInFrames);
    AudioFormat.Encoding enc = format.getEncoding();
    int sampleBits = format.getSampleSizeInBits();
    auType = -1; // default: unsupported combination
    if (AudioFormat.Encoding.ALAW.equals(enc)) {
        // A-law is only defined for 8-bit samples in AU
        if (sampleBits == 8) {
            auType = AU_ALAW_8;
        }
    } else if (AudioFormat.Encoding.ULAW.equals(enc)) {
        // mu-law likewise only maps at 8 bits
        if (sampleBits == 8) {
            auType = AU_ULAW_8;
        }
    } else if (AudioFormat.Encoding.PCM_SIGNED.equals(enc)) {
        // linear PCM maps at each of the standard widths
        if (sampleBits == 8) {
            auType = AU_LINEAR_8;
        } else if (sampleBits == 16) {
            auType = AU_LINEAR_16;
        } else if (sampleBits == 24) {
            auType = AU_LINEAR_24;
        } else if (sampleBits == 32) {
            auType = AU_LINEAR_32;
        }
    }
}
/**
 * Reads the audio file format from the URL, buffering the connection's
 * stream so the header probe can mark/reset, and closing it afterwards.
 */
public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException {
    InputStream raw = url.openStream();
    try {
        // wrap in a BufferedInputStream: the probe requires mark support
        return getAudioFileFormat(new BufferedInputStream(raw));
    } finally {
        raw.close();
    }
}
/**
 * Reads the audio file format from the file, buffering the stream so the
 * header probe can mark/reset, and closing the file afterwards.
 */
public AudioFileFormat getAudioFileFormat(File file) throws UnsupportedAudioFileException, IOException {
    InputStream raw = new FileInputStream(file);
    try {
        // BufferedInputStream supplies the mark support the probe needs
        return getAudioFileFormat(new BufferedInputStream(raw));
    } finally {
        raw.close();
    }
}
/**
 * Probes the stream's header for its audio file format, then rewinds the
 * stream so the caller sees it unconsumed (SPI contract).
 */
public AudioFileFormat getAudioFileFormat(InputStream stream) throws UnsupportedAudioFileException, IOException {
    stream.mark(200); // the header probe reads at most 200 bytes
    try {
        return internal_getAudioFileFormat(stream);
    } finally {
        // restore the original position whether the probe succeeded or not
        stream.reset();
    }
}
/**
 * Regression check: writing an AU file from a stream whose length is
 * NOT_SPECIFIED must succeed (AU permits unknown data length).
 */
public static void main(String argv[]) throws Exception {
    AudioFormat format = new AudioFormat(44100, 16, 2, true, true);
    InputStream is = new ByteArrayInputStream(new byte[1000]);
    // deliberately report an unknown frame length
    AudioInputStream ais = new AudioInputStream(is, format, AudioSystem.NOT_SPECIFIED);
    OutputStream sink = new ByteArrayOutputStream();
    AudioSystem.write(ais, AudioFileFormat.Type.AU, sink);
    System.out.println("Test passed.");
}
/** * Returns the AudioFileFormat describing the file that will be written from this AudioInputStream. * Throws IllegalArgumentException if not supported. */ private AudioFileFormat getAudioFileFormat(Type type, AudioInputStream stream) { if (!isFileTypeSupported(type, stream)) { throw new IllegalArgumentException("File type " + type + " not supported."); } AudioFormat streamFormat = stream.getFormat(); AudioFormat.Encoding encoding = streamFormat.getEncoding(); if (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) { encoding = AudioFormat.Encoding.PCM_SIGNED; } // We always write big endian au files, this is by far the standard AudioFormat format = new AudioFormat(encoding, streamFormat.getSampleRate(), streamFormat.getSampleSizeInBits(), streamFormat.getChannels(), streamFormat.getFrameSize(), streamFormat.getFrameRate(), true); int fileSize; if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) { fileSize = (int)stream.getFrameLength()*streamFormat.getFrameSize() + AuFileFormat.AU_HEADERSIZE; } else { fileSize = AudioSystem.NOT_SPECIFIED; } return new AuFileFormat(Type.AU, fileSize, format, (int) stream.getFrameLength()); }