private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little // Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this // choice is arbitrary // it gets divided because we can't expect the buffered data to line // up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); }
/**
 * Streams data from the TargetDataLine to the API on a dedicated thread.
 *
 * @param urlStr the URL to stream to
 * @param tl the target data line to stream from
 * @param af the AudioFormat to stream with
 * @return the started upstream thread, so callers can join/interrupt it
 * @throws LineUnavailableException if the TargetDataLine cannot be opened
 */
private Thread upChannel(String urlStr, TargetDataLine tl, AudioFormat af)
        throws LineUnavailableException {
    final String uploadUrl = urlStr;
    final TargetDataLine captureLine = tl;
    final AudioFormat captureFormat = af;

    // Open and start the capture line only if the caller has not already done so.
    if (!captureLine.isOpen()) {
        captureLine.open(captureFormat);
        captureLine.start();
    }

    final Thread uploader = new Thread("Upstream Thread") {
        @Override
        public void run() {
            openHttpsPostConnection(uploadUrl, captureLine, (int) captureFormat.getSampleRate());
        }
    };
    uploader.start();
    return uploader;
}
/**
 * Opens and starts the sound output line.
 *
 * <p>Idempotent: a non-null {@code line} means sound is already running and the
 * call is a no-op. Note the field is assigned BEFORE {@code open()}; if open
 * fails, {@code line} stays non-null, so a later call would skip re-opening —
 * preserved as-is since callers may rely on it.
 */
@Override
public void start() {
    if (line != null) {
        LOG.debug("Sound already started");
        return;
    }
    LOG.debug("Start sound");
    try {
        line = AudioSystem.getSourceDataLine(FORMAT);
        line.open(FORMAT, BUFFER_SIZE);
    } catch (LineUnavailableException e) {
        // Audio is essential here: surface the failure as unchecked.
        throw new RuntimeException(e);
    }
    line.start();
    // Size our staging buffer to whatever the line actually allocated.
    buffer = new byte[line.getBufferSize()];
    // Emulated CPU ticks per output sample (how often to emit one sample).
    divider = (int) (Gameboy.TICKS_PER_SEC / FORMAT.getSampleRate());
}
/**
 * Loads two raw signed-PCM captures, prints their peak levels, writes their
 * sum, then plays the three buffers in sequence (remote -> local -> sum).
 */
public static void main(String[] args) throws IOException, LineUnavailableException {
    final File folder = new File("/home/rizsi/tmp/video");
    final byte[] remote = UtilFile.loadFile(new File(folder, "remote.sw"));
    final byte[] local = UtilFile.loadFile(new File(folder, "local.sw"));
    System.out.println("remote.sw max: " + measureMax(remote));
    System.out.println("local.sw max: " + measureMax(local));
    final byte[] mixed = sum(remote, local);
    UtilFile.saveAsFile(new File(folder, "rawmic.sw"), mixed);

    AudioFormat format = ManualTestEchoCancel.getFormat();
    final Mixer mixer = AudioSystem.getMixer(null);
    // When one buffer finishes, advance to the next one in the cycle.
    Play p = new Play(mixer, format, ManualTestEchoCancel.frameSamples) {
        @Override
        protected void switchBuffer() {
            if (getSample() == remote) {
                setSample(local);
            } else if (getSample() == local) {
                setSample(mixed);
            }
        }
    };
    p.start();
    p.setSample(remote);
}
/** * Play a sound at a given frequency freq during duration (in seconds) with volume as strenght * <br/><br/> * <code>SoundGenerator.playSound(440.0,1.0,0.5,SoundGenerator.FADE_LINEAR,SoundGenerator.WAVE_SIN);</code><br/> * Available fades : FADE_NONE, FADE_LINEAR, FADE_QUADRATIC<br/> * Available waves : WAVE_SIN, WAVE_SQUARE, WAVE_TRIANGLE, WAVE_SAWTOOTH<br/> */ public static void playSound(double freq,double duration,double volume,byte fade,byte wave){ double[] soundData = generateSoundData(freq,duration,volume,fade,wave); byte[] freqdata = new byte[soundData.length]; for(int i = 0;i < soundData.length;i++) { freqdata[i] = (byte)soundData[i]; } // Play it try { final AudioFormat af = new AudioFormat(SAMPLE_RATE, 8, 1, true, true); SourceDataLine line = AudioSystem.getSourceDataLine(af); line.open(af, SAMPLE_RATE); line.start(); line.write(freqdata, 0, freqdata.length); line.drain(); line.close(); }catch(LineUnavailableException e) { e.printStackTrace(); } }
/**
 * Recreates the output line and audio buffers from the system's current
 * sample rate and buffer-size settings, closing any previous line first.
 */
public void restartSDL() {
    // 16-bit stereo signed little-endian PCM at the system's sample rate.
    final AudioFormat format = new AudioFormat(sys.getSampleRate(), 16, 2, true, false);
    bufptr = 0;
    // Buffer length: (samples per millisecond * buffer millis) * 2 channels.
    audioints = new int[(int) ((sys.getSampleRate() / 1000.0) * sys.getBufferSize()) * 2];
    if (scope != null) {
        scope.setAudio(audioints);
    }
    // Two bytes per 16-bit sample.
    audiobuffer = new byte[audioints.length * 2];
    try {
        if (sdl != null) {
            sdl.close();
        }
        sdl = AudioSystem.getSourceDataLine(format);
        // Triple-buffered line to absorb scheduling jitter.
        sdl.open(format, audiobuffer.length * 3);
        sdl.start();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
    }
}
/** Creates a new instance of test. Opens the microphone input as the target line. * To start the reporting, {@link #start} the thread. * @throws LineUnavailableException if microphone input is not available */ public VirtualDrummerMicrophoneInput () throws LineUnavailableException{ // getAudioInfo(); // prints lots of useless information format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,sampleRate,8,1,1,sampleRate,false); DataLine.Info dlinfo = new DataLine.Info(TargetDataLine.class, format); if ( AudioSystem.isLineSupported(dlinfo) ){ targetDataLine = (TargetDataLine)AudioSystem.getLine(dlinfo); } targetDataLine.open(format,bufferSize); bufferSize=targetDataLine.getBufferSize(); gui = new DrumSoundDetectorDemo(); gui.setVirtualDrummerMicrophoneInput(this); }
/**
 * Picks the weakest visible player (lowest health, not behind a wall) and
 * deals it 75 damage, playing the AWP sound effect.
 *
 * <p>Fix: the original dereferenced {@code weakest} unconditionally and threw
 * a NullPointerException when no eligible player existed; now it simply does
 * nothing in that case.
 */
public void attack() {
    Player weakest = null;
    for (Entity e : getLevel().getEntities()) {
        if (e.isActive() && e instanceof Player
                && !getLevel().isThroughWall(getLocation(), e.getLocation())) {
            Player p = (Player) e;
            if (weakest == null || p.getHealth() < weakest.getHealth()) {
                weakest = p;
            }
        }
    }
    if (weakest == null) {
        return; // no visible player to attack
    }
    weakest.takeDamage(75);
    try {
        SoundStuff cam = new SoundStuff();
        cam.AWP();
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e1) {
        e1.printStackTrace();
    }
}
/**
 * Loads the classpath resource {@code filePath} into {@code clip} and caches
 * its master-gain control in {@code volume}. Exits the JVM on failure.
 *
 * <p>Fixes: a missing resource now fails with a clear IOException instead of
 * an NPE from the null stream, and the input streams are closed after the
 * clip has buffered the data (the original leaked them).
 */
private void loadClip() {
    try {
        clip = AudioSystem.getClip();
        InputStream in = getClass().getClassLoader().getResourceAsStream(filePath);
        if (in == null) {
            // getResourceAsStream returns null for a missing resource.
            throw new IOException("Audio resource not found on classpath: " + filePath);
        }
        try (BufferedInputStream bufferedIn = new BufferedInputStream(in);
                AudioInputStream audioIn = AudioSystem.getAudioInputStream(bufferedIn)) {
            // Clip.open reads the whole stream up front, so closing afterwards is safe.
            clip.open(audioIn);
        }
        volume = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
    } catch (LineUnavailableException | UnsupportedAudioFileException | IOException e) {
        e.printStackTrace();
        System.exit(-1);
    }
}
/**
 * Opens this port: (re)queries the native control list when the native mixer
 * identity changed (or was never fetched), otherwise re-enables the cached
 * controls.
 *
 * @throws LineUnavailableException if the native port cannot be opened
 */
void implOpen() throws LineUnavailableException {
    if (Printer.trace) Printer.trace(">> PortMixerPort: implOpen().");
    // Identity of the native mixer instance this port belongs to.
    long newID = ((PortMixer) mixer).getID();
    if ((id == 0) || (newID != id) || (controls.length == 0)) {
        // First open, or the native mixer was re-created: fetch the current
        // controls from the native layer into a fresh vector.
        id = newID;
        Vector vector = new Vector();
        synchronized (vector) {
            nGetControls(id, portIndex, vector);
            controls = new Control[vector.size()];
            for (int i = 0; i < controls.length; i++) {
                controls[i] = (Control) vector.elementAt(i);
            }
        }
    } else {
        // Same native mixer as before: cached controls are still valid,
        // just switch them back on.
        enableControls(controls, true);
    }
    if (Printer.trace) Printer.trace("<< PortMixerPort: implOpen() succeeded");
}
/**
 * Opens the port line, first reserving mixer resources and releasing them
 * again if the underlying open fails. Idempotent: a no-op when already open.
 * All state transitions happen while holding the mixer lock.
 *
 * @throws LineUnavailableException if the line cannot be opened
 */
public void open() throws LineUnavailableException {
    synchronized (mixer) {
        // if the line is not currently open, try to open it with this format and buffer size
        if (!isOpen()) {
            if (Printer.trace) Printer.trace("> PortMixerPort: open");
            // reserve mixer resources for this line
            mixer.open(this);
            try {
                // open the line. may throw LineUnavailableException.
                implOpen();
                // if we succeeded, set the open state to true and send events
                setOpen(true);
            } catch (LineUnavailableException e) {
                // release mixer resources for this line and then throw the exception
                mixer.close(this);
                throw e;
            }
            if (Printer.trace) Printer.trace("< PortMixerPort: open succeeded");
        }
    }
}
public void run () { //create a separate thread for recording Thread recordThread = new Thread(new Runnable() { @Override public void run() { try { System.out.println("Start recording..."); recorder.start(); } catch (LineUnavailableException ex) { ex.printStackTrace(); System.exit(-1); } } }); recordThread.start(); // try { // Thread.sleep(RECORD_TIME); // } catch (InterruptedException ex) { // ex.printStackTrace(); // } }
/**
 * Runs every scenario against every installed mixer and reports pass/fail.
 *
 * <p>Fix: {@code mixer.close()} was skipped when a scenario threw, leaking the
 * open mixer; the close now sits in a finally block.
 *
 * @throws Exception if any test failed
 */
public static void main(String[] args) throws Exception {
    Mixer.Info[] infos = AudioSystem.getMixerInfo();
    log("" + infos.length + " mixers detected");
    for (int i = 0; i < infos.length; i++) {
        Mixer mixer = AudioSystem.getMixer(infos[i]);
        log("Mixer " + (i + 1) + ": " + infos[i]);
        try {
            mixer.open();
            try {
                for (Scenario scenario : scenarios) {
                    testSDL(mixer, scenario);
                    testTDL(mixer, scenario);
                }
            } finally {
                // Always release the mixer, even when a scenario throws.
                mixer.close();
            }
        } catch (LineUnavailableException ex) {
            log("LineUnavailableException: " + ex);
        }
    }
    if (failed == 0) {
        log("PASSED (" + total + " tests)");
    } else {
        log("FAILED (" + failed + " of " + total + " tests)");
        throw new Exception("Test FAILED");
    }
}
/**
 * Returns a software-mixed line matching the requested info.
 *
 * @param info the line description; must be supported by this mixer
 * @return a SoftMixingSourceDataLine or SoftMixingClip
 * @throws IllegalArgumentException if the line type is not supported
 * @throws LineUnavailableException declared for interface compatibility
 */
@Override
public Line getLine(Line.Info info) throws LineUnavailableException {
    if (!isLineSupported(info)) {
        throw new IllegalArgumentException("Line unsupported: " + info);
    }
    final Class<?> lineClass = info.getLineClass();
    if (lineClass == SourceDataLine.class) {
        return new SoftMixingSourceDataLine(this, (DataLine.Info) info);
    }
    if (lineClass == Clip.class) {
        return new SoftMixingClip(this, (DataLine.Info) info);
    }
    // Supported per isLineSupported, but not a type we know how to build.
    throw new IllegalArgumentException("Line unsupported: " + info);
}
/** * This method allows you to stream a continuous stream of data to the API. * <p> * Note: This feature is experimental. * </p> * * @param tl * TL * @param af * AF * @throws LineUnavailableException * If the Line is unavailable * @throws InterruptedException * InterruptedException */ public void recognize(TargetDataLine tl , AudioFormat af) throws LineUnavailableException , InterruptedException { //Generates a unique ID for the response. final long PAIR = MIN + (long) ( Math.random() * ( ( MAX - MIN ) + 1L ) ); //Generates the Downstream URL final String API_DOWN_URL = GOOGLE_DUPLEX_SPEECH_BASE + "down?maxresults=1&pair=" + PAIR; //Generates the Upstream URL final String API_UP_URL = GOOGLE_DUPLEX_SPEECH_BASE + "up?lang=" + language + "&lm=dictation&client=chromium&pair=" + PAIR + "&key=" + API_KEY + "&continuous=true&interim=true"; //Tells Google to constantly monitor the stream; //Opens downChannel Thread downChannel = this.downChannel(API_DOWN_URL); //Opens upChannel Thread upChannel = this.upChannel(API_UP_URL, tl, af); try { downChannel.join(); upChannel.interrupt(); upChannel.join(); } catch (InterruptedException e) { downChannel.interrupt(); downChannel.join(); upChannel.interrupt(); upChannel.join(); } }
/**
 * Loops or stops the death-sound AudioClip depending on {@code flag}.
 *
 * <p>Fixes: the original also obtained an {@code AudioSystem.getClip()} and a
 * duplicate URL that were never used — the unused Clip held an audio line it
 * never released — plus a stray debug print. All three are removed; the
 * audible behavior is unchanged.
 *
 * @param tempo unused; kept for interface compatibility
 * @param flag true to loop the sound, false to stop it
 */
public void playAudio(int tempo, boolean flag)
        throws UnsupportedAudioFileException, LineUnavailableException, IOException, InterruptedException {
    URL url = getClass().getResource("/audio/smb_die.wav");
    this.audio = Applet.newAudioClip(url);
    if (flag) {
        audio.loop();
    } else {
        audio.stop();
    }
}
/**
 * Builds the patient-facing window for the given user and runs the initial
 * update cycle. The call order below is significant: widgets must exist
 * before any label is set or helper runs.
 *
 * @param nome the patient's display name, shown in the header label
 */
public PacienteInterface(String nome) throws FileNotFoundException, IOException, UnsupportedAudioFileException, LineUnavailableException, InterruptedException {
    initComponents();      // GUI-builder widget setup; must run before any widget access
    nomeUser = nome;
    jLabel2NP.setText(nome);   // show the patient's name
    setVisible(true);
    initXYK();                 // NOTE(review): helper semantics not visible from here
    updateRemedy(false);       // initial medication refresh (false => not forced; confirm)
    compareHour();             // check current time against the schedule
}
/**
 * Play the audio, lazily loading it first if needed. On any failure the error
 * is reported and playback is stopped.
 *
 * <p>Fix: the error message contained a literal {@code "/n"}; the intended
 * newline escape is {@code "\n"}.
 */
public void play() {
    try {
        if (!isLoaded()) {
            load();
        }
        clip.start();
    } catch (IOException | LineUnavailableException | UnsupportedAudioFileException e) {
        System.out.println("ERROR PLAYING SOUND. \n");
        e.printStackTrace();
        this.stop();
    }
}
/**
 * Starts the looping alarm horn if it is not already playing and the bell
 * preference is enabled, then refreshes the bell icon. Evaluation order is
 * preserved: the preference store is consulted only when the clip is idle.
 */
private void enableHorn() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    if (this.clip == null || !this.clip.isRunning()) {
        final boolean bellEnabled = Activator.getDefault().getPreferenceStore()
                .getBoolean(PreferenceConstants.BELL_ACTIVATED_KEY);
        if (bellEnabled) {
            final AudioInputStream sound = AudioSystem.getAudioInputStream(this.soundFile);
            final DataLine.Info info = new DataLine.Info(Clip.class, sound.getFormat());
            this.clip = (Clip) AudioSystem.getLine(info);
            this.clip.open(sound);
            this.clip.loop(Clip.LOOP_CONTINUOUSLY);
        }
    }
    if (!this.bellIcon.isDisposed()) {
        this.bellIcon.setImage(getBellIcon());
    }
}
/**
 * Plays the audio resource named by the first argument through the default
 * audio output, resampled to TARGET_SAMPLE_RATE.
 */
@Override
public void run(String... args) {
    final String inputResource = AudioResourceUtils.sanitizeResource(args[0]);
    try {
        // NOTE(review): 2028 looks like a typo for the usual 2048 buffer
        // size — confirm before changing; kept as-is to preserve behavior.
        final AudioDispatcher dispatcher =
                AudioDispatcherFactory.fromPipe(inputResource, TARGET_SAMPLE_RATE, 2028, 0);
        dispatcher.addAudioProcessor(
                new AudioPlayer(JVMAudioInputStream.toAudioFormat(dispatcher.getFormat())));
        dispatcher.run();
    } catch (LineUnavailableException e) {
        e.printStackTrace();
        System.err.print(e.getLocalizedMessage());
    }
}
/**
 * Continuously monitors an audio source (the default microphone or a piped
 * stream) and forwards each overlapping block to the query processor.
 *
 * <p>Fix: when the microphone was unavailable the original set {@code d} to
 * null and then crashed with a NullPointerException on the next line; it now
 * logs and returns.
 */
@Override
public void monitor(String query, int maxNumberOfReqults, Set<Integer> avoid, QueryResultHandler handler) {
    int samplerate = Config.getInt(Key.RAFS_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            return; // nothing to monitor without a source
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.setZeroPadFirstBuffer(true);
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            // Report timestamps relative to the start of the fresh (non-overlap) part.
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQuery(audioEvent.getFloatBuffer().clone(), handler, timeStamp, avoid);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
/**
 * Continuously monitors an audio source and forwards each overlapping block
 * to the serialized-fingerprint processor.
 *
 * <p>Fix: a failed microphone connection left {@code d} null and the method
 * then crashed with a NullPointerException; it now logs and returns.
 */
public void monitor(String query, final SerializedFingerprintsHandler handler) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            return; // nothing to monitor without a source
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            // Report timestamps relative to the start of the fresh (non-overlap) part.
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQueryToSerializeFingerprints(audioEvent.getFloatBuffer().clone(), handler, timeStamp);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
/**
 * Continuously monitors an audio source and forwards each overlapping block
 * to the query processor, bounded by {@code maxNumberOfResults}.
 *
 * <p>Fix: a failed microphone connection left {@code d} null and the method
 * then crashed with a NullPointerException; it now logs and returns.
 */
@Override
public void monitor(String query, final int maxNumberOfResults, Set<Integer> avoid, final QueryResultHandler handler) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.MONITOR_STEP_SIZE) * samplerate;
    int overlap = Config.getInt(Key.MONITOR_OVERLAP) * samplerate;
    AudioDispatcher d;
    if (query.equals(Panako.DEFAULT_MICROPHONE)) {
        try {
            d = AudioDispatcherFactory.fromDefaultMicrophone(samplerate, size, overlap);
        } catch (LineUnavailableException e) {
            LOG.warning("Could not connect to default microphone!" + e.getMessage());
            e.printStackTrace();
            return; // nothing to monitor without a source
        }
    } else {
        d = AudioDispatcherFactory.fromPipe(query, samplerate, size, overlap);
    }
    d.addAudioProcessor(new AudioProcessor() {
        @Override
        public boolean process(AudioEvent audioEvent) {
            // Report timestamps relative to the start of the fresh (non-overlap) part.
            double timeStamp = audioEvent.getTimeStamp() - Config.getInt(Key.MONITOR_OVERLAP);
            processMonitorQuery(audioEvent.getFloatBuffer().clone(), maxNumberOfResults, handler, timeStamp, avoid);
            return true;
        }

        @Override
        public void processingFinished() {
        }
    });
    d.run();
}
/**
 * Opens the audio capturing device so that it will be ready for capturing
 * audio. Attempts to create a converter if the requested audio format is
 * not directly available.
 *
 * @return true if the audio capturing device is opened successfully; false
 * otherwise
 */
private boolean open() {
    TargetDataLine audioLine = getAudioLine();
    if (audioLine != null) {
        if (!audioLine.isOpen()) {
            logger.info("open");
            try {
                audioLine.open(finalFormat, audioBufferSize);
            } catch (LineUnavailableException e) {
                logger.severe("Can't open microphone " + e.getMessage());
                return false;
            }
            audioStream = new AudioInputStream(audioLine);
            // If the hardware can't deliver the desired format directly,
            // wrap the stream in a converting AudioInputStream.
            if (doConversion) {
                audioStream = AudioSystem.getAudioInputStream(desiredFormat, audioStream);
                assert (audioStream != null);
            }
            /*
             * Set the frame size depending on the sample rate:
             * bytes-per-sample * samples-per-read-window.
             */
            float sec = ((float) msecPerRead) / 1000.f;
            frameSizeInBytes = (audioStream.getFormat().getSampleSizeInBits() / 8)
                    * (int) (sec * audioStream.getFormat().getSampleRate());
            logger.info("Frame size: " + frameSizeInBytes + " bytes");
        }
        // Already-open lines are treated as success without re-opening.
        return true;
    } else {
        logger.severe("Can't find microphone");
        return false;
    }
}
/**
 * Loads the given audio file into a Clip, ready for playback. Failures are
 * reported to stderr and leave the fields in whatever state was reached.
 *
 * @param file path to the audio file on disk
 */
public Sound(String file) {
    try {
        ais = AudioSystem.getAudioInputStream(new File(file));
        clip = AudioSystem.getClip();
        clip.open(ais);
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
        e.printStackTrace();
    }
}
/**
 * Kicks off speech recognition on a background thread, feeding the duplex
 * API from the microphone's target data line.
 */
private void startSpeechRecognition() {
    final Runnable recognitionTask = () -> {
        try {
            gSpeechDuplex.recognize(mic.getTargetDataLine(), mic.getAudioFormat());
        } catch (InterruptedException | LineUnavailableException exception) {
            System.out.println("Exception caused : " + exception.getMessage());
        }
    };
    new Thread(recognitionTask).start();
}
/**
 * Walks every mixer's target-line infos and verifies that getLine() raises
 * neither IllegalArgumentException nor LineUnavailableException; throws if
 * either unexpected exception was seen.
 */
public static void main(String argv[]) throws Exception {
    boolean success = true;
    for (Mixer.Info mixerInfo : AudioSystem.getMixerInfo()) {
        Mixer mixer = AudioSystem.getMixer(mixerInfo);
        System.out.println("Mixer is: " + mixer);
        for (Line.Info lineInfo : mixer.getTargetLineInfo()) {
            try {
                System.out.println("Trying to get:" + lineInfo);
                mixer.getLine(lineInfo);
            } catch (IllegalArgumentException iae) {
                System.out.println("Unexpected IllegalArgumentException raised:");
                iae.printStackTrace();
                success = false;
            } catch (LineUnavailableException lue) {
                System.out.println("Unexpected LineUnavailableException raised:");
                lue.printStackTrace();
                success = false;
            }
        }
    }
    if (success) {
        System.out.println("Test passed");
    } else {
        throw new Exception("Test FAILED");
    }
}
/**
 * Opens the clip using the audio data supplied earlier; delegates to the
 * full open(format, data, offset, bufferSize) overload.
 *
 * @throws IllegalArgumentException if no audio data has been set
 * @throws LineUnavailableException if the line cannot be opened
 */
@Override
public void open() throws LineUnavailableException {
    if (data == null) {
        throw new IllegalArgumentException("Illegal call to open() in interface Clip");
    }
    open(format, data, offset, bufferSize);
}
/** Creates new SampledSoundPlayer labeled number i, using sampled sound stored as preference for this number. * * @param i an index, used to look up the preferred sound file. * @throws IOException * @throws LineUnavailableException * @throws UnsupportedAudioFileException */ public SampledSoundPlayer(int i)throws IOException, UnsupportedAudioFileException, LineUnavailableException{ drumNumber=i; soundNumber=prefs.getInt(prefsKey(),0); if(soundNumber>=SampledSoundPlayer.getSoundFilePaths().size()) { throw new IOException("There is no sound number "+soundNumber+" available"); } setFile(soundNumber); open(); }
private byte[] record() throws LineUnavailableException { AudioFormat format = AudioUtil.getAudioFormat(audioConf); DataLine.Info info = new DataLine.Info(TargetDataLine.class, format); // Checks if system supports the data line if (!AudioSystem.isLineSupported(info)) { LOGGER.error("Line not supported"); System.exit(0); } microphone = (TargetDataLine) AudioSystem.getLine(info); microphone.open(format); microphone.start(); LOGGER.info("Listening, tap enter to stop ..."); ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); int numBytesRead; byte[] data = new byte[microphone.getBufferSize() / 5]; // Begin audio capture. microphone.start(); // Here, stopped is a global boolean set by another thread. while (!stopped) { // Read the next chunk of data from the TargetDataLine. numBytesRead = microphone.read(data, 0, data.length); // Save this chunk of data. byteArrayOutputStream.write(data, 0, numBytesRead); } return byteArrayOutputStream.toByteArray(); }
/**
 * Opens the shared audio stream into the clip and starts playback.
 */
public void swnat() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.open(audioStream);
        audioClip.start();
    } catch (MalformedURLException murle) {
        // NOTE(review): MalformedURLException is an IOException subtype and
        // open()/start() don't produce it; this catch is effectively dead but
        // is kept to preserve behavior exactly.
        System.out.println(murle);
    }
}
/**
 * Opens the Paser stream into the Phazer clip and starts playback.
 */
public void Phazing() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        Phazer.open(Paser);
        Phazer.start();
    } catch (MalformedURLException murle) {
        // NOTE(review): effectively dead catch (see IOException hierarchy);
        // kept to preserve behavior exactly.
        System.out.println(murle);
    }
}
/**
 * Opens the underlying line for this clip.
 *
 * <p>Guard: this overload is only legal after audio data has been supplied
 * via open(format, byte[], frameSize); without it there is nothing to play.
 *
 * @param format the audio format to open with
 * @param bufferSize the requested buffer size in bytes
 * @throws LineUnavailableException if the line cannot be opened
 * @throws IllegalArgumentException if no audio data has been set
 */
@Override
void implOpen(AudioFormat format, int bufferSize) throws LineUnavailableException {
    // only if audioData wasn't set in a calling open(format, byte[], frameSize)
    // this call is allowed.
    if (audioData == null) {
        throw new IllegalArgumentException("illegal call to open() in interface Clip");
    }
    super.implOpen(format, bufferSize);
}
/**
 * Stops the currently playing clip, releases its stream, then opens and
 * starts the aC clip from the aS stream.
 */
public void dbo() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.close();
        audioStream.close();
        aC.open(aS);
        aC.start();
    } catch (MalformedURLException murle) {
        // NOTE(review): effectively dead catch; kept to preserve behavior.
        System.out.println(murle);
    }
}
/**
 * Stops the currently playing clip, releases its stream, then opens the aC
 * clip from the aS stream, starts it, and loops it ten more times.
 */
public void dbol() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.close();
        audioStream.close();
        aC.open(aS);
        aC.start();
        aC.loop(10);
    } catch (MalformedURLException murle) {
        // NOTE(review): effectively dead catch; kept to preserve behavior.
        System.out.println(murle);
    }
}
/** * Create AudioInputStream and AudioFileFormat from the data source. * * @throws StreamPlayerException * the stream player exception */ private void initAudioInputStream() throws StreamPlayerException { try { logger.info("Entered initAudioInputStream\n"); //Reset reset(); //Notify Status status = Status.OPENING; generateEvent(Status.OPENING, getEncodedStreamPosition(), dataSource); // Audio resources from file||URL||inputStream. initAudioInputStreamPart2(); // Create the Line createLine(); // Determine Properties determineProperties(); // System out all properties // System.out.println(properties.size()) // properties.keySet().forEach(key -> { // System.out.println(key + ":" + properties.get(key)); // }) status = Status.OPENED; generateEvent(Status.OPENED, getEncodedStreamPosition(), null); } catch (LineUnavailableException | UnsupportedAudioFileException | IOException ¢) { logger.log(Level.INFO, ¢.getMessage(), ¢); throw new StreamPlayerException(¢); } logger.info("Exited initAudioInputStream\n"); }
/**
 * Stops the aC clip, then opens the Marx stream into the Lenin clip and
 * starts playback.
 */
public void CCCP() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        aC.stop();
        Lenin.open(Marx);
        Lenin.start();
    } catch (MalformedURLException murle) {
        // NOTE(review): effectively dead catch; kept to preserve behavior.
        System.out.println(murle);
    }
}
/**
 * Runs test(); audio-environment failures (no line, illegal argument/state)
 * mark the test "not applicable" and pass automatically. Any counted errors
 * fail the run.
 */
public static void main(String[] args) throws InterruptedException {
    try {
        test();
    } catch (LineUnavailableException | IllegalArgumentException | IllegalStateException ignored) {
        // No usable audio environment here: skip gracefully.
        System.out.println("Test is not applicable. Automatically passed");
        return;
    }
    if (countErrors > 0) {
        throw new RuntimeException("Test FAILED: " + countErrors
                + " error detected (total " + LOOP_COUNT + ")");
    }
}