/**
 * Attacks every active player in range: each {@code Player} that is within
 * distance 4 and not behind a wall takes 80 damage. The weapon sound is
 * played exactly once per attack.
 *
 * <p>Fix: the original played the sound effect inside the entity loop, i.e.
 * once for every entity in the level, regardless of whether it was hit.
 */
public void attack() {
    for (Entity e : getLevel().getEntities()) {
        if (e.isActive() && e instanceof Player) {
            Player p = (Player) e;
            if (!getLevel().isThroughWall(getLocation(), p.getLocation())
                    && getLocation().dist(p.getLocation()) < 4) {
                p.takeDamage(80);
            }
        }
    }
    // Play the shot sound once, after all damage has been resolved.
    try {
        SoundStuff cam = new SoundStuff();
        cam.AWP();
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e1) {
        e1.printStackTrace();
    }
}
/** * @param args the command line arguments * @throws java.io.IOException */ public static void main(String[] args) throws IOException, InterruptedException, PropertyVetoException, UnsupportedAudioFileException, LineUnavailableException { File file = new File("CADASTRADOS.txt"); if(!file.exists()){ file.createNewFile();// TODO code application logic here } File dir = new File("CadastroRemedios"); if(!dir.exists()){ dir.mkdir(); } LoginInterface inFrame = new LoginInterface(); CadastroInterface inCadas = new CadastroInterface(); TelaInicial tl = new TelaInicial(inFrame); tl.setVisible(true); System.out.println("Permitir Alteração"); inFrame.setVisible(true); }
/**
 * Obtains an audio input stream from the provided stream. The stream must
 * support mark/reset; on failure its position is restored before an
 * UnsupportedAudioFileException is rethrown.
 *
 * @param stream the mark/reset-capable input stream to read audio data from
 * @return an AudioInputStream positioned at the start of the audio data
 * @throws UnsupportedAudioFileException if the stream content is not
 *         recognized (or its header is truncated)
 * @throws IOException if an I/O error occurs
 */
@Override
public AudioInputStream getAudioInputStream(final InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    stream.mark(200); // The biggest value which was historically used
    try {
        final StandardFileFormat format = getAudioFileFormatImpl(stream);
        // we've got everything, the stream is supported and it is at the
        // beginning of the audio data, so return an AudioInputStream
        return new AudioInputStream(stream, format.getFormat(),
                format.getLongFrameLength());
    } catch (UnsupportedAudioFileException | EOFException ignored) {
        // stream is unsupported or the header is less than was expected
        stream.reset();
        throw new UnsupportedAudioFileException();
    }
}
/**
 * Returns an audio input stream positioned at the "data" chunk of a
 * RIFF/WAVE stream.
 *
 * @param stream the stream containing RIFF/WAVE data
 * @return an AudioInputStream over the "data" chunk
 * @throws UnsupportedAudioFileException if the stream is not RIFF/WAVE or
 *         has no "data" chunk
 * @throws IOException if an I/O error occurs
 */
public AudioInputStream getAudioInputStream(InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    final AudioFileFormat format = getAudioFileFormat(stream);
    final RIFFReader riff = new RIFFReader(stream);
    // Both the container format and the type must match.
    if (!riff.getFormat().equals("RIFF")) {
        throw new UnsupportedAudioFileException();
    }
    if (!riff.getType().equals("WAVE")) {
        throw new UnsupportedAudioFileException();
    }
    // Scan chunks until the audio payload is found.
    while (riff.hasNextChunk()) {
        final RIFFReader chunk = riff.nextChunk();
        if (chunk.getFormat().equals("data")) {
            return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
        }
    }
    throw new UnsupportedAudioFileException();
}
/**
 * Returns an audio input stream positioned at the "data" chunk of a
 * RIFF/WAVE stream.
 *
 * @param stream the stream containing RIFF/WAVE data
 * @return an AudioInputStream over the "data" chunk
 * @throws UnsupportedAudioFileException if the stream is not RIFF/WAVE or
 *         contains no "data" chunk
 * @throws IOException if an I/O error occurs
 */
public AudioInputStream getAudioInputStream(InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    AudioFileFormat format = getAudioFileFormat(stream);
    RIFFReader riffiterator = new RIFFReader(stream);
    // Validate the RIFF container and the WAVE type before chunk scanning.
    if (!riffiterator.getFormat().equals("RIFF"))
        throw new UnsupportedAudioFileException();
    if (!riffiterator.getType().equals("WAVE"))
        throw new UnsupportedAudioFileException();
    // Walk the chunks until the audio payload ("data") is found.
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("data")) {
            return new AudioInputStream(chunk, format.getFormat(), chunk.getSize());
        }
    }
    throw new UnsupportedAudioFileException();
}
/**
 * Builds the source chain for the configured input type (samples, pulses, or
 * Commodore TAP intervals), then links it to the platforms and runs it.
 *
 * @throws IOException if the input cannot be read
 * @throws PlatformAccessError if a platform cannot be driven
 * @throws UnsupportedAudioFileException if a WAV input is not decodable
 */
private void extractFromInputStream()
        throws IOException, PlatformAccessError, UnsupportedAudioFileException {
    switch (inputType) {
    case WAV:
        // WAV input provides samples directly.
        AudioInput wavFile = new AudioInput(inputStream, defaultChannelName);
        sampleSource = wavFile;
        configureSampleStreamInput();
        break;
    case PULSES:
        pulseSource = new PulseSourceFromInputStream(inputStream);
        connector = new DummySampleSource();
        break;
    case COMMODORE_TAP:
        intervalSource = new IntervalSourceFromTAPInputStream(inputStream);
        connector = new DummySampleSource();
        break;
    default:
        // No source for unknown types — mirrors the original if/else chain.
        break;
    }
    linkSourceToPlatforms();
    runThroughSource();
}
/**
 * Builds a single-instrument soundbank whose oscillator plays the given
 * audio file (pitched down 4800 cents), or returns {@code null} if the file
 * is not readable audio.
 *
 * @param file the audio file to wrap
 * @return a soundbank with one instrument, or {@code null} on failure
 * @throws InvalidMidiDataException declared for interface compatibility
 * @throws IOException declared for interface compatibility
 */
public Soundbank getSoundbank(File file)
        throws InvalidMidiDataException, IOException {
    try {
        // Probe first: getAudioInputStream throws if the format is unknown.
        AudioInputStream ais = AudioSystem.getAudioInputStream(file);
        ais.close();
        ModelByteBufferWavetable osc = new ModelByteBufferWavetable(
                new ModelByteBuffer(file, 0, file.length()), -4800);
        ModelPerformer performer = new ModelPerformer();
        performer.getOscillators().add(osc);
        SimpleSoundbank sbk = new SimpleSoundbank();
        SimpleInstrument ins = new SimpleInstrument();
        ins.add(performer);
        sbk.addInstrument(ins);
        return sbk;
    } catch (UnsupportedAudioFileException | IOException probeFailure) {
        // Either failure mode means "not usable audio": signal with null.
        return null;
    }
}
/**
 * Fingerprints the given audio block with the RAFS extractor and queries the
 * monitor index with the result. Unsupported audio is logged and skipped.
 *
 * @param audioData raw audio samples to fingerprint
 * @param handler receives the query results
 * @param timeStamp start time of this block (not used directly here)
 * @param avoid identifiers to exclude from matching
 */
private void processMonitorQuery(float[] audioData, QueryResultHandler handler,
        double timeStamp, Set<Integer> avoid) {
    final int sampleRate = Config.getInt(Key.RAFS_SAMPLE_RATE);
    final int fftSize = Config.getInt(Key.RAFS_FFT_SIZE);
    final int fftOverlap = fftSize - Config.getInt(Key.RAFS_FFT_STEP_SIZE);
    try {
        final AudioDispatcher dispatcher = AudioDispatcherFactory.fromFloatArray(
                audioData, sampleRate, fftSize, fftOverlap);
        dispatcher.setZeroPadFirstBuffer(true);
        final RafsExtractor extractor = new RafsExtractor(null, true);
        dispatcher.addAudioProcessor(extractor);
        dispatcher.run();
        // Query with the extracted prints and their bit probabilities.
        queryForMonitor(extractor.fingerprints, extractor.fingerprintProbabilities,
                10, avoid, handler);
    } catch (UnsupportedAudioFileException e) {
        LOG.severe("Unsupported audio");
    }
}
/**
 * Fingerprints the given audio block and hands the serialized (JSON)
 * fingerprints plus timing info to the handler. Unsupported audio is logged
 * and skipped.
 *
 * @param audioBuffer raw audio samples to fingerprint
 * @param handler receives the serialized fingerprints
 * @param queryOffset start offset of this block in the monitored stream
 */
private void processMonitorQueryToSerializeFingerprints(float[] audioBuffer,
        SerializedFingerprintsHandler handler, double queryOffset) {
    final int sampleRate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    final int blockSize = Config.getInt(Key.NFFT_SIZE);
    final int blockOverlap = blockSize - Config.getInt(Key.NFFT_STEP_SIZE);
    try {
        final AudioDispatcher dispatcher = AudioDispatcherFactory.fromFloatArray(
                audioBuffer, sampleRate, blockSize, blockOverlap);
        final NFFTEventPointProcessor eventPointProcessor =
                new NFFTEventPointProcessor(blockSize, blockOverlap, sampleRate);
        dispatcher.addAudioProcessor(eventPointProcessor);
        dispatcher.run();
        final double queryDuration = dispatcher.secondsProcessed();
        final List<NFFTFingerprint> fingerprints =
                new ArrayList<NFFTFingerprint>(eventPointProcessor.getFingerprints());
        handler.handleSerializedFingerprints(
                PanakoWebserviceClient.serializeFingerprintsToJson(fingerprints),
                queryDuration, queryOffset);
    } catch (UnsupportedAudioFileException e) {
        LOG.severe("Unsupported audio");
    }
}
/**
 * Picks the weakest visible player (lowest health, not behind a wall) and
 * deals it 75 damage, then plays the weapon sound.
 *
 * <p>Fix: the original dereferenced {@code weakest} unconditionally, which
 * throws a NullPointerException when no active, visible player exists.
 */
public void attack() {
    Player weakest = null;
    for (Entity e : getLevel().getEntities()) {
        if (e.isActive() && e instanceof Player
                && !getLevel().isThroughWall(getLocation(), e.getLocation())) {
            Player p = (Player) e;
            if (weakest == null || p.getHealth() < weakest.getHealth()) {
                weakest = p;
            }
        }
    }
    if (weakest == null) {
        // No eligible target: do nothing (no damage, no sound).
        return;
    }
    weakest.takeDamage(75);
    try {
        SoundStuff cam = new SoundStuff();
        cam.AWP();
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e1) {
        e1.printStackTrace();
    }
}
/**
 * Obtains an audio input stream positioned at the "data" chunk of the given
 * RIFF/WAVE stream. The header is parsed first for format and frame length,
 * then the chunks are re-scanned to locate the audio payload.
 *
 * @param stream the stream containing RIFF/WAVE data
 * @return an AudioInputStream over the "data" chunk
 * @throws UnsupportedAudioFileException if no "data" chunk is present
 * @throws IOException if an I/O error occurs
 */
@Override
public AudioInputStream getAudioInputStream(final InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    final StandardFileFormat format = getAudioFileFormat(stream);
    final AudioFormat af = format.getFormat();
    final long length = format.getLongFrameLength();
    // we've got everything, the stream is supported and it is at the
    // beginning of the header, so find the data chunk again and return an
    // AudioInputStream
    final RIFFReader riffiterator = new RIFFReader(stream);
    while (riffiterator.hasNextChunk()) {
        RIFFReader chunk = riffiterator.nextChunk();
        if (chunk.getFormat().equals("data")) {
            return new AudioInputStream(chunk, af, length);
        }
    }
    throw new UnsupportedAudioFileException();
}
/**
 * Obtains an audio stream from the File provided. The File must
 * point to valid audio file data.
 * @param file the File for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the File
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(File file)
        throws UnsupportedAudioFileException, IOException {
    FileInputStream fis = new FileInputStream(file); // throws IOException
    AudioFileFormat fileFormat = null;
    // part of fix for 4325421: close fis only when header parsing failed;
    // on success the returned AudioInputStream takes ownership of fis.
    try {
        fileFormat = getCOMM(fis, false);
    } finally {
        if (fileFormat == null) {
            fis.close();
        }
    }
    return new AudioInputStream(fis, fileFormat.getFormat(), fileFormat.getFrameLength());
}
/**
 * Verifies the frame length after the stream was saved/read to/from file.
 *
 * @param afw the writer used to store the stream
 * @param type the target audio file type
 * @param ais the stream to save; reassigned to the stream re-read from the
 *            temporary file before validation
 * @throws IOException if the temporary file cannot be written or deleted
 */
private static void testAfterSaveToFile(final AudioFileWriter afw,
        final AudioFileFormat.Type type, AudioInputStream ais) throws IOException {
    final File temp = File.createTempFile("sound", ".tmp");
    try {
        afw.write(ais, type, temp);
        ais = AudioSystem.getAudioInputStream(temp);
        final long frameLength = ais.getFrameLength();
        ais.close();
        validate(frameLength);
    } catch (IllegalArgumentException | UnsupportedAudioFileException ignored) {
        // A writer that does not support this type/stream combination is
        // expected to throw; that simply skips the validation.
    } finally {
        // Always remove the temporary file, even on failure.
        Files.delete(Paths.get(temp.getAbsolutePath()));
    }
}
/** * Plays an audio file (in .wav, .mid, or .au format) in a background * thread. * * @param filename * the name of the audio file * @throws IllegalArgumentException * if unable to play {@code filename} * @throws IllegalArgumentException * if {@code filename} is {@code null} */ public static synchronized void play(final String filename) { if (filename == null) throw new IllegalArgumentException(); InputStream is = StdAudio.class.getResourceAsStream(filename); if (is == null) { throw new IllegalArgumentException("could not read '" + filename + "'"); } // code adapted from: // http://stackoverflow.com/questions/26305/how-can-i-play-sound-in-java try { // check if file format is supported // (if not, will throw an UnsupportedAudioFileException) @SuppressWarnings("unused") AudioInputStream ais = AudioSystem.getAudioInputStream(is); new Thread(new Runnable() { @Override public void run() { stream(filename); } }).start(); } // let's try Applet.newAudioClip() instead catch (UnsupportedAudioFileException e) { playApplet(filename); return; } // something else went wrong catch (IOException ioe) { throw new IllegalArgumentException("could not play '" + filename + "'", ioe); } }
/**
 * Starts or stops the death-jingle audio clip depending on {@code flag}.
 *
 * <p>Fix: the original acquired a {@code Clip} via
 * {@code AudioSystem.getClip()} that was never used or closed (leaking an
 * audio line) and looked the same resource up twice.
 *
 * @param tempo unused; kept for interface compatibility
 * @param flag {@code true} to loop the clip, {@code false} to stop it
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws LineUnavailableException declared for interface compatibility
 * @throws IOException declared for interface compatibility
 * @throws InterruptedException declared for interface compatibility
 */
public void playAudio(int tempo, boolean flag) throws UnsupportedAudioFileException,
        LineUnavailableException, IOException, InterruptedException {
    URL url = getClass().getResource("/audio/smb_die.wav");
    System.out.println(url);
    this.audio = Applet.newAudioClip(url);
    if (flag)
        audio.loop();
    else
        audio.stop();
}
/**
 * Builds the patient UI for the given user: initializes components, sets the
 * name label, shows the frame, then runs the startup checks.
 *
 * @param nome the patient's display name, stored and shown in the header label
 * @throws FileNotFoundException if a required data file is missing
 * @throws IOException if reading patient data fails
 * @throws UnsupportedAudioFileException if an alert sound cannot be decoded
 * @throws LineUnavailableException if no audio line is available for alerts
 * @throws InterruptedException if a startup wait is interrupted
 */
public PacienteInterface(String nome) throws FileNotFoundException, IOException,
        UnsupportedAudioFileException, LineUnavailableException, InterruptedException {
    initComponents();
    nomeUser = nome;
    jLabel2NP.setText(nome);
    setVisible(true);
    initXYK();                // NOTE(review): presumably initial layout/state setup — confirm
    updateRemedy(false);      // refresh remedy data without forcing an update
    compareHour();            // NOTE(review): likely checks current time against alarms — confirm
}
/**
 * Obtains an audio input stream from the given file by delegating to the
 * stream-based overload with a buffered wrapper (so mark/reset works).
 *
 * @param file the file to read audio data from
 * @return an AudioInputStream over the file's audio data
 * @throws UnsupportedAudioFileException if the file format is unrecognized
 * @throws IOException if an I/O error occurs
 */
@Override
public final AudioInputStream getAudioInputStream(final File file)
        throws UnsupportedAudioFileException, IOException {
    final InputStream in = new FileInputStream(file);
    try {
        return getAudioInputStream(new BufferedInputStream(in));
    } catch (final Throwable t) {
        // The caller never sees the stream on failure, so close it here.
        closeSilently(in);
        throw t;
    }
}
/**
 * Returns AudioFileFormat from URL.
 *
 * @param url the URL to probe; its connection stream is closed before return
 * @return the audio file format of the content behind the URL
 * @throws UnsupportedAudioFileException if the content is not recognized
 * @throws IOException if the connection or read fails
 */
public AudioFileFormat getAudioFileFormat(URL url)
        throws UnsupportedAudioFileException, IOException {
    if (TDebug.TraceAudioFileReader) {
        TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): begin");
    }
    // File length over a URL is unknown up front.
    long lFileLengthInBytes = AudioSystem.NOT_SPECIFIED;
    URLConnection conn = url.openConnection();
    // Tell the Shoutcast server (if any) that this SPI supports shoutcast streams.
    conn.setRequestProperty("Icy-Metadata", "1");
    InputStream inputStream = conn.getInputStream();
    AudioFileFormat audioFileFormat = null;
    try {
        audioFileFormat = getAudioFileFormat(inputStream, lFileLengthInBytes);
    } finally {
        // Always release the connection's stream; the format object does
        // not hold on to it.
        inputStream.close();
    }
    if (TDebug.TraceAudioFileReader) {
        TDebug.out("MpegAudioFileReader.getAudioFileFormat(URL): end");
    }
    return audioFileFormat;
}
/**
 * Prepares this reader for decoding: opens the audio stream, converts it to
 * signed PCM, and sizes the per-frame read buffer.
 *
 * @throws IOException if the underlying stream cannot be read
 * @throws UnsupportedAudioFileException if the stream format is unrecognized
 */
private void initialise() throws IOException, UnsupportedAudioFileException {
    ensureMarkResetIsSupported();
    currentTimeIndex = 0.0;
    audioInputStream = AudioSystem.getAudioInputStream(inputStream);
    convertAudioInputStreamToPcmSigned();
    // Duration of a single sample, from the (converted) stream's sample rate.
    sampleLengthInSeconds = (double) (1.0f / audioInputStream.getFormat().getSampleRate());
    bytesPerFrame = audioInputStream.getFormat().getFrameSize();
    if (bytesPerFrame == AudioSystem.NOT_SPECIFIED)
        bytesPerFrame = 1; // unknown frame size: fall back to byte-at-a-time reads
    audioBytes = new byte[bytesPerFrame];
    recordFileDataInLoggingOutput();
}
/**
 * Top-level driver: parses the arguments, configures platforms and output,
 * then processes input from a line or from streams and prints the results.
 *
 * @param args raw command-line arguments
 * @throws IOException if an input source cannot be read
 * @throws PlatformAccessError if a platform cannot be driven
 * @throws UnsupportedAudioFileException if a WAV input is not decodable
 */
private void runWithArguments(String[] args)
        throws IOException, PlatformAccessError, UnsupportedAudioFileException {
    parseArguments(args);
    if (error) {
        System.exit(10);
    }
    if (needToDisplayHelp) {
        displayHelp();
        return;
    }
    configureLogOutput();
    preparePlatformList();
    preparePlatformByteScrapingOutputFiles();
    printIntroductionText();
    initialiseChosenPlatforms();
    configurePlatforms();
    linkDestinationToPlatforms();
    counter = new TimeCounter();
    configureProgessIndicator();
    switch (inputSource) {
    case LINE:
        processLineInput();
        break;
    case STREAM:
    default:
        // Any unexpected source value also falls back to stream input.
        processStreamInput();
        break;
    }
    printFinalOutput();
}
/**
 * Processes every configured input stream in turn, updating the progress
 * indicator per file.
 *
 * <p>Fix: the original only closed {@code inputStream} after a successful
 * extraction, leaking the stream whenever {@code extractFromInputStream()}
 * threw; the close now happens in a {@code finally} block.
 *
 * @throws UnsupportedAudioFileException if a WAV input is not decodable
 * @throws PlatformAccessError if a platform cannot be driven
 * @throws IOException if a stream cannot be read or closed
 */
private void processStreamInput()
        throws UnsupportedAudioFileException, PlatformAccessError, IOException {
    int fileCount = 0;
    int numberOfFiles = inputFilenames.size();
    if (numberOfFiles == 0)
        numberOfFiles = 1; // avoid division by zero in the percentage below
    List<InputStreamSource> streamList = getDataStreamList();
    for (InputStreamSource source : streamList) {
        fileCount++;
        updateProgressPercent(fileCount * 100 / numberOfFiles, source.getName());
        inputStream = source.getStream();
        try {
            extractFromInputStream();
        } finally {
            inputStream.close();
        }
    }
}
/**
 * Applies the given notifier configuration to this instance and
 * re-initializes the connection from it.
 *
 * @param cfg the configuration to apply
 * @throws UnsupportedAudioFileException if the configured sound cannot be decoded
 * @throws IOException if reading the sound file fails
 * @throws LineUnavailableException if no audio line can be opened
 */
private void setConfiguration(final AlarmNotifierConfiguration cfg)
        throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    this.connectionId = cfg.getConnectionId();
    this.prefix = cfg.getPrefix();
    this.soundFile = cfg.getSoundFile();
    this.ackAlarmsAvailableCommand = cfg.getAckAlarmsAvailableCommand();
    this.alarmsAvailableCommand = cfg.getAlarmsAvailableCommand();
    // NOTE(review): the declared audio exceptions presumably originate in
    // initConnection() (loading the sound) — confirm.
    initConnection();
}
/**
 * Fingerprints the audio block, matches it against the fingerprint store,
 * and reports either the matches or an empty result to the handler.
 * Unsupported audio is logged and skipped.
 *
 * @param audioBuffer raw audio samples to fingerprint
 * @param maxNumberOfResults maximum number of matches to request
 * @param handler receives the (possibly empty) query result
 * @param queryOffset start time of this block within the monitored stream
 * @param avoid identifiers to exclude from the reported results
 */
private void processMonitorQuery(float[] audioBuffer, int maxNumberOfResults,
        QueryResultHandler handler, double queryOffset, Set<Integer> avoid) {
    int samplerate = Config.getInt(Key.NFFT_SAMPLE_RATE);
    int size = Config.getInt(Key.NFFT_SIZE);
    int overlap = size - Config.getInt(Key.NFFT_STEP_SIZE);
    AudioDispatcher d;
    try {
        d = AudioDispatcherFactory.fromFloatArray(audioBuffer, samplerate, size, overlap);
        final NFFTEventPointProcessor minMaxProcessor =
                new NFFTEventPointProcessor(size, overlap, samplerate);
        d.addAudioProcessor(minMaxProcessor);
        d.run();
        List<NFFTFingerprint> fingerprints =
                new ArrayList<NFFTFingerprint>(minMaxProcessor.getFingerprints());
        final List<NFFTFingerprintQueryMatch> queryMatches =
                new ArrayList<NFFTFingerprintQueryMatch>();
        queryMatches.addAll(storage.getMatches(fingerprints, maxNumberOfResults));
        double queryDuration = d.secondsProcessed();
        if (queryMatches.isEmpty()) {
            // No matches at all: report an empty result spanning this block.
            QueryResult result =
                    QueryResult.emptyQueryResult(queryOffset, queryOffset + queryDuration);
            handler.handleEmptyResult(result);
        } else {
            for (NFFTFingerprintQueryMatch match : queryMatches) {
                // avoid the results in the avoid hash set
                if (!avoid.contains(match.identifier)) {
                    String description = storage.getAudioDescription(match.identifier);
                    handler.handleQueryResult(new QueryResult(queryOffset,
                            queryOffset + queryDuration, String.valueOf(match.identifier),
                            description, match.score, match.getStartTime(), 100.0, 100.0));
                }
            }
        }
    } catch (UnsupportedAudioFileException e) {
        LOG.severe("Unsupported audio");
    }
}
/**
 * Converts the given WAV stream to 16 kHz mono PCM and transmits it.
 * Encoding failures are logged and the audio is dropped.
 *
 * @param wavStream the WAV audio to send
 */
public final void sendAudio(InputStream wavStream) {
    final AudioInputStream pcmStream;
    try {
        pcmStream = adjustAudioEncoding(wavStream);
    } catch (UnsupportedAudioFileException | IOException ex) {
        log.error("Problem adjusting audio", ex);
        return;
    }
    send16khzMonoPcmAudio(pcmStream);
}
/**
 * Decodes the source WAV stream and progressively narrows it to the target
 * encoding: 16 kHz, then mono, then PCM — and strips the RIFF header.
 *
 * @param sourceWavStream the raw WAV input
 * @return a 16 kHz mono PCM stream positioned past the RIFF header
 * @throws UnsupportedAudioFileException if the input is not decodable audio
 * @throws IOException if reading the input fails
 */
private static AudioInputStream adjustAudioEncoding(InputStream sourceWavStream)
        throws UnsupportedAudioFileException, IOException {
    AudioInputStream converted =
            toPcm(toMono(to16khz(getAudioInputStream(sourceWavStream))));
    skipRiffHeader(converted);
    return converted;
}
/**
 * Loads the given audio file into a Clip, ready to play. On any failure the
 * error is printed and the Sound is left unplayable.
 *
 * @param file path of the audio file to load
 */
public Sound(String file) {
    try {
        ais = AudioSystem.getAudioInputStream(new File(file));
        clip = AudioSystem.getClip();
        clip.open(ais);
    } catch (UnsupportedAudioFileException | IOException | LineUnavailableException e) {
        // Best effort: report the cause but do not propagate.
        e.printStackTrace();
    }
}
/**
 * Creates a new SlotCarSoundSample that loads and opens the given sound file.
 *
 * <p>Fix: the javadoc documented a nonexistent {@code soundNumber} parameter
 * (the constructor takes a filename), and dead commented-out code from the
 * index-based variant has been removed.
 *
 * @param filename path of the sampled sound file to load
 * @throws IOException if the file cannot be read
 * @throws UnsupportedAudioFileException if the file format is not supported
 * @throws LineUnavailableException if no audio line is available for playback
 */
public SlotCarSoundSample(String filename)
        throws IOException, UnsupportedAudioFileException, LineUnavailableException {
    setFile(filename);
    open();
}
/** Creates new SampledSoundPlayer labeled number i, using sampled sound stored as preference for this number. * * @param i an index, used to look up the preferred sound file. * @throws IOException * @throws LineUnavailableException * @throws UnsupportedAudioFileException */ public SampledSoundPlayer(int i)throws IOException, UnsupportedAudioFileException, LineUnavailableException{ drumNumber=i; soundNumber=prefs.getInt(prefsKey(),0); if(soundNumber>=SampledSoundPlayer.getSoundFilePaths().size()) { throw new IOException("There is no sound number "+soundNumber+" available"); } setFile(soundNumber); open(); }
/**
 * Probes the stream and returns its audio file format without consuming it:
 * the position is marked first and always restored afterwards, so the caller
 * can still read the audio data from the beginning.
 *
 * @param stream a mark/reset-capable stream; at most 200 header bytes are read
 * @return the detected audio file format
 * @throws UnsupportedAudioFileException if the content is not recognized
 * @throws IOException if an I/O error occurs
 */
public AudioFileFormat getAudioFileFormat(InputStream stream)
        throws UnsupportedAudioFileException, IOException {
    stream.mark(200);
    AudioFileFormat format;
    try {
        format = internal_getAudioFileFormat(stream);
    } finally {
        // Rewind even on success: probing must not consume the stream.
        stream.reset();
    }
    return format;
}
/**
 * Opens the prepared clip on its audio stream and starts playback.
 * NOTE(review): the MalformedURLException catch looks unreachable for
 * open/start — presumably left over from URL-based loading; confirm.
 *
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws IOException if the stream cannot be read while opening
 * @throws LineUnavailableException if the clip's line cannot be opened
 */
public void swnat() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.open(audioStream);
        audioClip.start();
    } catch (MalformedURLException murle) {
        System.out.println(murle);
    }
}
/**
 * Obtains the audio file format of the File provided. The File must
 * point to valid audio file data.
 * @param file the File from which file format information should be
 * extracted
 * @return an <code>AudioFileFormat</code> object describing the audio file format
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioFileFormat getAudioFileFormat(File file)
        throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = null;
    FileInputStream fis = new FileInputStream(file); // throws IOException
    // part of fix for 4325421: the stream is only needed for probing, so it
    // is closed unconditionally.
    try {
        fileFormat = getCOMM(fis, false);
    } finally {
        fis.close();
    }
    return fileFormat;
}
/**
 * Obtains an audio input stream from the given URL by delegating to the
 * stream-based overload with a buffered wrapper (so mark/reset works).
 *
 * @param url the URL to read audio data from
 * @return an AudioInputStream over the URL's audio data
 * @throws UnsupportedAudioFileException if the content is unrecognized
 * @throws IOException if an I/O error occurs
 */
@Override
public final AudioInputStream getAudioInputStream(final URL url)
        throws UnsupportedAudioFileException, IOException {
    final InputStream in = url.openStream();
    try {
        return getAudioInputStream(new BufferedInputStream(in));
    } catch (final Throwable t) {
        // The caller never sees the stream on failure, so close it here.
        closeSilently(in);
        throw t;
    }
}
/**
 * Switches playback: closes the current clip and its stream, then opens the
 * secondary clip {@code aC} on stream {@code aS} and starts it.
 * NOTE(review): the MalformedURLException catch looks unreachable for these
 * calls — presumably left over from URL-based loading; confirm.
 *
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws IOException if a stream operation fails
 * @throws LineUnavailableException if the clip's line cannot be opened
 */
public void dbo() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.close();
        audioStream.close();
        aC.open(aS);
        aC.start();
    } catch (MalformedURLException murle) {
        System.out.println(murle);
    }
}
/**
 * Like {@code dbo()}, but additionally loops the secondary clip: closes the
 * current clip and stream, opens {@code aC} on {@code aS}, starts it, and
 * replays it 10 more times.
 * NOTE(review): the MalformedURLException catch looks unreachable for these
 * calls — presumably left over from URL-based loading; confirm.
 *
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws IOException if a stream operation fails
 * @throws LineUnavailableException if the clip's line cannot be opened
 */
public void dbol() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        audioClip.close();
        audioStream.close();
        aC.open(aS);
        aC.start();
        aC.loop(10); // repeat the clip 10 additional times
    } catch (MalformedURLException murle) {
        System.out.println(murle);
    }
}
/**
 * Plays the "flames" sound: opens the {@code insane} clip on the
 * {@code flames} stream and starts it.
 * NOTE(review): the MalformedURLException catch looks unreachable for
 * open/start — presumably left over from URL-based loading; confirm.
 *
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws IOException if the stream cannot be read while opening
 * @throws LineUnavailableException if the clip's line cannot be opened
 */
public void flame() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        insane.open(flames);
        insane.start();
    } catch (MalformedURLException murle) {
        System.out.println(murle);
    }
}
/**
 * Obtains an audio stream from the File provided. The File must
 * point to valid audio file data.
 * @param file the File for which the <code>AudioInputStream</code> should be
 * constructed
 * @return an <code>AudioInputStream</code> object based on the audio file data pointed
 * to by the File
 * @throws UnsupportedAudioFileException if the File does not point to valid audio
 * file data recognized by the system
 * @throws IOException if an I/O exception occurs
 */
public AudioInputStream getAudioInputStream(File file)
        throws UnsupportedAudioFileException, IOException {
    FileInputStream fis = new FileInputStream(file); // throws IOException
    AudioFileFormat fileFormat = null;
    // part of fix for 4325421: close fis only when header parsing failed;
    // on success the returned AudioInputStream takes ownership of fis.
    try {
        fileFormat = getFMT(fis, false);
    } finally {
        if (fileFormat == null) {
            fis.close();
        }
    }
    return new AudioInputStream(fis, fileFormat.getFormat(), fileFormat.getFrameLength());
}
/**
 * Plays the AWP shot sound: opens the {@code snope} clip on the {@code csgo}
 * stream and starts it.
 * NOTE(review): the MalformedURLException catch looks unreachable for
 * open/start — presumably left over from URL-based loading; confirm.
 *
 * @throws UnsupportedAudioFileException declared for interface compatibility
 * @throws IOException if the stream cannot be read while opening
 * @throws LineUnavailableException if the clip's line cannot be opened
 */
public void AWP() throws UnsupportedAudioFileException, IOException, LineUnavailableException {
    try {
        snope.open(csgo);
        snope.start();
    } catch (MalformedURLException murle) {
        System.out.println(murle);
    }
}
/**
 * Attack the enemy from a distance.
 *
 * @param target the entity to shoot at; silently ignored unless it is an
 *               active Damageable within range and line of sight
 */
@Override
@UIEventHandle(value = "Key_P", turn = "Player")
public void attack(Entity target) {
    // Reject invalid targets: missing, not damageable, dead, behind a wall.
    if (target == null) {
        return;
    }
    if (!(target instanceof Damageable)) {
        return;
    }
    if (!target.isActive()) {
        return;
    }
    if (getLevel().isThroughWall(getLocation(), target.getLocation())) {
        return;
    }
    // Maximum firing range is 15 tiles.
    final int d = getLocation().dist(target.getLocation());
    if (d > 15) {
        return;
    }
    // A shot costs 2 movement points and 1 shot from the per-turn budget;
    // refuse and deselect when either budget would be exceeded.
    if (shots_taken + 1 > total_shots || move + 2 > speed) {
        getLevel().getUIInterface().selectTile(null);
        return;
    }
    move += 2;
    shots_taken += 1;
    // Shot sound is best-effort; failures are only logged.
    try {
        SoundStuff cam = new SoundStuff();
        cam.AWP();
    } catch (IOException | LineUnavailableException | UnsupportedAudioFileException e) {
        e.printStackTrace();
    }
    ((Damageable) target).takeDamage(40);
    // Laser beam, then an impact animation on the target tile.
    getLevel().getUIInterface().startAnimation(new CompoundAnimation.Sequential(
            new Laser(getLocation(), target.getLocation()),
            new TileDamage(target.getLocation())));
    // Both budgets exhausted: end this unit's selection.
    if (move >= speed && shots_taken >= total_shots)
        getLevel().getUIInterface().selectTile(null);
}
/**
 * Creates an audio clip from the given stream, trying in order: an in-memory
 * Clip (for small sounds), a SourceDataLine, and finally a MIDI sequencer.
 *
 * @param in stream containing sampled audio or MIDI data
 * @throws IOException if no playback mechanism could be created from the data
 */
public JavaSoundAudioClip(InputStream in) throws IOException {
    if (DEBUG || Printer.debug) Printer.debug("JavaSoundAudioClip.<init>");
    // Buffer and mark so the stream can be rewound and retried as MIDI if
    // sampled-audio decoding fails.
    BufferedInputStream bis = new BufferedInputStream(in, STREAM_BUFFER_SIZE);
    bis.mark(STREAM_BUFFER_SIZE);
    boolean success = false;
    try {
        AudioInputStream as = AudioSystem.getAudioInputStream(bis);
        // load the stream data into memory
        success = loadAudioData(as);
        if (success) {
            success = false;
            // Small sounds fit in a Clip; otherwise stream via a line.
            if (loadedAudioByteLength < CLIP_THRESHOLD) {
                success = createClip();
            }
            if (!success) {
                success = createSourceDataLine();
            }
        }
    } catch (UnsupportedAudioFileException e) {
        // not an audio file — try MIDI instead
        try {
            // Probe only: getMidiFileFormat throws InvalidMidiDataException
            // when the data is not MIDI either; the returned format itself
            // is unused.
            MidiFileFormat mff = MidiSystem.getMidiFileFormat(bis);
            success = createSequencer(bis);
        } catch (InvalidMidiDataException e1) {
            success = false;
        }
    }
    if (!success) {
        throw new IOException("Unable to create AudioClip from input stream");
    }
}