public AudioRecord findAudioRecord() { for (int rate : mSampleRates) { for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT }) { for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) { try { Log.d("C.TAG", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig); int bufferSize = AudioRecord.getMinBufferSize(rate, AudioFormat.CHANNEL_IN_MONO , AudioFormat.ENCODING_PCM_16BIT); if (bufferSize != AudioRecord.ERROR_BAD_VALUE) { // check if we can instantiate and have a success AudioRecord recorder = new AudioRecord(AudioSource.MIC, DEFAULT_RATE, channelConfig, audioFormat, bufferSize); if (recorder.getState() == AudioRecord.STATE_INITIALIZED) return recorder; } } catch (Exception e) { Log.e("C.TAG", rate + "Exception, keep trying.",e); } } } } return null; }
/**
 * Factory for an ExtAudioRecorder.
 *
 * @param recordingCompressed {@code true} for compressed (AMR) recording at the
 *     fixed rate sampleRates[3]; {@code false} for uncompressed recording,
 *     probing each entry of {@code sampleRates} until one initializes.
 * @return the last recorder instance constructed (its state may be ERROR if
 *     every rate failed).
 */
@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
    ExtAudioRecorder result = null;
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false, AudioSource.MIC, sampleRates[3],
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
    } else {
        int i = 0;
        do {
            // BUG FIX: probe sampleRates[i]; the original retried sampleRates[3]
            // on every iteration, so the loop never actually tried other rates.
            result = new ExtAudioRecorder(true, AudioSource.MIC, sampleRates[i],
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            // FIX: short-circuit && instead of the eager & boolean operator.
        } while ((++i < sampleRates.length)
                && !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    }
    return result;
}
/** * * * Resets the recorder to the INITIALIZING state, as if it was just created. * In case the class was in RECORDING state, the recording is stopped. In * case of exceptions the class is set to the ERROR state. * */ public void reset() { try { if (state != State.ERROR) { release(); filePath = null; // Reset file path cAmplitude = 0; // Reset amplitude if (rUncompressed) { audioRecorder = new AudioRecord(aSource, sRate, nChannels + 1, aFormat, bufferSize); } else { mediaRecorder = new MediaRecorder(); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mediaRecorder .setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mediaRecorder .setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); } state = State.INITIALIZING; } } catch (Exception e) { Log.e(ExtAudioRecorder.class.getName(), e.getMessage()); state = State.ERROR; } }
/**
 * Creates a microphone recorder for the given loudness sensor, sized at six
 * times the minimum capture buffer. (Comments translated from German.)
 */
public AudioRecorder(LoudnessSensor sensor) {
    mSensor = sensor;
    final int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    final int audioSource = AudioSource.MIC;
    // Compute the capture buffer
    final int minBuffer = AudioRecord.getMinBufferSize(COMMON_AUDIO_FREQUENCY,
            channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    // Create the recorder
    audioInput = new AudioRecord(audioSource, COMMON_AUDIO_FREQUENCY, channelConfig,
            AudioFormat.ENCODING_PCM_16BIT, minBuffer * 6);
}
/**
 * Factory for an ExtAudioRecorder with a voice callback.
 *
 * Compressed mode uses the fixed rate at index 3; uncompressed mode walks the
 * rate table from index 3 down to 0 until a recorder reaches INITIALIZING.
 * Stores and returns the last instance built via the shared {@code result} field.
 */
public static ExtAudioRecorder getInstance(Boolean recordingCompressed, VoiceCallback callback) {
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false, AudioSource.MIC, sampleRates[3],
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
                callback);
        return result;
    }
    int i = 3;
    do {
        result = new ExtAudioRecorder(true, AudioSource.MIC, sampleRates[i],
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
                callback);
    } while ((--i >= 0) && result.getState() != ExtAudioRecorder.State.INITIALIZING);
    return result;
}
/** * * * Resets the recorder to the INITIALIZING state, as if it was just created. * In case the class was in RECORDING state, the recording is stopped. In * case of exceptions the class is set to the ERROR state. * */ public void reset() { try { if (state != State.ERROR) { release(); filePath = null; // Reset file path cAmplitude = 0; // Reset amplitude if (rUncompressed) { audioRecorder = new AudioRecord(aSource, sRate, nChannels + 1, aFormat, bufferSize); } else { mediaRecorder = new MediaRecorder(); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mediaRecorder .setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mediaRecorder .setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); } state = State.INITIALIZING; } } catch (Exception e) { Log4jLog.e(LONG_TAG, e.getMessage()); state = State.ERROR; } }
/**
 * Creates a WAV recorder, wires the output file, prepares it, and starts
 * recording, logging the recorder state after every step.
 *
 * @throws IOException propagated from the underlying recorder operations.
 */
public void startRecordingPCM() throws IOException {
    String methodTAG = "startRecordingPCM";
    //Create record object
    recWAV = new RecordingWAV(AudioSource.MIC, sampleFreq, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    log.logD(TAG, "recWAV.State after constructor is: " + recWAV.getState());
    if (recWAV.state == RecordingWAV.State.ERROR) {
        // NOTE(review): despite the "shutting down" message, execution continues
        // past this branch — confirm whether an early return is intended here.
        log.logD(methodTAG, "recWAV.State after constructor is ERROR, thus shutting down. Writing a log.");
        log.logCriticalError(TAG, methodTAG, "recWAV.State after constructor is ERROR, thus shutting down.");
    }
    recWAV.setOutputFile(fPath);
    log.logI(TAG, "recWAV.State after setOutputFile() is: " + recWAV.getState());
    recWAV.prepare();
    log.logI(TAG, "recWAV.State after prepare() is: " + recWAV.getState());
    // Visual cue to the user that recording is live.
    tPromptString.setTextColor(getResources().getColor(R.color.hltGreen));
    recWAV.start();
    log.logI(TAG, "recWAV.State after start() is: " + recWAV.getState());
}
/**
 * Factory for an ExtAudioRecorder.
 *
 * @param recordingCompressed {@code true} for compressed (AMR) recording at the
 *     fixed rate sampleRates[3]; {@code false} for uncompressed recording,
 *     probing sampleRates starting at index 2 until one initializes.
 * @return the last recorder instance constructed (state may be ERROR).
 */
@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
    ExtAudioRecorder result = null;
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false, AudioSource.MIC, sampleRates[3],
                AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
    } else {
        int i = 2;
        do {
            result = new ExtAudioRecorder(true, AudioSource.MIC, sampleRates[i],
                    AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
            // FIX: short-circuit && instead of the eager & boolean operator.
        } while ((++i < sampleRates.length)
                && !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    }
    return result;
}
/** * Resets the recorder to the INITIALIZING state, as if it was just created. * In case the class was in RECORDING state, the recording is stopped. * In case of exceptions the class is set to the ERROR state. */ public void reset() { try { if (state != State.ERROR) { release(); filePath = null; // Reset file path cAmplitude = 0; // Reset amplitude if (rUncompressed) { audioRecorder = new AudioRecord(aSource, sRate, nChannels + 1, aFormat, bufferSize); } else { mediaRecorder = new MediaRecorder(); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); } state = State.INITIALIZING; } } catch (Exception e) { Log.e(ExtAudioRecorder.class.getName(), e.getMessage()); state = State.ERROR; } }
/**
 * Capture thread: records microphone audio at {@code sampleRate} and pulls it
 * through a DFT filter (12–22 kHz, 100 bins) until {@code stopped} is set.
 * The recorder is always stopped and released on exit.
 */
@Override
public void run() {
    int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
    AudioRecord recorder = new AudioRecord(AudioSource.MIC, sampleRate, CHANNEL, ENCODING,
            minBufferSize);
    recorder.startRecording();
    PcmAudioRecordReader in = new PcmAudioRecordReader(recorder);
    PcmDftFilter dft = new PcmDftFilter(sampleRate, 12000, 22000, 100);
    data = dft.getData();
    PcmFilterReader fin = new PcmFilterReader(in, dft);
    try {
        while (!stopped) {
            // FIX: the returned sample was stored in an unused local; the call
            // itself is what drives the filter pipeline, so just invoke it.
            fin.read();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        recorder.stop();
        recorder.release();
    }
}
@SuppressWarnings("deprecation") public static AudioRecorder getInstanse(Boolean recordingCompressed) { AudioRecorder result = null; if (recordingCompressed) { result = new AudioRecorder( false, AudioSource.MIC, sampleRates[2], AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT); } // wav format else { int i = 0; do { result = new AudioRecorder( true, AudioSource.MIC, sampleRates[i], AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT); } while((++i<sampleRates.length) & !(result.getState() == AudioRecorder.State.INITIALIZING)); } return result; }
/** * Resets the recorder to the INITIALIZING state, as if it was just created. * In case the class was in RECORDING state, the recording is stopped. * In case of exceptions the class is set to the ERROR state. */ public void reset() { try { if (state != State.ERROR) { release(); filePath = null; // Reset file path cAmplitude = 0; // Reset amplitude if (rUncompressed) { audioRecorder = new AudioRecord(aSource, sRate, nChannels+1, aFormat, bufferSize); } else { mediaRecorder = new MediaRecorder(); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); } state = State.INITIALIZING; } } catch (Exception e) { Log.e(AudioRecorder.class.getName(), e.getMessage()); state = State.ERROR; } }
/**
 * Factory for an ExtAudioRecorder bound to a WAVRecorder handler.
 *
 * @param sampleRate a specific rate to use, or 0 to probe the default
 *     {@code sampleRates} table until one recorder reaches INITIALIZING.
 * @return the last recorder instance constructed (state may be ERROR).
 */
public static ExtAudioRecorder getInstance(WAVRecorder handler, String id, int sampleRate,
        int channels, int encoding) {
    ExtAudioRecorder result = null;
    // If the caller pinned a sample rate, probe only that one.
    int[] processedSampleRates = sampleRates;
    if (0 != sampleRate) {
        processedSampleRates = new int[] { sampleRate };
    }
    int i = 0;
    do {
        result = new ExtAudioRecorder(handler, id, AudioSource.MIC, processedSampleRates[i],
                channels, encoding);
        // FIX: short-circuit && instead of the eager & boolean operator.
    } while ((++i < processedSampleRates.length)
            && !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    return result;
}
/**
 * Capture loop: reads microphone audio into {@code mAudioBuffer}, feeds the
 * waveform view, and updates the decibel display until shouldContinue()
 * returns false.
 */
@Override
public void run() {
    // Audio capture is latency sensitive; raise this thread's priority.
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
    final AudioRecord recorder = new AudioRecord(AudioSource.MIC, SAMPLING_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mBufferSize);
    recorder.startRecording();
    while (shouldContinue()) {
        recorder.read(mAudioBuffer, 0, mBufferSize / 2);
        mWaveformView.updateAudioData(mAudioBuffer);
        updateDecibelLevel();
    }
    recorder.stop();
    recorder.release();
}
private byte[] fillBuffer(byte[] audioData, int bufferSize) { AudioRecord recorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize); // instantiate the // AudioRecorder if (recorder.getRecordingState() == android.media.AudioRecord.RECORDSTATE_STOPPED) recorder.startRecording(); // check to see if the Recorder // has stopped or is not // recording, and make it // record. recorder.read(audioData, 0, bufferSize); // read the PCM // audio data // into the // audioData // array if (recorder.getState() == android.media.AudioRecord.RECORDSTATE_RECORDING) recorder.stop(); // stop the recorder return audioData; }
public AudioRecord findAudioRecord() { try { int bufferSize = AudioRecord .getMinBufferSize(sampleRate, channelConfiguration, AudioFormat.ENCODING_PCM_16BIT); if (bufferSize != AudioRecord.ERROR_BAD_VALUE) { // check if we can instantiate and have a success AudioRecord recorder = new AudioRecord(AudioSource.MIC, sampleRate, channelConfiguration, AudioFormat.ENCODING_PCM_16BIT, bufferSize); Log.d("tag", "done1"); if (recorder.getState() == AudioRecord.STATE_INITIALIZED) Log.d("tag", "done1.6"); return recorder; } } catch (Exception e) { Log.d("tag", "done2"); } return null; }
/**
 * Picks the default microphone audio source for this device/API level and
 * returns its integer value as a decimal string.
 */
public static String getDefaultMicroSource() {
    // Except for galaxy S II :(
    // NOTE(review): toUpperCase() uses the default locale — confirm this is safe
    // for the device-name comparison on all locales.
    if (!isCompatible(11) && Build.DEVICE.toUpperCase().startsWith("GT-I9100")) {
        return Integer.toString(AudioSource.MIC);
    }
    if (isCompatible(10)) {
        // Note that in APIs this is only available from level 11.
        // VOICE_COMMUNICATION — raw value 0x7 used to avoid referencing the
        // level-11 constant while compiling against an older API.
        return Integer.toString(0x7);
    }
    /*
     * Too risky in terms of regressions else if (isCompatible(4)) { //
     * VOICE_CALL return 0x4; }
     */
    /*
     * if(android.os.Build.MODEL.equalsIgnoreCase("X10i")) { // VOICE_CALL
     * return Integer.toString(0x4); }
     */
    /*
     * Not relevant anymore, atrix I tested sounds fine with that
     * if(android.os.Build.DEVICE.equalsIgnoreCase("olympus")) { //Motorola
     * atrix bug // CAMCORDER return Integer.toString(0x5); }
     */
    return Integer.toString(AudioSource.DEFAULT);
}
private void createAudioRecord() throws InitializationException { // The AudioRecord configurations parameters used here, are guaranteed // to be supported on all devices. // AudioFormat.CHANNEL_IN_MONO should be used in place of deprecated // AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available for // API level 3. // Unlike AudioTrack buffer, AudioRecord buffer could be larger than // minimum without causing any problems. But minimum works well. final int audioRecordBufferSizeInBytes = AudioRecord.getMinBufferSize( SpeechTrainerConfig.SAMPLE_RATE_HZ, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT); if (audioRecordBufferSizeInBytes <= 0) { throw new InitializationException("Failed to initialize recording."); } // CHANNEL_IN_MONO is guaranteed to work on all devices. // ENCODING_PCM_16BIT is guaranteed to work on all devices. audioRecord = new AudioRecord(AudioSource.MIC, SpeechTrainerConfig.SAMPLE_RATE_HZ, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, audioRecordBufferSizeInBytes); if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { audioRecord = null; throw new InitializationException("Failed to initialize recording."); } }
/**
 * Builds an AudioRecord via the API-23 builder, using the voice-communication
 * source with 16-bit PCM at the requested rate and channel mask.
 */
@TargetApi(23)
private AudioRecord createAudioRecordOnMarshmallowOrHigher(
        int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
    Logging.d(TAG, "createAudioRecordOnMarshmallowOrHigher");
    final AudioFormat format = new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(sampleRateInHz)
            .setChannelMask(channelConfig)
            .build();
    return new AudioRecord.Builder()
            .setAudioSource(AudioSource.VOICE_COMMUNICATION)
            .setAudioFormat(format)
            .setBufferSizeInBytes(bufferSizeInBytes)
            .build();
}
/**
 * Recording thread: streams raw PCM from the microphone into a file while
 * {@code isRecording} is set, then converts the raw capture to WAV.
 */
@Override
public void run() {
    FileOutputStream out = prepareWriting();
    if (out == null) {
        return; // output file could not be prepared; nothing to record into
    }
    AudioRecord record = new AudioRecord(AudioSource.VOICE_RECOGNITION, /*AudioSource.MIC*/
            SAMPLING_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);
    record.startRecording();
    int read = 0;
    while (isRecording) {
        read = record.read(audioBuffer, 0, bufferSize);
        // Skip error codes and empty reads instead of processing garbage.
        if ((read == AudioRecord.ERROR_INVALID_OPERATION)
                || (read == AudioRecord.ERROR_BAD_VALUE)
                || (read <= 0)) {
            continue;
        }
        proceed();
        write(out);
    }
    record.stop();
    record.release();
    finishWriting(out);
    convertRawToWav();
}
/** * Returns {@code true} if speech should be silenced. Does not prevent * haptic or auditory feedback from occurring. The controller will run * utterance completion actions immediately for silenced utterances. * <p> * Silences speech in the following cases: * <ul> * <li>Speech recognition is active and the user is not using a headset * </ul> */ @SuppressWarnings("deprecation") private boolean shouldSilenceSpeech(FeedbackItem item) { if (item == null) { return false; } // Unless otherwise flagged, don't speak during speech recognition. return !item.hasFlag(FeedbackItem.FLAG_DURING_RECO) && AudioSystemCompatUtils.isSourceActive(AudioSource.VOICE_RECOGNITION) && !mAudioManager.isBluetoothA2dpOn() && !mAudioManager.isWiredHeadsetOn(); }
/**
 * Probes sample-rate/format/channel combinations until one yields an
 * initialized AudioRecord; the winning settings are stored in
 * {@code selectedRate}, {@code selectedChannel}, and {@code selectedBPP}.
 *
 * @return an initialized AudioRecord, or {@code null} if nothing works.
 */
private AudioRecord getAudioRecorder() {
    for (int rate : sampleRates) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT,
                AudioFormat.ENCODING_PCM_8BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) {
                try {
                    bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
                    if (bufferSize > 0) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate,
                                channelConfig, audioFormat, bufferSize);
                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            selectedRate = rate;
                            // Only mono is probed above, so these ternaries resolve to
                            // 1 channel; the stereo comparison is kept for completeness.
                            selectedChannel = channelConfig == AudioFormat.CHANNEL_IN_STEREO
                                    ? (short) 2 : (short) 1;
                            selectedBPP = audioFormat == AudioFormat.ENCODING_PCM_16BIT
                                    ? (short) 16 : (short) 8;
                            String format = audioFormat == AudioFormat.ENCODING_PCM_16BIT
                                    ? "PCM 16 Bit" : "PCM 8 Bit";
                            String channels = channelConfig == AudioFormat.CHANNEL_IN_STEREO
                                    ? "Stereo" : "Mono";
                            String diags = "Audio recorded using following settings: Rate: "
                                    + String.valueOf(rate) + " " + "Audio Format: " + format
                                    + " " + "Channel Config: " + channels;
                            JTApp.logMessage(TAG, JTApp.LOG_SEVERITY_INFO, diags);
                            return recorder;
                        }
                    }
                } catch (Exception ignored) {
                    // Unsupported combination on this device; try the next one.
                }
            }
        }
    }
    return null; // no working configuration found
}
/**
 * Probes sample rates, encodings, and channel configurations until an
 * AudioRecord initializes; the winning buffer size is stored in
 * {@code gBufferSize}.
 *
 * @return an initialized AudioRecord, or {@code null} if nothing works.
 */
public static AudioRecord findAudioRecord() {
    for (int rate : mSampleRates) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_8BIT,
                AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_STEREO,
                    AudioFormat.CHANNEL_IN_MONO }) {
                try {
                    int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
                    Log.d(TAG, "findAudioRecord:Attempting rate " + rate + "Hz, bits: "
                            + audioFormat + ", channel: " + channelConfig
                            + ", bufferSizeInBytes:" + bufferSize);
                    if (bufferSize > 0) {
                        AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate,
                                channelConfig, audioFormat, bufferSize);
                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            gBufferSize = bufferSize;
                            return recorder;
                        }
                        // Failed to initialize: free the instance before the next try.
                        recorder.release();
                        recorder = null;
                    }
                } catch (Exception e) {
                    Log.e(TAG, rate + "Exception, keep trying.", e);
                }
            }
        }
    }
    return null; // no supported configuration found
}
/**
 * Instantiates a new recorder thread, reading the sample rate from shared
 * preferences and sizing the capture buffer at eight times the minimum.
 *
 * @param context the application context
 * @param input the audio input this thread feeds
 */
public RecorderThread(MoSTApplication context, InputAudio input) {
    super("MoST InputAudio Recorder Thread");
    final SharedPreferences prefs =
            context.getSharedPreferences(MoSTApplication.PREF_INPUT, Context.MODE_PRIVATE);
    final int sampleRate =
            prefs.getInt(InputAudio.PREF_KEY_SAMPLE_RATE, InputAudio.PREF_DEFAULT_SAMPLE_RATE);
    _bufferSize = AudioRecord.getMinBufferSize(sampleRate, CHANNEL_CONFIGURATION, ENCODING) * 8;
    _recorder = new AudioRecord(AudioSource.MIC, sampleRate, CHANNEL_CONFIGURATION, ENCODING,
            _bufferSize);
    _recording = new AtomicBoolean(false);
    _input = input;
}
private int initAudioRecord(int rate) { try { Log.v("===========Attempting rate ", rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig); bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat); if (bufferSize != AudioRecord.ERROR_BAD_VALUE) { // check if we can instantiate and have a success recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize); if (recorder.getState() == AudioRecord.STATE_INITIALIZED) { Log.v("===========final rate ", rate + "Hz, bits: " + audioFormat + ", channel: " + channelConfig); return rate; } } } catch (Exception e) { Log.v("error", "" + rate); } return -1; }
/**
 * Capture/encode/send loop: records microphone audio, encodes it one codec
 * frame at a time, and pushes encoded frames to the network while inPump()
 * holds. Aborts via onException() if the microphone fails to initialize.
 */
@Override
public void run() {
    mic = new AudioRecord(
            AudioSource.VOICE_COMMUNICATION,
            SampleRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            // buffer.length * 2: size is in bytes — presumably `buffer` is a
            // short[] (2 bytes per sample); TODO confirm against its declaration.
            buffer.length * 2);
    if (mic.getState() != AudioRecord.STATE_INITIALIZED) {
        Logg.d(TAG, "couldn't initialize microphone");
        onException();
        return;
    }
    mic.startRecording();
    while (inPump()) {
        mic.read(buffer, 0, buffer.length);
        // Encode the captured buffer one codec frame at a time.
        for (int i = 0; i < buffer.length; i += Codecs.AudioFrameSize) {
            int outlen = streamer.getCodecs().encodeAudioFrame(buffer, i, packet);
            if (outlen > 0) {
                // Only send when the network layer is ready to accept frames.
                if (streamer.getNetwork().isReady()) {
                    streamer.getNetwork().sendAudioFrame(packet, outlen);
                }
            }
        }
    }
    mic.stop();
    mic.release();
}
/**
 * Returns an AssistantManager if all required parameters have been supplied.
 *
 * @return An inactive AssistantManager. Call {@link EmbeddedAssistant#connect()} to start
 *         it.
 */
public EmbeddedAssistant build() {
    // Validate that every mandatory collaborator was configured on the builder.
    if (mEmbeddedAssistant.mRequestCallback == null) {
        throw new NullPointerException("There must be a defined RequestCallback");
    }
    if (mEmbeddedAssistant.mConversationCallback == null) {
        throw new NullPointerException("There must be a defined ConversationCallback");
    }
    if (mEmbeddedAssistant.mUserCredentials == null) {
        throw new NullPointerException("There must be provided credentials");
    }
    if (mSampleRate == 0) {
        throw new NullPointerException("There must be a defined sample rate");
    }
    final int audioEncoding = AudioFormat.ENCODING_PCM_16BIT;

    // Construct audio configurations (gRPC-side encoding settings).
    mEmbeddedAssistant.mAudioInConfig = AudioInConfig.newBuilder()
            .setEncoding(AudioInConfig.Encoding.LINEAR16)
            .setSampleRateHertz(mSampleRate)
            .build();
    mEmbeddedAssistant.mAudioOutConfig = AudioOutConfig.newBuilder()
            .setEncoding(AudioOutConfig.Encoding.LINEAR16)
            .setSampleRateHertz(mSampleRate)
            .setVolumePercentage(mEmbeddedAssistant.mVolume)
            .build();

    // Initialize Audio framework parameters.
    mEmbeddedAssistant.mAudioInputFormat = new AudioFormat.Builder()
            .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
            .setEncoding(audioEncoding)
            .setSampleRate(mSampleRate)
            .build();
    mEmbeddedAssistant.mAudioInputBufferSize = AudioRecord.getMinBufferSize(
            mEmbeddedAssistant.mAudioInputFormat.getSampleRate(),
            mEmbeddedAssistant.mAudioInputFormat.getChannelMask(),
            mEmbeddedAssistant.mAudioInputFormat.getEncoding());
    mEmbeddedAssistant.mAudioOutputFormat = new AudioFormat.Builder()
            .setChannelMask(AudioFormat.CHANNEL_OUT_MONO)
            .setEncoding(audioEncoding)
            .setSampleRate(mSampleRate)
            .build();
    mEmbeddedAssistant.mAudioOutputBufferSize = AudioTrack.getMinBufferSize(
            mEmbeddedAssistant.mAudioOutputFormat.getSampleRate(),
            mEmbeddedAssistant.mAudioOutputFormat.getChannelMask(),
            mEmbeddedAssistant.mAudioOutputFormat.getEncoding());

    // create new AudioRecord to workaround audio routing issues.
    mEmbeddedAssistant.mAudioRecord = new AudioRecord.Builder()
            .setAudioSource(AudioSource.VOICE_RECOGNITION)
            .setAudioFormat(mEmbeddedAssistant.mAudioInputFormat)
            .setBufferSizeInBytes(mEmbeddedAssistant.mAudioInputBufferSize)
            .build();
    if (mEmbeddedAssistant.mAudioInputDevice != null) {
        // Route capture through the caller-supplied input device when possible.
        boolean result = mEmbeddedAssistant.mAudioRecord.setPreferredDevice(
                mEmbeddedAssistant.mAudioInputDevice);
        if (!result) {
            Log.e(TAG, "failed to set preferred input device");
        }
    }
    return mEmbeddedAssistant;
}
/**
 * Default constructor.
 *
 * Instantiates a new recorder; in case of compressed recording the audio
 * parameters can be left as 0. In case of errors, no exception is thrown,
 * but the state is set to ERROR.
 */
@SuppressWarnings("deprecation")
public ExtAudioRecorder(boolean uncompressed, int audioSource, int sampleRate,
        int channelConfig, int audioFormat) {
    try {
        rUncompressed = uncompressed;
        if (rUncompressed) { // RECORDING_UNCOMPRESSED
            // Derive sample width and channel count from the format constants.
            if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
                bSamples = 16;
            } else {
                bSamples = 8;
            }
            if (channelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
                nChannels = 1;
            } else {
                nChannels = 2;
            }
            aSource = audioSource;
            sRate = sampleRate;
            aFormat = audioFormat;
            // framePeriod: samples per notification interval; bufferSize converts
            // that to bytes (the leading 2 appears to be a safety factor —
            // original intent undocumented).
            framePeriod = sampleRate * TIMER_INTERVAL / 1000;
            bufferSize = framePeriod * 2 * bSamples * nChannels / 8;
            if (bufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig,
                    audioFormat)) { // Check to make sure buffer size is not smaller
                                    // than the smallest allowed one
                bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
                        audioFormat);
                // Set frame period and timer interval accordingly
                framePeriod = bufferSize / (2 * bSamples * nChannels / 8);
                Log.w(ExtAudioRecorder.class.getName(),
                        "Increasing buffer size to " + Integer.toString(bufferSize));
            }
            audioRecorder = new AudioRecord(audioSource, sampleRate, channelConfig,
                    audioFormat, bufferSize);
            if (audioRecorder.getState() != AudioRecord.STATE_INITIALIZED)
                throw new Exception("AudioRecord initialization failed");
            // Periodic position callbacks drive the read loop via updateListener.
            audioRecorder.setRecordPositionUpdateListener(updateListener);
            audioRecorder.setPositionNotificationPeriod(framePeriod);
        } else { // RECORDING_COMPRESSED
            mediaRecorder = new MediaRecorder();
            mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        }
        cAmplitude = 0;
        filePath = null;
        state = State.INITIALIZING;
    } catch (Exception e) {
        // A null message would crash Log.e, hence the branch.
        if (e.getMessage() != null) {
            Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
        } else {
            Log.e(ExtAudioRecorder.class.getName(),
                    "Unknown error occured while initializing recording");
        }
        state = State.ERROR;
    }
}
/**
 * Capture thread: fills a rotating pool of read buffers with microphone
 * samples and queues each full buffer for consumers. While {@code m_fRunning}
 * is false the recorder is stopped and the thread blocks in wait() until
 * notified to resume.
 */
@Override
public void run() {
    android.os.Process
            .setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
    AudioRecord recorder = null;
    int ix = 0;
    try {
        // ... initialize
        recorder = new AudioRecord(AudioSource.DEFAULT, AudioIn.SAMPLING_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
                AudioIn.READ_BUFFER_SIZE * AudioIn.BYTES_IN_SAMPLE);
        // ... loop
        recorder.setPositionNotificationPeriod(AudioIn.READ_BUFFER_SIZE);
        recorder.setRecordPositionUpdateListener(this);
        int nReadBytes;
        while (true) {
            if (!m_fRunning) {
                // Paused: stop capture and block until another thread notifies us.
                recorder.stop();
                synchronized (this) {
                    wait();
                }
            }
            recorder.startRecording();
            // Round-robin over the fixed pool of read buffers.
            AudioReadBuffer readBuffer = m_ReadBuffers[ix++ % m_ReadBuffers.length];
            synchronized (readBuffer) {
                // Log.d("RECORDING", "Writing buffer"
                // + readBuffer);
                readBuffer.eStatus = EBufferStatus.eBufferStatus_Filling;
                short[] arBuffer = readBuffer.arSamples;
                // Zero the buffer before reading so stale samples never leak through.
                for (int i = 0; i < arBuffer.length; i++) {
                    arBuffer[i] = 0;
                }
                nReadBytes = recorder.read(arBuffer, 0, arBuffer.length);
                // Log.i(TAG, "buffer received");
                readBuffer.eStatus = EBufferStatus.eBufferStatus_Full;
                m_queReadyBuffers.put(readBuffer);
            }
            if ((AudioRecord.ERROR_INVALID_OPERATION == nReadBytes)
                    || (nReadBytes == AudioRecord.ERROR_BAD_VALUE)) {
                Log.e("AUDIO_IN/ERROR", "Read failed :(");
            }
        }
    } catch (Throwable x) {
        Log.e("AUDIO/IN", "Error reading voice audio", x);
    } finally {
        // Stop only a recorder that actually reached the initialized state.
        if ((recorder != null)
                && (recorder.getState() == AudioRecord.STATE_INITIALIZED)) {
            recorder.stop();
        }
    }
}
/**
 * Default constructor
 * <p/>
 * Instantiates a new recorder, in case of compressed recording the parameters can be left as 0.
 * In case of errors, no exception is thrown, but the state is set to ERROR
 */
@SuppressWarnings("deprecation")
public ExtAudioRecorder(boolean uncompressed, int audioSource, int sampleRate,
        int channelConfig, int audioFormat) {
    try {
        rUncompressed = uncompressed;
        if (rUncompressed) { // RECORDING_UNCOMPRESSED
            // Derive sample width and channel count from the format constants.
            if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) {
                bSamples = 16;
            } else {
                bSamples = 8;
            }
            if (channelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO) {
                nChannels = 1;
            } else {
                nChannels = 2;
            }
            aSource = audioSource;
            sRate = sampleRate;
            aFormat = audioFormat;
            // framePeriod: samples per notification interval; bufferSize converts
            // that to bytes (the leading 2 appears to be a safety factor —
            // original intent undocumented).
            framePeriod = sampleRate * TIMER_INTERVAL / 1000;
            bufferSize = framePeriod * 2 * bSamples * nChannels / 8;
            if (bufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig,
                    audioFormat)) {
                // Check to make sure buffer size is not smaller than the smallest allowed one
                bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
                // Set frame period and timer interval accordingly
                framePeriod = bufferSize / (2 * bSamples * nChannels / 8);
                Log.w(ExtAudioRecorder.class.getName(),
                        "Increasing buffer size to " + Integer.toString(bufferSize));
            }
            audioRecorder = new AudioRecord(audioSource, sampleRate, channelConfig,
                    audioFormat, bufferSize);
            if (audioRecorder.getState() != AudioRecord.STATE_INITIALIZED)
                throw new Exception("AudioRecord initialization failed");
            // Periodic position callbacks drive the read loop via updateListener.
            audioRecorder.setRecordPositionUpdateListener(updateListener);
            audioRecorder.setPositionNotificationPeriod(framePeriod);
        } else { // RECORDING_COMPRESSED
            mediaRecorder = new MediaRecorder();
            mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
            mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
            mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        }
        cAmplitude = 0;
        filePath = null;
        state = State.INITIALIZING;
    } catch (Exception e) {
        // A null message would crash Log.e, hence the branch.
        if (e.getMessage() != null) {
            Log.e(ExtAudioRecorder.class.getName(), e.getMessage());
        } else {
            Log.e(ExtAudioRecorder.class.getName(),
                    "Unknown error occured while initializing recording");
        }
        state = State.ERROR;
    }
}
/** * Default constructor. Leaves the recorder in {@link State#INITIALIZING}, except if some kind * of error happens. * * @param sampleRate * Audio sampling rate. */ public WaveRecorder(int sampleRate) { try { bitsPerSample = 16; numChannels = 1; audioSource = AudioSource.MIC; this.sampleRate = sampleRate; audioFormat = AudioFormat.ENCODING_PCM_16BIT; framePeriod = sampleRate * TIMER_INTERVAL / 1000; bufferSize = framePeriod * 2 * bitsPerSample * numChannels / 8; if (bufferSize < AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT)) { // increase buffer size if needed bufferSize = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT); // Set frame period and timer interval accordingly framePeriod = bufferSize / (2 * bitsPerSample * numChannels / 8); Log.w(TAG, "Increasing buffer size to " + bufferSize); } aRecorder = new AudioRecord(audioSource, sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize); if (aRecorder.getState() != AudioRecord.STATE_INITIALIZED) { throw new Exception("AudioRecord initialization failed"); } aRecorder.setRecordPositionUpdateListener(updateListener); aRecorder.setPositionNotificationPeriod(framePeriod); fPath = null; state = State.INITIALIZING; } catch (Exception e) { if (e.getMessage() != null) { Log.e(TAG, e.getMessage()); } else { Log.e(TAG, "Unknown error occured while initializing recording"); } state = State.ERROR; } }
/**
 * Activity entry point: configures the FSK decoder, starts microphone capture
 * matched to the decoder's settings, and spawns the feeder thread.
 */
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    /// INIT FSK CONFIG
    try {
        mConfig = new FSKConfig(FSKConfig.SAMPLE_RATE_44100, FSKConfig.PCM_16BIT,
                FSKConfig.CHANNELS_MONO, FSKConfig.SOFT_MODEM_MODE_4, FSKConfig.THRESHOLD_20P);
    } catch (IOException e1) {
        e1.printStackTrace();
    }

    /// INIT FSK DECODER
    // Decoded bytes are appended to the result TextView on the UI thread.
    mDecoder = new FSKDecoder(mConfig, new FSKDecoderCallback() {
        @Override
        public void decoded(byte[] newData) {
            final String text = new String(newData);
            runOnUiThread(new Runnable() {
                public void run() {
                    TextView view = ((TextView) findViewById(R.id.result));
                    view.setText(view.getText() + text);
                }
            });
        }
    });
    ///

    //make sure that the settings of the recorder match the settings of the decoder
    //most devices cant record anything but 44100 samples in 16bit PCM format...
    mBufferSize = AudioRecord.getMinBufferSize(FSKConfig.SAMPLE_RATE_44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

    //scale up the buffer... reading larger amounts of data
    //minimizes the chance of missing data because of thread priority
    mBufferSize *= 10;

    //again, make sure the recorder settings match the decoder settings
    mRecorder = new AudioRecord(AudioSource.MIC, FSKConfig.SAMPLE_RATE_44100,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mBufferSize);

    if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
        mRecorder.startRecording();

        //start a thread to read the audio data
        Thread thread = new Thread(mRecordFeed);
        thread.setPriority(Thread.MAX_PRIORITY);
        thread.start();
    } else {
        Log.i("FSKDecoder", "Please check the recorder settings, something is wrong!");
    }
}
/**
 * Background demodulator: continuously reads microphone samples and runs a
 * Goertzel tone detector over fixed windows, feeding on/off decisions into
 * FMSimpleRecv. The loop never exits on its own (the AsyncTask is presumably
 * cancelled externally — TODO confirm).
 */
@Override
protected Void doInBackground(Void... arg0) {
    log.v("audioIn thread started");
    final int N = 4096;// AudioRecord.getMinBufferSize(SAMPLE_RATE,
    // AudioFormat.CHANNEL_IN_MONO,
    // AudioFormat.ENCODING_PCM_16BIT);
    short buffer[] = new short[N];
    log.v("N=%d", N);
    // N * 2 bytes: the record buffer holds N 16-bit samples.
    AudioRecord recorder = new AudioRecord(AudioSource.MIC, SAMPLE_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, N * 2);
    recorder.startRecording();
    // Precompute the Goertzel coefficient for each input tone frequency.
    final double COEFF[] = new double[IN_TONES.length];
    for (int i = 0; i < IN_TONES.length; i++) {
        COEFF[i] = 2.0 * Math.cos(2 * Math.PI * IN_TONES[i] / SAMPLE_RATE);
    }
    for (int i = 0; i < msgs.length; i++) {
        msgs[i] = new Msg();
    }
    // Hysteresis: "on" above ON_THRESHOLD, "off" below OFF_THRESHOLD;
    // magnitudes in between leave the previous state unchanged.
    final double ON_THRESHOLD = 8000;//16384;
    final double OFF_THRESHOLD = ON_THRESHOLD * 1 / 2;
    while (true) {
        // Block until a full buffer of N samples has been read.
        int read = 0;
        while (read < N) {
            read += recorder.read(buffer, read, N - read);
        }
        // Slide over the buffer one detection window at a time.
        for (int j = 0; j < read / WINDOW_SIZE; j++) {
            stamp++;
            // Only the first tone's coefficient is evaluated here.
            double mag = goertzelSimple(buffer, j * WINDOW_SIZE, COEFF[0]);
            if (mag > maxMag)
                maxMag = mag;
            if (mag < minMag)
                minMag = mag;
            if (mag > ON_THRESHOLD) {
                // log.v("mag = %f",mag);
                FMSimpleRecv(true);
            } else if (mag < OFF_THRESHOLD) {
                FMSimpleRecv(false);
            }
        }
    }
    // Unreachable: the loop never exits; cleanup was left commented out.
    // recorder.stop();
    // recorder.release();
    // return null;
}
private void createAudioRecord() { if (mSampleRate > 0 && mAudioFormat > 0 && mChannelConfig > 0) { mAudioRecord = new AudioRecord(AudioSource.MIC, mSampleRate, mChannelConfig, mAudioFormat, mBufferSize); return; } // Find best/compatible AudioRecord for (int sampleRate : new int[] { 8000, 11025, 16000, 22050, 32000, 44100, 47250, 48000 }) { for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT, AudioFormat.ENCODING_PCM_8BIT }) { for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_IN_STEREO, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.CHANNEL_CONFIGURATION_STEREO }) { // Try to initialize try { mBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); if (mBufferSize < 0) { continue; } mBuffer = new short[mBufferSize]; mAudioRecord = new AudioRecord(AudioSource.MIC, sampleRate, channelConfig, audioFormat, mBufferSize); if (mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) { mSampleRate = sampleRate; mAudioFormat = audioFormat; mChannelConfig = channelConfig; return; } mAudioRecord.release(); mAudioRecord = null; } catch (Exception e) { // Do nothing } } } } }
/** * Default constructor * * Instantiates a new recorder, in case of compressed recording the parameters can be left as 0. * In case of errors, no exception is thrown, but the state is set to ERROR */ @SuppressWarnings("deprecation") public AudioRecorder(boolean uncompressed, int audioSource, int sampleRate, int channelConfig, int audioFormat) { try { rUncompressed = uncompressed; if (rUncompressed) { // RECORDING_UNCOMPRESSED if (audioFormat == AudioFormat.ENCODING_PCM_16BIT) { bSamples = 16; } else { bSamples = 8; } if (channelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO) { nChannels = 1; } else { nChannels = 2; } aSource = audioSource; sRate = sampleRate; aFormat = audioFormat; framePeriod = sampleRate * TIMER_INTERVAL / 1000; bufferSize = framePeriod * 2 * bSamples * nChannels / 8; if (bufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat)) { // Check to make sure buffer size is not smaller than the smallest allowed one bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); // Set frame period and timer interval accordingly framePeriod = bufferSize / ( 2 * bSamples * nChannels / 8 ); Log.w(AudioRecorder.class.getName(), "Increasing buffer size to " + Integer.toString(bufferSize)); } audioRecorder = new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSize); if (audioRecorder.getState() != AudioRecord.STATE_INITIALIZED) throw new Exception("AudioRecord initialization failed"); audioRecorder.setRecordPositionUpdateListener(updateListener); audioRecorder.setPositionNotificationPeriod(framePeriod); } else { // RECORDING_COMPRESSED mediaRecorder = new MediaRecorder(); mediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC); mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); } cAmplitude = 0; filePath = null; state = State.INITIALIZING; } catch (Exception e) { if (e.getMessage() != null) { 
Log.e(AudioRecorder.class.getName(), e.getMessage()); } else { Log.e(AudioRecorder.class.getName(), "Unknown error occured while initializing recording"); } state = State.ERROR; } }
@Override public boolean initCapturer() { // initalize audio mode audioManagerMode.acquireMode(audioManager); // get the minimum buffer size that can be used int minRecBufSize = AudioRecord.getMinBufferSize( captureSettings.getSampleRate(), NUM_CHANNELS_CAPTURING == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT ); // double size to be more safe int recBufSize = minRecBufSize * 2; // release the object if (noiseSuppressor != null) { noiseSuppressor.release(); noiseSuppressor = null; } if (echoCanceler != null) { echoCanceler.release(); echoCanceler = null; } if (audioRecord != null) { audioRecord.release(); audioRecord = null; } try { audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION, captureSettings.getSampleRate(), NUM_CHANNELS_CAPTURING == 1 ? AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, recBufSize); if (NoiseSuppressor.isAvailable()) { noiseSuppressor = NoiseSuppressor.create(audioRecord.getAudioSessionId()); } if (AcousticEchoCanceler.isAvailable()) { echoCanceler = AcousticEchoCanceler.create(audioRecord.getAudioSessionId()); } } catch (Exception e) { throw new RuntimeException(e.getMessage()); } // check that the audioRecord is ready to be used if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { throw new RuntimeException("Audio capture is not initialized " + captureSettings.getSampleRate()); } shutdownCaptureThread = false; new Thread(captureThread).start(); return true; }
@SuppressWarnings("unused") private int InitRecording(int audioSource, int sampleRate) { audioSource = AudioSource.VOICE_COMMUNICATION; // get the minimum buffer size that can be used int minRecBufSize = AudioRecord.getMinBufferSize( sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT); // DoLog("min rec buf size is " + minRecBufSize); // double size to be more safe int recBufSize = minRecBufSize * 2; // On average half of the samples have been recorded/buffered and the // recording interval is 1/100s. _bufferedRecSamples = sampleRate / 200; // DoLog("rough rec delay set to " + _bufferedRecSamples); // release the object if (_audioRecord != null) { _audioRecord.release(); _audioRecord = null; } try { _audioRecord = new AudioRecord( audioSource, sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, recBufSize); } catch (Exception e) { DoLog(e.getMessage()); return -1; } // check that the audioRecord is ready to be used if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { // DoLog("rec not initialized " + sampleRate); return -1; } // DoLog("rec sample rate set to " + sampleRate); return _bufferedRecSamples; }