// file: $isip/class/java/AudioInterface/AudioInterface.java // version: $Id: AudioInterface.java 10230 2005-09-13 14:29:30Z stanley $ // // import necessary java libraries // import java.io.*; import javax.sound.sampled.*; /** * Presents a simple interface to read and write from a line. */ public class AudioInterface { //------------------------------------------------------------------------- // // public constants // //------------------------------------------------------------------------- /** * The constant for read mode */ public static final int READ = 10; /** * The constant for write mode */ public static final int WRITE = 20; //------------------------------------------------------------------------ // // protected data // //----------------------------------------------------------------------- /** * This object is used to set the format of the audio data such as * the encoding, sample size, sample rate, number of channels, * byte order, frame rate and frame size */ protected AudioFormat format_d; /** * Create an object of TargetDataLine to open and read the audio * data */ protected TargetDataLine target_d; /** * Create an object of SourceDataLine to open and write the audio * data */ protected SourceDataLine source_d; /** * Create a circular buffer object */ protected AudioCircularBuffer cb_d; /** * The flag that is set when recording is taking place */ protected static boolean recording_on = false; /** * The flag that is set when playback is taking place */ protected static boolean stop_play_back = false; /** * Gives the specific type of data representation used for an * audio stream. */ protected AudioFormat.Encoding encoding_d; /** * The number of bits in each sample of a sound. 
*/ protected int sample_size_d; /** * The number of samples played or recorded per second */ protected float sample_rate_d; /** * The number of audio channels */ protected int num_channels_d; /** * Indicates whether the audio data is stored in big-endian or * little-endian order. */ protected boolean big_endian_d; /** * The number of bytes in each frame */ protected int frame_size_d; /** * The number of frames played or recorded per second */ protected float frame_rate_d; /** * The number of bytes in each sample of a sound. */ protected int sampleSizeInBytes; /** * The length of the buffer that should be read from the * dataline's buffer for each call */ protected int buffer_length_to_read_d = 500; /** * The length of the buffer that should be written the dataline's * buffer. */ protected int buffer_length_to_write_d = 500; /** * The buffer that contains the data to be played back */ protected byte[] buf_playback; /** * Set the mode to read or write. */ protected int mode_d; //------------------------------------------------------------------------- // // public methods // //------------------------------------------------------------------------- /** * Constructor for the class. */ public AudioInterface() { // initialize the circular buffer. // cb_d = new AudioCircularBuffer(AudioCircularBuffer.INFINITE_SIZE, false); cb_d.clear(); } //------------------------------------------------------------------------- // // class-specific public methods: // set methods // //------------------------------------------------------------------------- /** * Sets the audio parameters for the format_d data field. It first * checks if the AudioFormat object passed as an argument is * supported by the line. If it is supported the format_d data field * is initialized with those audio parameters. * * @param format_a An AudioFormat object * * @return a boolean value indicating status */ public boolean setAudioFormat(AudioFormat format_a) { DataLine.Info info; // read mode. 
// if (mode_d == READ) { info = new DataLine.Info(TargetDataLine.class, format_a); // check if the format is supported. // if (!AudioSystem.isLineSupported(info)) { System.out.println("format not supported - setFormat"); return false; } // set the format // this.format_d = new AudioFormat(format_a.getEncoding(), format_a.getSampleRate(), format_a.getSampleSizeInBits(), format_a.getChannels(), format_a.getFrameSize(), format_a.getFrameRate(), format_a.isBigEndian()); // set the values for all the individual parameters // encoding_d = format_d.getEncoding(); sample_rate_d = format_d.getSampleRate(); sample_size_d = format_d.getSampleSizeInBits(); num_channels_d = format_d.getChannels(); frame_size_d = format_d.getFrameSize(); frame_rate_d = format_d.getFrameRate(); big_endian_d = format_d.isBigEndian(); return true; } // write mode. // else if (mode_d == WRITE) { info = new DataLine.Info(SourceDataLine.class, format_a); // check if the format is supported. // if (!AudioSystem.isLineSupported(info)) { return false; } } // set the format // this.format_d = new AudioFormat(format_a.getEncoding(), format_a.getSampleRate(), format_a.getSampleSizeInBits(), format_a.getChannels(), format_a.getFrameSize(), format_a.getFrameRate(), format_a.isBigEndian()); // set the values for all the individual parameters // encoding_d = format_d.getEncoding(); sample_rate_d = format_d.getSampleRate(); sample_size_d = format_d.getSampleSizeInBits(); num_channels_d = format_d.getChannels(); frame_size_d = format_d.getFrameSize(); frame_rate_d = format_d.getFrameRate(); big_endian_d = format_d.isBigEndian(); return true; } /** * Set the encoding (PCM_SIGNED, PCM_UNSIGNED, ULAW,ALAW) * * @param encoding_a a string object containing the encoding type * * @return a boolean value indicating status */ public boolean setEncoding(AudioFormat.Encoding encoding_a) { // create an AudioFormat.Encoding object with the user given // encoding type // encoding_d = encoding_a; return true; } /** * Set the 
sample size * * @param sample_size_a an integer containing the sample size * * @return a boolean value indicating status */ public boolean setSampleSize(int sample_size_a) { // set the sample size // sample_size_d = sample_size_a; return true; } /** * Set the sample rate * * @param sample_rate_a a float containing the sample rate * * @return a boolean value indicating status */ public boolean setSampleRate(float sample_rate_a) { // set the sample rate // sample_rate_d = sample_rate_a; return true; } /** * Set the number of channels * * @param num_channels_a an integer containing the number of * channels * * @return a boolean value indicating status */ public boolean setNumChannels(int num_channels_a) { // set the number of channels // num_channels_d = num_channels_a; return true; } /** * Set the byte order (BigEndian , LittleEndian) * * @param big_endian_a a boolean value indicating if it is a big * endian * * @return a boolean value indicating status */ public boolean setBigEndian(boolean big_endian_a) { // set the byte order // big_endian_d = big_endian_a; return true; } /** * Set the frame size * * @param frame_size_a an integer containing the frame size * * @return a boolean value indicating status */ public boolean setFrameSize(int frame_size_a) { // set the frame size // frame_size_d = frame_size_a; return true; } /** * Set the frame rate * * @param frame_rate_a an integer containing the frame rate * * @return a boolean value indicating status */ public boolean setFrameRate(float frame_rate_a) { // set the frame rate // frame_rate_d = frame_rate_a; return true; } /** * Set the mode * * @param mode_a an integer value indicating the mode * * @return a boolean value indicating status */ public boolean setMode (int mode_a) { // set the mode // mode_d = mode_a; return true; } /** * Extract the audio parameters from ConfigData object * * @param data_a a ConfigData object * * @return a boolean value indicating status */ public boolean setAudioConfig(ConfigData 
data_a) { // loop all values // for (int i = 0; i < data_a.getAssociation().size(); i++) { // set the sample rate // if (data_a.getName().get(i).equals("SampleRate")) { setSampleRate(Float.parseFloat((String)(data_a.getValue().get(i)))); setFrameRate(Float.parseFloat((String)(data_a.getValue().get(i)))); } // set the sample size // else if (data_a.getName().get(i).equals("SampleSize")) { setSampleSize((int)(Float.parseFloat((String)(data_a.getValue().get(i))))); } // set the encoding // else if (data_a.getName().get(i).equals("Encoding")) { String encoding =(String)(data_a.getValue().get(i)); // try to the user set format(which is a string) and // try to set the corresponding AudioFormat.Encoding // object // if(encoding == "PCM_SIGNED") { setEncoding(AudioFormat.Encoding.PCM_SIGNED); } else if (encoding == "PCM_UNSIGNED") { setEncoding(AudioFormat.Encoding.PCM_UNSIGNED); } else if (encoding == "ULAW") { setEncoding(AudioFormat.Encoding.ULAW); } else if (encoding == "ALAW") { setEncoding(AudioFormat.Encoding.ALAW); } } // set the number of channels // else if (data_a.getName().get(i).equals("NumChannels")) { setNumChannels((int)(Float.parseFloat((String)(data_a.getValue().get(i))))); } // set the byte order // else if (data_a.getName().get(i).equals("ByteOrder")) { String byteorder = (String)(data_a.getValue().get(i)); // get the user set byte order (which is a string) and // set the correspoding boolean value // if (byteorder == "BigEndian") { setBigEndian(true); } else if (byteorder == "LittleEndian") { setBigEndian(false); } } } setFrameSize((int)((sample_size_d/8)*num_channels_d)); // using the individual audio parameters set the AudioFormat // object // format_d = new AudioFormat(encoding_d, sample_rate_d, sample_size_d, num_channels_d, frame_size_d, frame_rate_d, big_endian_d); return true; } //------------------------------------------------------------------------- // // class-specific public methods: // get methods // 
  //-------------------------------------------------------------------------

  /**
   * Get the audioformat. This method returns a new AudioFormat object
   * with the same audio parameters as that of format_d.
   *
   * @return AudioFormat object
   */
  public AudioFormat getAudioFormat() {
    return new AudioFormat(format_d.getEncoding(),
                           format_d.getSampleRate(),
                           format_d.getSampleSizeInBits(),
                           format_d.getChannels(),
                           format_d.getFrameSize(),
                           format_d.getFrameRate(),
                           format_d.isBigEndian());
  }

  /**
   * Get the encoding
   *
   * @return a AudioFormat.Encoding object
   */
  public AudioFormat.Encoding getEncoding() {
    return format_d.getEncoding();
  }

  /**
   * Get the sample size
   *
   * @return an int value containing the sample size in bits
   */
  public int getSampleSize() {
    return format_d.getSampleSizeInBits();
  }

  /**
   * Get the sample rate
   *
   * @return a float value containing the sample rate
   */
  public float getSampleRate() {
    return format_d.getSampleRate();
  }

  /**
   * Get the number of channels
   *
   * @return an int value containing the number of channels
   */
  public int getNumOfChannels() {
    return format_d.getChannels();
  }

  /**
   * Get the flag to indicate if it is big endian
   *
   * NOTE(review): unlike the other getters this reads the cached
   * field rather than format_d -- the two can disagree between a
   * set*() call and the next open()/setAudioFormat(); verify which
   * one callers expect.
   *
   * @return a boolean value to indicate whether it is big endian
   */
  public boolean getBigEndian() {
    return big_endian_d;
  }

  /**
   * Get the frame size
   *
   * @return an int value containing the frame size in bytes
   */
  public int getFrameSize() {
    return format_d.getFrameSize();
  }

  /**
   * Get the frame rate
   *
   * @return a float value containing the frame rate
   */
  public float getFrameRate() {
    return format_d.getFrameRate();
  }

  /**
   * Get the mode
   *
   * @return an int value indicating the mode (READ or WRITE)
   */
  public int getMode() {
    return mode_d;
  }

  //-------------------------------------------------------------------------
  //
  // class-specific other public methods:
  //
  //
  //-------------------------------------------------------------------------

  /**
   * If it is read mode, it creates an audioformat object with the
   * audio parameters the user has set. It checks if the format is
   * supported. If supported it creates a TargetDataLine object and
   * opens the line. If it is write mode, it does the same things
   * that it does in read mode except that it creates an
   * SourceDataLine object and opens it for writing.
   *
   * NOTE(review): errorHandler(e) is called throughout this class
   * but its definition is not present in this copy of the file --
   * it appears to be in one of the text-corrupted regions noted
   * below; recover the file from version control.
   *
   * @return a boolean value
   */
  public boolean open() {

    // if it is read mode, use a TargetDataLine object
    //
    if (mode_d == READ) {

      // create an AudioFormat object with the user specified
      // audio parameters
      //
      System.out.println("the num of channels" + num_channels_d);
      format_d = new AudioFormat (encoding_d,sample_rate_d, sample_size_d,
                                  num_channels_d, frame_size_d, frame_rate_d,
                                  big_endian_d);
      System.out.println("The format that has been set" +format_d);

      // constructs a Info object from the specified information
      // using the AudioFormat object
      //
      DataLine.Info info = new DataLine.Info(TargetDataLine.class, format_d);
      if(!AudioSystem.isLineSupported(info)) {
        System.out.println("Format not supported");
        return false;
      }
      try {

        // obtains a line that matches the description in the
        // specified DataLine.Info object
        //
        target_d = (TargetDataLine)AudioSystem.getLine(info);

        // open the line for reading
        //
        target_d.open(format_d);
      }
      catch (Exception e) {
        errorHandler(e);
      }
    }

    // if it is write mode, use a SourceDataLine object
    //
    else if (mode_d == WRITE) {

      // create an AudioFormat object with the user specified
      // audio parameters
      //
      format_d = new AudioFormat(encoding_d,sample_rate_d, sample_size_d,
                                 num_channels_d, frame_size_d, frame_rate_d,
                                 big_endian_d);

      // constructs a data Info object from the specified
      // information using the AudioFormat object
      //
      DataLine.Info info =new DataLine.Info(SourceDataLine.class, format_d);

      // check if the line is supported
      //
      if(!AudioSystem.isLineSupported(info)) {
        return false;
      }
      try {

        // obtains a line that matches the description in the
        // specified DataLine.Info object
        //
        source_d =(SourceDataLine)AudioSystem.getLine(info);

        // open the line for writing
        //
        source_d.open(format_d);
      }
      catch (Exception e) {
        errorHandler(e);
      }
    }
    return true;
  }

  /**
   * Allows a line to engage in data I/O. If invoked on a line that
   * is already running, this method does nothing. Unless the data
   * in the buffer has been flushed, the line resumes I/O starting
   * with the first frame that was unprocessed at the time the line
   * was stopped.
   *
   * @return a boolean value indicating status
   */
  public boolean start() {

    // read mode.
    //
    if (mode_d == READ) {
      target_d.start();
    }

    // write mode.
    //
    else if (mode_d == WRITE) {
      source_d.start();
    }
    return true;
  }

  /**
   * Stops the line. A stopped line should cease I/O activity. If the
   * line is open and running, however, it should retain the resources
   * required to resume activity. In write mode, the stop_play_back
   * flag is meant to be set so that the playback thread stops running.
   *
   * @return a boolean value indicating status
   */
  public boolean stop() {

    // read mode.
    //
    if (mode_d == READ) {

      // stop the I/O activity
      //
      target_d.stop();
    }

    // write mode.
    //
    else if (mode_d == WRITE) {

      // stop the I/O activity
      //
      source_d.stop();

      // NOTE(review): the original javadoc for this method says the
      // stop_play_back flag "is set to true so that the playback
      // thread stops running", but the code sets it to false --
      // this looks inverted; confirm against the playback thread
      // (whose code is in a corrupted region below).
      //
      stop_play_back = false;
    }
    return true;
  }

  /**
   * Closes the line, indicating that any system resources in use by
   * the line can be released.
   *
   * @return a boolean value indicating status
   */
  public boolean close() {

    // read mode.
    //
    if (mode_d == READ) {
      target_d.close();
      System.out.println("The line has been closed.");
    }

    // write mode.
    //
    else if (mode_d == WRITE) {
      source_d.close();
      System.out.println("closing the line opened for playback.");
    }
    return true;
  }

  /**
   * Read the audio data from the circular buffer, convert it, and
   * return it through an AudioSignal object. If the audio data in
   * the circular buffer is less than the requested number of samples
   * the function returns whatever is in the circular buffer. If the
   * circular buffer has more data than requested then the requested
   * number of samples is returned.
   *
   * @param signal_a the object which will contain the audio data.
   * @param sample_size_a number of samples that should be read.
   *
   * @return an int value indicating the number of float values read
   * into the signal. If the circular buffer is empty and the producer
   * has stopped writing to the circular buffer it returns -1.
   */
  public int read (AudioSignal signal_a ,int sample_size_a) {
    int num_channels = getNumOfChannels();

    // calculate the sample size in Bytes
    //
    sampleSizeInBytes = (format_d.getSampleSizeInBits()/8);
    int availSamples = (int)(cb_d.getNumElements()/sampleSizeInBytes);
    float[][] fbuf ;

    // buffer empty and producer finished: signal end of data
    //
    if (availSamples == 0 && cb_d.isDone()) {
      return -1;
    }
    else if (availSamples == 0) {
      return 0;
    }

    // check whether the available samples in the circular buffer
    // is less than the requested samples
    //
    else if (availSamples < sample_size_a ) {
      Object [] buffer = new Object[availSamples*sampleSizeInBytes];
      try {

        // read the available samples from the circular buffer
        //
        cb_d.read(buffer,0,availSamples*sampleSizeInBytes);
      }
      catch (Exception e) {
        errorHandler(e);
      }
      byte [] buf = new byte[availSamples*sampleSizeInBytes];
      fbuf = new float [num_channels][availSamples/num_channels];

      // the loop below converts java's Object array into a byte
      // array so that it can be written to an AudioSignal object.
      //
      // NOTE(review): the source text is garbled here -- everything
      // between the '<' of the loop header and the '>' of the next
      // else-if condition (the conversion loop body, the
      // byteToFloat/setData calls and the return) has been stripped
      // from this copy of the file; recover from version control.
      //
      for(int i=0;i= sample_size_a) {
      Object [] buffer = new Object[sample_size_a*sampleSizeInBytes];
      try {

        // read the requested number of samples from the
        // circular buffer
        //
        cb_d.read(buffer,0,sample_size_a*sampleSizeInBytes);
      }
      catch (Exception e) {
        errorHandler(e);
      }
      byte [] buf = new byte [ sample_size_a*sampleSizeInBytes];
      fbuf = new float [num_channels] [sample_size_a/num_channels];

      // the loop below converts java's Object array into a byte
      // array so that it can be written to an AudioSignal object.
      //
      // NOTE(review): the source text is garbled from here down to
      // the playback loop below -- the remainder of read(), and the
      // write/startRecording/stopRecording/startPlayBack methods
      // that main() calls (plus, apparently, errorHandler and the
      // record/playback thread bodies) have been stripped from this
      // copy of the file; recover from version control.
      //
      for(int i=0;i 0) {

        // write data to the internal buffer of
        // the data line where it will be
        // delivered to the speaker.
        //
        source_d.write(tempBuffer, 0, cnt);
      } }
      stop_play_back = false;
      } catch (Exception e) { errorHandler(e); } } }

  //-------------------------------------------------------------------------
  //
  // class-specific other private methods:
  //
  //
  //-------------------------------------------------------------------------

  /**
   * Convert float array to byte array (16-bit samples, channel
   * interleaved, byte order taken from format_d).
   *
   * @param byteSound User provided byte array to return result in.
   * @param dbuf User provided float array to convert.
   */
  private void floatToByte(byte[] byteSound, float [][] dbuf) {

    // check whether this will work
    //
    int bufsz = dbuf[0].length;
    int ib = 0;
    if(format_d.isBigEndian()) {
      for(int i = 0; i < bufsz; i++) {
        int j=0;
        while (j < getNumOfChannels()) {

          // scale the float sample to a 16-bit value and emit the
          // high byte first (big endian)
          //
          short y = (short)Math.round(32767 * dbuf[j][i]);
          byteSound[ib] = (byte)(y >> 8);
          ib++;
          byteSound[ib] = (byte)(y & 0x00ff);
          ib++;
          j++;
        }
      }
    }
    else {

      // NOTE(review): the little-endian branch is garbled in this
      // copy of the file (the loop header and the low-byte store
      // have been stripped); recover from version control.
      //
      for(int i=0;i> 8); ib++; j++; } } } }

  /**
   * Convert byte array to float array (16-bit samples, channel
   * interleaved, byte order taken from format_d).
   *
   * @param dbuf User provided float array to return result in.
   * @param bbuf User provided byte array to convert.
   * @param bufsz Number of float samples to write.
   */
  private void byteToFloat(float [][] dbuf, byte[] bbuf, int bufsz) {
    int num_channels = getNumOfChannels();
    int ib = 0;
    int j;
    if(format_d.isBigEndian()) {
      for(int i = 0; i < (bufsz/num_channels); i++) {
        j = 0;
        while (j < num_channels) {

          // high byte first, then or-in the low byte, then scale
          // down to [-1, 1]
          //
          short y = bbuf[ib];
          y = (short)(y << 8);
          y |= (bbuf[ib+1]&0x00ff);
          ib += 2;
          dbuf[j][i] = y / 32767.f;
          j++;
        }
      }
    }
    else {
      for(int i = 0; i < (bufsz/num_channels); i++) {
        j = 0;
        while (j < num_channels) {

          // little endian: the second byte is the high byte
          //
          short y = (short)bbuf[ib+1];
          y = (short)(y << 8);
          y |= (bbuf[ib]&0x00ff);
          ib += 2;
          dbuf[j][i] = y / 32767.f;
          j++;
        }
      }
    }
  }

  /**
   * Load the byte array into a AudioInputStream object
   *
   * @param audio_data_a the byte array that needs to be played back
   *
   * @return AudioInputStream an AudioInputStream object containing
   * the byte array
   */
  private AudioInputStream getAudioInputStream(byte[] audio_data_a){
    ByteArrayInputStream bais = new ByteArrayInputStream(audio_data_a);
    return new AudioInputStream(bais, format_d, (audio_data_a.length));
  }

  //-------------------------------------------------------------------------
  //
  // main diagnostic method for this class
  //
  //-------------------------------------------------------------------------

  /**
   * Diagnostic method for this class: records for ~5.5 seconds,
   * reads the captured audio into an AudioSignal, then plays it
   * back.
   *
   * @param args arguments from the command line
   */
  public static void main (String[] args) {

    // create an AudioInterface object
    //
    AudioInterface audioInterface = new AudioInterface();

    // a default format that can be used by the test program
    //
    AudioFormat SPEECH = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
                                         16000.0F, 16, 1, 2, 16000.0F, false);

    // set the mode to READ
    //
    audioInterface.mode_d = audioInterface.READ;

    // check if the format is supported
    //
    if(!audioInterface.setAudioFormat(SPEECH)) {
      System.out.println("format not supported - main");
    }

    // create an TargetDataLine object and open the line
    //
    audioInterface.open();

    // start the line
    //
    audioInterface.start();

    // start the recording thread and keep writing the audio data
    // into the circular buffer
    //
    audioInterface.startRecording();

    // Sleep for sometime to allow the recording to take place
    //
    try {
      Thread.sleep(5500);
    }
    catch (Exception e) {
    }

    // stop the recording thread
    //
    audioInterface.stopRecording();

    // stop the line
    //
    audioInterface.stop();

    // close the line
    //
    audioInterface.close();

    // create an AudioSignal object
    //
    AudioSignal signal = new AudioSignal();

    // read the data from an circular buffer into a AudioSignal
    // object
    //
    if(audioInterface.read(signal,79000)== -1) {
      System.out.println("There is a problem with read");
    }

    // get the data from the AudioSignal object
    //
    float [][] audio =signal.getData();

    // switch to write mode
    //
    audioInterface.mode_d = audioInterface.WRITE;
    if(!(audioInterface.setAudioFormat(SPEECH))){
      System.out.println("format not supported");
    }

    // create an SourceDataLine object and open the line
    //
    audioInterface.open();

    // start the line
    //
    audioInterface.start();

    // write the signal object to the line
    //
    audioInterface.startPlayBack(signal);

    // sleep for sometime so that playback takes place
    //
    try {
      Thread.sleep(5000);
    }
    catch (Exception e) {
    }

    // stop the line
    //
    audioInterface.stop();

    // close the line
    //
    audioInterface.close();
  }
}