//package GlobalUtilities;

import java.applet.Applet;
import java.applet.AudioClip;
import java.net.URISyntaxException;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.io.*;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import javax.sound.sampled.*;

/**
 * This class handles the reading, writing, and playing of wav files. It is
 * also capable of converting the file to its raw byte[] form.
 *
 * Based on code by Evan Merz, modified by Dan Vargo.
 * @author dvargo
 */
public class Wav
{
    /*
    WAV File Specification
    FROM http://ccrma.stanford.edu/courses/422/projects/WaveFormat/
    The canonical WAVE format starts with the RIFF header:
    0   4   ChunkID         Contains the letters "RIFF" in ASCII form
                            (0x52494646 big-endian form).
    4   4   ChunkSize       36 + SubChunk2Size, or more precisely:
                            4 + (8 + SubChunk1Size) + (8 + SubChunk2Size)
                            This is the size of the rest of the chunk
                            following this number. This is the size of the
                            entire file in bytes minus 8 bytes for the
                            two fields not included in this count:
                            ChunkID and ChunkSize.
    8   4   Format          Contains the letters "WAVE"
                            (0x57415645 big-endian form).

    The "WAVE" format consists of two subchunks: "fmt " and "data":
    The "fmt " subchunk describes the sound data's format:
    12  4   Subchunk1ID     Contains the letters "fmt "
                            (0x666d7420 big-endian form).
    16  4   Subchunk1Size   16 for PCM. This is the size of the
                            rest of the subchunk which follows this number.
    20  2   AudioFormat     PCM = 1 (i.e. linear quantization).
                            Values other than 1 indicate some
                            form of compression.
    22  2   NumChannels     Mono = 1, Stereo = 2, etc.
    24  4   SampleRate      8000, 44100, etc.
    28  4   ByteRate        == SampleRate * NumChannels * BitsPerSample/8
    32  2   BlockAlign      == NumChannels * BitsPerSample/8
                            The number of bytes for one sample including
                            all channels. I wonder what happens when
                            this number isn't an integer?
    34  2   BitsPerSample   8 bits = 8, 16 bits = 16, etc.

    The "data" subchunk contains the size of the data and the actual sound:
    36  4   Subchunk2ID     Contains the letters "data"
                            (0x64617461 big-endian form).
    40  4   Subchunk2Size   == NumSamples * NumChannels * BitsPerSample/8
                            This is the number of bytes in the data.
                            You can also think of this as the size
                            of the rest of the subchunk following this
                            number.
    44  *   Data            The actual sound data.

    The thing that makes reading wav files tricky is that Java has no unsigned types. This means
    that the binary data can't just be read and cast appropriately. Also, we have to use larger
    types than are normally necessary.

    In many languages, including Java, an integer is represented by 4 bytes. The issue here is
    that in most languages integers can be signed or unsigned, and in wav files the integers
    are unsigned. So, to make sure that we can store the proper values, we have to use longs
    to hold integers, and integers to hold shorts.

    Then, we have to convert back when we want to save our wav data.

    It's complicated, but ultimately it just results in a few extra functions at the bottom of
    this file. Once you understand the issue, there is no reason to pay any more attention
    to it.

    ALSO:

    This code won't read ALL wav files. It does not implement the full specification, just
    a trimmed-down version that most wav files adhere to.
    */
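
    /*
    A concrete example of the unsigned issue above (illustrative, not part of the original
    comment): the little-endian byte pair { 0xE8, 0x03 } encodes the unsigned 16-bit value
    1000, but 0xE8 read as a signed Java byte is -24. Masking each byte with & 0xFF before
    shifting, as byteArrayToShort() and byteArrayToInt() at the bottom of this file do,
    recovers the intended value:

        byte[] b = { (byte) 0xE8, (byte) 0x03 };
        int value = ((b[1] & 0xFF) << 8) | (b[0] & 0xFF);   // 1000
    */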

    ByteArrayOutputStream byteArrayOutputStream;
    AudioFormat audioFormat;
    TargetDataLine targetDataLine;
    AudioInputStream audioInputStream;
    SourceDataLine sourceDataLine;
    // float frequency = 8000.0F; //8000,11025,16000,22050,44100
    // int samplesize = 16;
    private String myPath;
    private long myChunkSize;
    private long mySubChunk1Size;
    private int myFormat;
    private long myChannels;
    private long mySampleRate;
    private long myByteRate;
    private int myBlockAlign;
    private int myBitsPerSample;
    private long myDataSize;

    public byte[][] myData = new byte[4][];

    static int minutes = 10;

    static byte[] theStream = new byte[0]; // 44100*2 * 60 * minutes];

    static int cursor = 0; // +735 per frame = 44100 Hz / 120 Hz * 2 (for 16 bits)

    // public Wav()
    // {
    //     myPath = "/Users/nbriere/0ut/wavs/Footstepwood";
    //     //myPath = "/Users/nbriere/0ut/wavs/robot_walk";
    // }

    // constructor takes a wav path
    public Wav(String tmpPath)
    {
        myPath = tmpPath;

        read();
    }
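
    // Usage sketch (illustrative only; the path prefix below is hypothetical). The constructor
    // takes a base path and read() loads four numbered variations, e.g. footstep0.wav through
    // footstep3.wav:
    //
    //     Wav footsteps = new Wav("/sounds/footstep");
    //     System.out.println(footsteps.getSummary());
    //     footsteps.play(0.5); // mix one variation into the shared stream at half volume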

    // get/set for the Path property
    public String getPath()
    {
        return myPath;
    }

    public void setPath(String newPath)
    {
        myPath = newPath;
    }

    // read a wav file into this class
    public Wav read()
    {
        DataInputStream inFile = null;
        // myData = null;
        byte[] tmpLong = new byte[4];
        byte[] tmpInt = new byte[2];

        int i = 0;

        try
        {
            for (; i < 4; i++)
            {
                inFile = new DataInputStream(new FileInputStream(myPath + "" + i + ".wav"));

                System.out.println("Reading wav file: " + myPath + "" + i + ".wav"); // for debugging only

                String chunkID = "" + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte();

                inFile.read(tmpLong); // read the ChunkSize
                myChunkSize = byteArrayToInt(tmpLong);

                String format = "" + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte();

                // print what we've read so far
                //System.out.println("chunkID:" + chunkID + " chunk1Size:" + myChunkSize + " format:" + format); // for debugging only

                String subChunk1ID = "" + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte();

                inFile.read(tmpLong); // read the SubChunk1Size
                mySubChunk1Size = byteArrayToInt(tmpLong);

                inFile.read(tmpInt); // read the audio format. This should be 1 for PCM
                myFormat = byteArrayToShort(tmpInt);

                inFile.read(tmpInt); // read the # of channels (1 or 2)
                myChannels = byteArrayToShort(tmpInt);

                inFile.read(tmpLong); // read the samplerate
                mySampleRate = byteArrayToInt(tmpLong);

                inFile.read(tmpLong); // read the byterate
                myByteRate = byteArrayToInt(tmpLong);

                inFile.read(tmpInt); // read the blockalign
                myBlockAlign = byteArrayToShort(tmpInt);

                inFile.read(tmpInt); // read the bitspersample
                myBitsPerSample = byteArrayToShort(tmpInt);

                // print what we've read so far
                //System.out.println("SubChunk1ID:" + subChunk1ID + " SubChunk1Size:" + mySubChunk1Size + " AudioFormat:" + myFormat + " Channels:" + myChannels + " SampleRate:" + mySampleRate);

                // read the data chunk header - reading this IS necessary, because not all wav files will have the data chunk here - for now, we're just assuming that the data chunk is here
                String dataChunkID = "" + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte() + (char) inFile.readByte();

                inFile.read(tmpLong); // read the size of the data
                myDataSize = byteArrayToInt(tmpLong); // Subchunk2Size: number of bytes of sound data

                // read the data chunk
                myData[i] = new byte[(int) myDataSize];
                inFile.readFully(myData[i]);

                // close the input stream
                inFile.close();
            }
        } catch (Exception e)
        {
            if (i == 0)
                e.printStackTrace();
            return this;
        }

        return this;
    }

    // write out the wav file
    public Wav save()
    {
        if (loop == 0 || !Globals.ANIMATION)
            return this;

        Object[] options = {"Yes",
            "No",
            "Cancel"};
        int n = javax.swing.JOptionPane.showOptionDialog(null,
            "Would you like to save the sound?",
            "A Silly Question",
            javax.swing.JOptionPane.YES_NO_CANCEL_OPTION,
            javax.swing.JOptionPane.QUESTION_MESSAGE,
            null,
            options,
            options[2]);

        if (n == 2)
            return this;

        loop = 0;

        if (n == 1)
            return this;

        try
        {
            //for (int i=theStream.length; --i>=0;)
            {
                System.out.println(myPath + ".wav");
                DataOutputStream outFile = new DataOutputStream(new FileOutputStream(myPath + ".wav"));

                // write the wav file per the wav file format
                outFile.writeBytes("RIFF"); // 00 - RIFF
                outFile.write(intToByteArray(36 + theStream.length), 0, 4); // 04 - ChunkSize: 36 + SubChunk2Size, per the spec above
                outFile.writeBytes("WAVE"); // 08 - WAVE
                outFile.writeBytes("fmt "); // 12 - fmt
                outFile.write(intToByteArray((int) mySubChunk1Size), 0, 4); // 16 - size of this chunk
                shortToByteArray((short) myFormat, buf2);
                outFile.write(buf2, 0, 2); // 20 - what is the audio format? 1 for PCM = Pulse Code Modulation
                shortToByteArray((short) myChannels, buf2);
                outFile.write(buf2, 0, 2); // 22 - mono or stereo? 1 or 2? (or 5 or ???)
                outFile.write(intToByteArray((int) mySampleRate), 0, 4); // 24 - samples per second (numbers per second)
                outFile.write(intToByteArray((int) myByteRate), 0, 4); // 28 - bytes per second
                shortToByteArray((short) myBlockAlign, buf2);
                outFile.write(buf2, 0, 2); // 32 - # of bytes in one sample, for all channels
                shortToByteArray((short) myBitsPerSample, buf2);
                outFile.write(buf2, 0, 2); // 34 - how many bits in a sample (number)? usually 16 or 24
                outFile.writeBytes("data"); // 36 - data

                // outFile.write(intToByteArray((int) myDataSize), 0, 4); // 40 - how big is this data chunk
                // outFile.write(myData); // 44 - the actual data itself - just a long string of numbers

                outFile.write(intToByteArray(theStream.length), 0, 4); // 40 - size of the data chunk (the full stream, not just up to cursor)
                outFile.write(theStream); // 44 - the mixed-down sound data
                outFile.close();
            }
        } catch (Exception e)
        {
            System.out.println(e.getMessage());
            return this;
        }

        System.out.println("done.");
        // cursor = 0;

        return this;
    }

    // return a printable summary of the wav file
    public String getSummary()
    {
        //String newline = System.getProperty("line.separator");
        String newline = "\n";
        String summary = "Format: " + myFormat + newline + "Channels: " + myChannels + newline + "SampleRate: " + mySampleRate + newline + "ByteRate: " + myByteRate + newline + "BlockAlign: " + myBlockAlign + newline + "BitsPerSample: " + myBitsPerSample + newline + "DataSize: " + myDataSize;
        return summary;
    }

    // public byte[] getBytes()
    // {
    //     read();
    //     return myData;
    // }

    /**
     * Plays back audio stored in the byte array using an audio format given by
     * freq, sample rate, etc.
     * @param data The byte array to play
     */
    public void playAudio(byte[] data)
    {
        try
        {
            byte audioData[] = data;
            //Get an input stream on the byte array containing the data
            InputStream byteArrayInputStream = new ByteArrayInputStream(audioData);
            AudioFormat audioFormat = getAudioFormat();
            audioInputStream = new AudioInputStream(byteArrayInputStream, audioFormat, audioData.length / audioFormat.getFrameSize());
            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, audioFormat);
            sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
            sourceDataLine.open(audioFormat);
            sourceDataLine.start();

            //Create a thread to play back the data and start it running. It will run
            //until all the data has been played back.
            Thread playThread = new Thread(new PlayThread());
            playThread.start();
        } catch (Exception e)
        {
            System.out.println(e);
        }
    }

    boolean playing;

    static byte[] bytebuffer = new byte[100000];

    static byte[] buf2 = new byte[2];

    static int loop = 0;

    public void play(double volume) //, int wave)
    {
        byte[] mydata = myData[(loop++) % myData.length]; // (int)(Math.random()*4)];

        //loop %= myData.length;

        byte[] thestream = theStream; // [wave-1];

        int mycursor = cursor / 2 * 2; // align the cursor to a 16-bit sample boundary

        if (mydata == null) // june 2014
            return;

        // mix this sound into the shared stream, one 16-bit little-endian sample at a time
        for (int i = mydata.length / 2; --i >= 0;)
        {
            // bytebuffer[i] = mydata[i]; // bytebuffer[i] *= volume;
            buf2[0] = mydata[2*i];
            buf2[1] = mydata[2*i + 1];
            int val = byteArrayToShort(buf2); // & 0xFFFF;
            val *= volume;

            buf2[0] = thestream[mycursor + 2*i];
            buf2[1] = thestream[mycursor + 2*i + 1];
            int prevval = byteArrayToShort(buf2); // & 0xFFFF;

            if (prevval != 0)
                val += prevval;

            // clamp the mixed sample to the signed 16-bit range
            if (val > 32767)
                val = 32767;
            if (val < -32768)
                val = -32768;

            shortToByteArray((short) val, buf2);
            bytebuffer[2*i] = buf2[0];
            bytebuffer[2*i + 1] = buf2[1];
        }

        System.arraycopy(bytebuffer, 0, thestream, mycursor, mydata.length);

        if (playing)
            return;

        playing = true;

        playAudio(bytebuffer); // myData);
    }
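
    /*
    Mixing arithmetic worked through on illustrative numbers: with volume 1.0, a sample of
    30000 from mydata added on top of a previously written sample of 10000 would overflow a
    16-bit short (40000 > 32767), so the loop above clamps the sum to [-32768, 32767] before
    writing it back into the stream.
    */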

    /**
     * This method creates and returns an AudioFormat object for a given set
     * of format parameters. If these parameters don't work well for
     * you, try some of the other allowable parameter values, which
     * are shown in comments following the declarations.
     * @return the AudioFormat used for playback
     */
    private AudioFormat getAudioFormat()
    {
        float sampleRate = mySampleRate; // frequency;
        //8000,11025,16000,22050,44100
        int sampleSizeInBits = myBitsPerSample; // samplesize;
        //8,16
        int channels = 1; // playback is forced to mono here, regardless of myChannels
        //1,2
        boolean signed = true;
        //true,false
        boolean bigEndian = false;
        //true,false
        //return new AudioFormat( AudioFormat.Encoding.PCM_SIGNED, 8000.0f, 8, 1, 1,
        //8000.0f, false );

        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    public void playWav(String filePath)
    {
        try
        {
            AudioClip clip = Applet.newAudioClip(new File(filePath).toURI().toURL());
            clip.play();
        } catch (Exception e)
        {
            Logger.getLogger(Wav.class.getName()).log(Level.SEVERE, null, e);
        }
    }
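
    // Alternative sketch (not part of the original class): the Applet-based AudioClip API used
    // above is a legacy mechanism; the same one-shot playback can be done with
    // javax.sound.sampled directly (exception handling omitted):
    //
    //     try (AudioInputStream in = AudioSystem.getAudioInputStream(new File(filePath)))
    //     {
    //         Clip clip = AudioSystem.getClip();
    //         clip.open(in);
    //         clip.start();
    //     }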

    // ===========================
    // CONVERT BYTES TO JAVA TYPES
    // ===========================
    // these routines convert little-endian byte arrays to Java numeric types
    public static short byteArrayToShort(byte[] b)
    {
        // equivalent alternative:
        // int low = b[0] & 0xff;
        // int high = b[1] & 0xff;
        // return (short) ((high << 8) | low);
        short ret = 0;
        for (int i = 2; --i >= 0;)
        {
            ret <<= 8;
            ret |= b[i] & 0xFF;
        }

        return ret;
    }

    public static int byteArrayToInt(byte[] b)
    {
        int ret = 0;
        for (int i = 4; --i >= 0;)
        {
            ret <<= 8;
            ret |= b[i] & 0xFF;
        }

        return ret;
    }

    // convert a short to a little-endian byte array, written into buf2
    public static void shortToByteArray(short data, byte[] buf2)
    {
        // return new byte[]
        // {
        //     (byte) (data & 0xff), (byte) ((data >>> 8) & 0xff)
        // };
        buf2[0] = (byte) (data & 0xff);
        buf2[1] = (byte) ((data >>> 8) & 0xff);
    }

    // convert an int to a little-endian byte array
    public static byte[] intToByteArray(int data)
    {
        return new byte[]
        {
            (byte) (data & 0xff), (byte) ((data >>> 8) & 0xff), (byte) ((data >>> 16) & 0xff), (byte) ((data >>> 24) & 0xff)
        };
    }
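
    // Round-trip sketch for the little-endian helpers above (illustrative values only):
    //
    //     byte[] four = intToByteArray(1000);   // { (byte) 0xE8, 0x03, 0x00, 0x00 }
    //     int n = byteArrayToInt(four);         // 1000 again
    //
    //     byte[] two = new byte[2];
    //     shortToByteArray((short) -2, two);    // { (byte) 0xFE, (byte) 0xFF }
    //     short s = byteArrayToShort(two);      // -2 again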

    /**
     * Inner class to play back the data that was saved
     */
    class PlayThread extends Thread
    {
        byte tempBuffer[] = new byte[10000];

        public void run()
        {
            try
            {
                int cnt;
                //Keep looping until the input
                // read method returns -1,
                // i.e. the end of the stream.
                while ((cnt = audioInputStream.read(tempBuffer, 0, tempBuffer.length)) != -1)
                {
                    if (cnt > 0)
                    {
                        //Write data to the internal
                        // buffer of the data line
                        // where it will be delivered
                        // to the speaker.
                        sourceDataLine.write(tempBuffer, 0, cnt);
                    }
                }
                //Block and wait for the internal
                // buffer of the data line to
                // empty.
                sourceDataLine.drain();
                sourceDataLine.close();
            } catch (Exception e)
            {
                System.out.println(e);
                System.exit(0);
            }

            playing = false;
        }
    }
}