diff --git a/instrument-core/src/main/java/jomu/instrument/audio/AudioPlayer.java b/instrument-core/src/main/java/jomu/instrument/audio/AudioPlayer.java
new file mode 100644
index 00000000..c6bd7b12
--- /dev/null
+++ b/instrument-core/src/main/java/jomu/instrument/audio/AudioPlayer.java
@@ -0,0 +1,149 @@
+package jomu.instrument.audio;
+
+import java.util.logging.Logger;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.LineUnavailableException;
+import javax.sound.sampled.SourceDataLine;
+
+import be.tarsos.dsp.AudioEvent;
+import be.tarsos.dsp.AudioProcessor;
+import be.tarsos.dsp.io.TarsosDSPAudioFormat;
+
+/**
+ * This AudioProcessor can be used to sync events with sound. It uses a pattern
+ * described in "JavaFX Special Effects: Taking Java RIA to the Extreme with
+ * Animation, Multimedia, and Game Elements", chapter 9, page 185: the
+ * {@code SourceDataLine} is the Java Sound object that actually makes the
+ * sound, and its {@code write} method blocks until the line is ready for more
+ * data. If this AudioProcessor is chained with other AudioProcessors, the
+ * others should be able to operate in real time or process the signal on a
+ * separate thread.
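+ * <p>
+ * A minimal usage sketch (this assumes an existing TarsosDSP
+ * {@code AudioDispatcher} named {@code dispatcher}; the format and buffer
+ * size are illustrative):
+ *
+ * <pre>{@code
+ * TarsosDSPAudioFormat format = new TarsosDSPAudioFormat(44100, 16, 1, true, false);
+ * AudioPlayer player = new AudioPlayer(format, 1024);
+ * dispatcher.addAudioProcessor(player); // the blocking write paces the chain
+ * }</pre>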
+ *
+ * @author Joren Six
+ */
+public final class AudioPlayer implements AudioProcessor {
+
+    private static final Logger LOG = Logger.getLogger(AudioPlayer.class.getName());
+
+    /**
+     * The line to send sound to. It is also used to keep everything in sync.
+     */
+    private SourceDataLine line;
+
+    private final AudioFormat format;
+
+    byte[] lastBuffer = null;
+
+    boolean lastBufferEmpty = true;
+
+    /**
+     * Creates a new audio player.
+     *
+     * @param format
+     *            The AudioFormat of the buffer.
+     * @throws LineUnavailableException
+     *             If no output line is available.
+     */
+    public AudioPlayer(final AudioFormat format) throws LineUnavailableException {
+        this(format, 1024);
+    }
+
+    public AudioPlayer(final AudioFormat format, int bufferSize) throws LineUnavailableException {
+        final DataLine.Info info = new DataLine.Info(SourceDataLine.class, format, bufferSize);
+        LOG.info("Opening data line: " + info.toString());
+        this.format = format;
+        line = (SourceDataLine) AudioSystem.getLine(info);
+        line.open(format, bufferSize * 2);
+        line.start();
+    }
+
+    public AudioPlayer(final TarsosDSPAudioFormat format, int bufferSize) throws LineUnavailableException {
+        this(JVMAudioInputStream.toAudioFormat(format), bufferSize);
+    }
+
+    public AudioPlayer(final TarsosDSPAudioFormat format) throws LineUnavailableException {
+        this(JVMAudioInputStream.toAudioFormat(format));
+    }
+
+    public long getMicroSecondPosition() {
+        return line.getMicrosecondPosition();
+    }
+
+    @Override
+    public boolean process(AudioEvent audioEvent) {
+        // Detect whether the incoming buffer is all zeros (silence).
+        boolean thisBufferEmpty = true;
+        for (byte entry : audioEvent.getByteBuffer()) {
+            if (entry != 0) {
+                thisBufferEmpty = false;
+                break;
+            }
+        }
+        if (lastBuffer != null) {
+            if (lastBufferEmpty && !thisBufferEmpty) {
+                // Transition from silence to sound: zero the leading non-zero
+                // bytes to soften the onset.
+                for (int i = 0; i < audioEvent.getByteBuffer().length; i++) {
+                    if (audioEvent.getByteBuffer()[i] != 0) {
+                        audioEvent.getByteBuffer()[i] = 0;
+                    } else {
+                        break;
+                    }
+                }
+            } else if (!lastBufferEmpty && thisBufferEmpty) {
+                // Transition from sound to silence: the mirrored fade-out is
+                // intentionally disabled.
+                // for (int i = 0; i < audioEvent.getByteBuffer().length; i++) {
+                //     if (audioEvent.getByteBuffer()[i] != 0) {
+                //         audioEvent.getByteBuffer()[i] = 0;
+                //     } else {
+                //         break;
+                //     }
+                // }
+            }
+        }
+        // Remember this buffer so the next call can detect transitions.
+        lastBuffer = audioEvent.getByteBuffer();
+        lastBufferEmpty = thisBufferEmpty;
+
+        // Overlap in samples * bytes per sample = overlap in bytes; only the
+        // non-overlapping step of the buffer is written to the line.
+        int byteOverlap = audioEvent.getOverlap() * format.getFrameSize();
+        int byteStepSize = audioEvent.getBufferSize() * format.getFrameSize() - byteOverlap;
+        LOG.fine(">>>AO 1: " + byteOverlap + ", " + byteStepSize
+                + ", " + (System.currentTimeMillis() / 1000.0)
+                + ", " + audioEvent.getTimeStamp() + ", " + audioEvent.getSamplesProcessed());
+
+        if (audioEvent.getTimeStamp() == 0) {
+            // The first buffer has no overlap with a previous one: write it whole.
+            byteOverlap = 0;
+            byteStepSize = audioEvent.getBufferSize() * format.getFrameSize();
+            LOG.fine(">>>AO 2: " + byteOverlap + ", " + byteStepSize
+                    + ", " + (System.currentTimeMillis() / 1000.0)
+                    + ", " + audioEvent.getTimeStamp() + ", " + audioEvent.getSamplesProcessed());
+        }
+
+        /*
+         * if (byteStepSize < line.available()) {
+         *     System.out.println(line.available() + " Will not block " + line.getMicrosecondPosition());
+         * } else {
+         *     System.out.println("Will block " + line.getMicrosecondPosition());
+         * }
+         */
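+        // Worked example with hypothetical values: a 1024-sample buffer, a
+        // 128-sample overlap and a 16-bit mono format (frame size 2 bytes)
+        // give byteOverlap = 128 * 2 = 256 and byteStepSize = 1024 * 2 - 256
+        // = 1792, so each call appends only the 896 new samples to the line.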
+        int bytesWritten = line.write(audioEvent.getByteBuffer(), byteOverlap, byteStepSize);
+        if (bytesWritten != byteStepSize) {
+            System.err.println(
+                    String.format("Expected to write %d bytes but only wrote %d bytes", byteStepSize, bytesWritten));
+        }
+        return true;
+    }
+
+    @Override
+    public void processingFinished() {
+        // Cleanup. drain() blocks until all queued audio has played, which
+        // can take a while.
+        line.drain();
+        line.stop();
+        line.close();
+    }
+}
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/JVMAudioInputStream.java b/instrument-core/src/main/java/jomu/instrument/audio/JVMAudioInputStream.java
new file mode 100644
index 00000000..6d80bd54
--- /dev/null
+++ b/instrument-core/src/main/java/jomu/instrument/audio/JVMAudioInputStream.java
@@ -0,0 +1,83 @@
+package jomu.instrument.audio;
+
+import java.io.IOException;
+
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioFormat.Encoding;
+import javax.sound.sampled.AudioInputStream;
+
+import be.tarsos.dsp.io.TarsosDSPAudioFormat;
+import be.tarsos.dsp.io.TarsosDSPAudioInputStream;
+
+/**
+ * Encapsulates an {@link AudioInputStream} to make it work with the core
+ * TarsosDSP library.
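+ * <p>
+ * A minimal usage sketch (the file name is illustrative and exception
+ * handling is omitted):
+ *
+ * <pre>{@code
+ * AudioInputStream stream = AudioSystem.getAudioInputStream(new File("input.wav"));
+ * JVMAudioInputStream tarsosStream = new JVMAudioInputStream(stream);
+ * TarsosDSPAudioFormat tarsosFormat = tarsosStream.getFormat();
+ * AudioFormat roundTripped = JVMAudioInputStream.toAudioFormat(tarsosFormat);
+ * }</pre>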
+ *
+ * @author Joren Six
+ */
+public class JVMAudioInputStream implements TarsosDSPAudioInputStream {
+
+    private final AudioInputStream underlyingStream;
+    private final TarsosDSPAudioFormat tarsosDSPAudioFormat;
+
+    public JVMAudioInputStream(AudioInputStream stream) {
+        this.underlyingStream = stream;
+        this.tarsosDSPAudioFormat = JVMAudioInputStream.toTarsosDSPFormat(stream.getFormat());
+    }
+
+    @Override
+    public long skip(long bytesToSkip) throws IOException {
+        return underlyingStream.skip(bytesToSkip);
+    }
+
+    @Override
+    public int read(byte[] b, int off, int len) throws IOException {
+        return underlyingStream.read(b, off, len);
+    }
+
+    @Override
+    public void close() throws IOException {
+        underlyingStream.close();
+    }
+
+    @Override
+    public long getFrameLength() {
+        return underlyingStream.getFrameLength();
+    }
+
+    @Override
+    public TarsosDSPAudioFormat getFormat() {
+        return tarsosDSPAudioFormat;
+    }
+
+    /**
+     * Converts an {@link AudioFormat} to a {@link TarsosDSPAudioFormat}.
+     *
+     * @param format
+     *            The {@link AudioFormat}
+     * @return A {@link TarsosDSPAudioFormat}
+     */
+    public static TarsosDSPAudioFormat toTarsosDSPFormat(AudioFormat format) {
+        boolean isSigned = format.getEncoding() == Encoding.PCM_SIGNED;
+        return new TarsosDSPAudioFormat(format.getSampleRate(), format.getSampleSizeInBits(),
+                format.getChannels(), isSigned, format.isBigEndian());
+    }
+
+    /**
+     * Converts a {@link TarsosDSPAudioFormat} to an {@link AudioFormat}.
+     *
+     * @param format
+     *            The {@link TarsosDSPAudioFormat}
+     * @return An {@link AudioFormat}
+     */
+    public static AudioFormat toAudioFormat(TarsosDSPAudioFormat format) {
+        boolean isSigned = format.getEncoding() == TarsosDSPAudioFormat.Encoding.PCM_SIGNED;
+        return new AudioFormat(format.getSampleRate(), format.getSampleSizeInBits(),
+                format.getChannels(), isSigned, format.isBigEndian());
+    }
+}
diff --git a/instrument-core/src/main/java/jomu/instrument/audio/ResynthAudioSynthesizer.java b/instrument-core/src/main/java/jomu/instrument/audio/ResynthAudioSynthesizer.java
index a3642e12..904e541a 100644
--- a/instrument-core/src/main/java/jomu/instrument/audio/ResynthAudioSynthesizer.java
+++ b/instrument-core/src/main/java/jomu/instrument/audio/ResynthAudioSynthesizer.java
@@ -7,7 +7,6 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
 import java.util.logging.Logger;
 
 import javax.sound.sampled.AudioFormat;
@@ -17,7 +16,6 @@
 import be.tarsos.dsp.AudioEvent;
 import be.tarsos.dsp.AudioProcessor;
-import be.tarsos.dsp.io.jvm.AudioPlayer;
 import jomu.instrument.Instrument;
 import jomu.instrument.audio.features.AudioFeatureFrame;
 import jomu.instrument.audio.features.AudioFeatureProcessor;
@@ -44,6 +42,25 @@ public class ResynthProcessor implements AudioProcessor {
 
     @Override
     public boolean process(AudioEvent audioEvent) {
         audioEvent.setFloatBuffer(resynthInfo.getSourceBuffer());
+
+        // Track the minimum and maximum sample values so the log line below
+        // can flag silent or clipped buffers.
+        float smin = 0;
+        float smax = 0;
+        for (int i = 0; i < audioEvent.getFloatBuffer().length; i++) {
+            float sample = audioEvent.getFloatBuffer()[i];
+            if (sample < smin) {
+                smin = sample;
+            }
+            if (sample > smax) {
+                smax = sample;
+            }
+        }
+
+        LOG.fine(">>>RP after: " + (System.currentTimeMillis() / 1000.0)
+                + ", " + audioEvent.getTimeStamp() + ", " + audioEvent.getSamplesProcessed()
+                + ", min: " + smin
+                + ", max: " + smax + ", len: " + audioEvent.getFloatBuffer().length);
         return true;
     }
@@ -149,7 +166,7 @@ public void playFrameSequence(ToneTimeFrame toneTimeFrame, String streamId, int
     AudioFeatureFrame aff = afp.getAudioFeatureFrame(sequence);
     TreeMap