Question
Okay, I've been looking but haven't found a solution. I've managed to get a byte[] of output from the mic with the format AudioFormat(44100.0f, 32, 2, true, true), but now I want to play it back through my headphones to test it.
I've read plenty of things, googled, and searched here on Stack Overflow, but I can't find a way to simply get my already stored data played through the headphones (or whatever the actual audio output device is).
I've read that the common way is to play it from a Clip (with the clip reading the file stream of an audio file), or alternatively to use a SourceDataLine. But I want to push the byte[] back into an OutputStream or a buffer and play it in real time.
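What I have in mind is something like this (just a rough, untested sketch, assuming the captured format is also supported for output; playBytes is only a placeholder name):

// Rough sketch: push an already captured byte[] straight to a SourceDataLine.
// Assumes `format` is a PCM format the default mixer also supports for output,
// and that data.length is a multiple of the frame size.
private void playBytes(byte[] data, AudioFormat format) throws LineUnavailableException {
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine speaker = (SourceDataLine) AudioSystem.getLine(info);
    speaker.open(format);
    speaker.start();
    speaker.write(data, 0, data.length); // blocks until the bytes are queued in the line's buffer
    speaker.drain();                     // wait until everything queued has actually been played
    speaker.close();
}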
My code is intended to be called around 60 times per second (it runs nested inside a 3D application; maybe I'll thread it later on), and it outputs the byte[] for graphical representation.
My current (non-working) approach is this (full code posted below for completeness):
/* DATA
AudioFormat format;     // Audio format (already initialized)
TargetDataLine line;    // Microphone line (working)
DataLine.Info info;
byte soundArray[];      // The last retrieved data
*/

private void playOnRealtime() throws LineUnavailableException {
    Clip clip = AudioSystem.getClip();
    TargetDataLine d_line;
    AudioInputStream inputStream;
    DataLine.Info info = new DataLine.Info(SourceDataLine.class,
            format); // format is an AudioFormat object
    if (!AudioSystem.isLineSupported(info)) {
        System.err.print("Dataline OUT not supported!");
    }
    try {
        d_line = AudioSystem.getTargetDataLine(format);
        d_line.open();
        d_line.start();
        inputStream = AudioSystem.getAudioInputStream(format, new AudioInputStream(d_line));
        clip.open(inputStream);
        clip.start();
        d_line.stop();
        clip.stop();
    } catch (LineUnavailableException ex) {
        // Handle the error.
        // ...
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}
Full code here (if anyone finds it relevant):
package main;

import java.io.IOException;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;
import javax.sound.sampled.TargetDataLine;

public class Monitor implements Runnable {

    AudioFormat format;
    TargetDataLine line;
    DataLine.Info info;
    byte soundArray[];

    public Monitor() {
        format = new AudioFormat(44100.0f, 32, 2, true, true);
        // format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED,
        //         44100.0F, 16, 2, 4, 44100.0F, false);
        try {
            line = AudioSystem.getTargetDataLine(format);
        } catch (LineUnavailableException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        DataLine.Info info = new DataLine.Info(TargetDataLine.class,
                format); // format is an AudioFormat object
        if (!AudioSystem.isLineSupported(info)) {
            // Handle the error ...
        }
        // Obtain and open the line.
        try {
            line = (TargetDataLine) AudioSystem.getLine(info);
            line.open(format);
        } catch (LineUnavailableException ex) {
            // Handle the error ...
        }
        soundArray = new byte[8000];
    }

    boolean started;

    @Override
    public void run() {
        try {
            line.open();
            line.start();
            line.read(soundArray, 0, 8000);
            System.out.println('A');
            printFloatArray(fragment(16, soundArray));
            line.stop();
        } catch (LineUnavailableException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        try {
            playOnRealtime();
        } catch (LineUnavailableException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    private static byte[] fragment(int n, byte[] array) {
        int length = array.length;
        byte[] result = new byte[n];
        for (int k = 0; k < n; k++) {
            result[k] = array[k * (length / n)];
        }
        return result;
    }

    private static void printFloatArray(byte[] array) {
        for (byte var : array) {
            System.out.print(String.format("%3d", var) + "\t");
        }
    }

    public byte[] getData(int n_div) {
        return fragment(n_div, soundArray);
    }

    private void playOnRealtime() throws LineUnavailableException {
        Clip clip = AudioSystem.getClip();
        TargetDataLine d_line;
        AudioInputStream inputStream;
        DataLine.Info info = new DataLine.Info(SourceDataLine.class,
                format); // format is an AudioFormat object
        if (!AudioSystem.isLineSupported(info)) {
            System.err.print("Dataline OUT not supported!");
        }
        try {
            d_line = AudioSystem.getTargetDataLine(format);
            d_line.open();
            d_line.start();
            inputStream = AudioSystem.getAudioInputStream(format, new AudioInputStream(d_line));
            clip.open(inputStream);
            clip.start();
            d_line.stop();
            clip.stop();
        } catch (LineUnavailableException ex) {
            // Handle the error.
            // ...
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
Thanks in advance!
Answer 1:
boolean isRecording = false;
AudioManager am = null;
AudioRecord record = null;
AudioTrack track = null;

@Override
protected void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    setVolumeControlStream(AudioManager.MODE_IN_COMMUNICATION);
    initRecordAndTrack();
    am = (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
    am.setSpeakerphoneOn(true);
    (new Thread()
    {
        @Override
        public void run()
        {
            recordAndPlay();
        }
    }).start();
    Button startButton = (Button) findViewById(R.id.start_button);
    startButton.setOnClickListener(new OnClickListener()
    {
        @Override
        public void onClick(View v)
        {
            if (!isRecording)
            {
                startRecordAndPlay();
            }
        }
    });
    Button stopButton = (Button) findViewById(R.id.stop_button);
    stopButton.setOnClickListener(new OnClickListener()
    {
        @Override
        public void onClick(View v)
        {
            if (isRecording)
            {
                stopRecordAndPlay();
            }
        }
    });
}

private void initRecordAndTrack()
{
    int min = AudioRecord.getMinBufferSize(8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    record = new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION, 8000, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            min);
    if (AcousticEchoCanceler.isAvailable())
    {
        AcousticEchoCanceler echoCancler = AcousticEchoCanceler.create(record.getAudioSessionId());
        echoCancler.setEnabled(true);
    }
    int maxJitter = AudioTrack.getMinBufferSize(8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    track = new AudioTrack(AudioManager.MODE_IN_COMMUNICATION, 8000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, maxJitter,
            AudioTrack.MODE_STREAM);
}

private void recordAndPlay()
{
    short[] lin = new short[1024];
    int num = 0;
    am.setMode(AudioManager.MODE_IN_COMMUNICATION);
    while (true)
    {
        if (isRecording)
        {
            num = record.read(lin, 0, 1024);
            track.write(lin, 0, num);
        }
    }
}

private void startRecordAndPlay()
{
    record.startRecording();
    track.play();
    isRecording = true;
}

private void stopRecordAndPlay()
{
    record.stop();
    track.pause();
    isRecording = false;
}
I was researching the same thing; I'm developing a microphone amplifier as a mobile app.
You can check this link. Good luck, buddy!
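If you want to stay with the javax.sound.sampled API from the question instead of Android, the same read-then-write idea would look roughly like this (an untested sketch, not part of my app; it assumes the chosen AudioFormat is supported by both an input and an output line, which is why I use 16-bit here, since many mixers do not expose 32-bit PCM lines):

// Sketch: continuous capture-and-playback loop with javax.sound.sampled.
AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, true);
TargetDataLine mic = AudioSystem.getTargetDataLine(format);     // microphone
SourceDataLine speaker = AudioSystem.getSourceDataLine(format); // headphones / speakers
mic.open(format);
speaker.open(format);
mic.start();
speaker.start();
byte[] buffer = new byte[4096];
while (running) {                                // `running` is whatever stop flag you use
    int n = mic.read(buffer, 0, buffer.length);  // blocks until the requested bytes are read
    if (n > 0) {
        speaker.write(buffer, 0, n);             // feed the captured bytes straight to the output
    }
}
mic.stop();
speaker.drain();
speaker.stop();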
Source: https://stackoverflow.com/questions/24519764/real-time-microphone-audio-playback