Full-duplex voice

Date: 2015-03-22 17:23:21

Tags: android audio udp audio-streaming

I want to build full-duplex voice transmission over UDP, but I am getting a lot of noise, latency, and echo. I am posting the code below and would appreciate any help; thanks in advance. Part of the code is adapted from Streaming voice between Android Phones over WiFi. (A sketch of the loop changes I am currently experimenting with follows the full code.)

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.support.v7.app.ActionBarActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Arrays;

public class MainActivity extends ActionBarActivity {
private EditText ip;
private TextView streamingLabel;
private Button Call,Answer,End;

// public byte[] buffer;
public static DatagramSocket socket;
private int sendingport=50005;
private int recievingport=50006;//which port??
AudioRecord recorder;

//Audio Configuration.
private int sampleRate = 11025 ;      //How much will be ideal?
private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
AudioTrack speaker;
private boolean status = true;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    ip = (EditText) findViewById (R.id.editText);
    streamingLabel = (TextView) findViewById(R.id.textView);
    Call = (Button) findViewById (R.id.button);
    Answer = (Button) findViewById (R.id.button2);
    End = (Button) findViewById (R.id.button3);

    streamingLabel.setText("Press Start! to begin");

    Call.setOnClickListener (startListener);
    End.setOnClickListener (stopListener);
    Answer.setOnClickListener(receiveListener);
}

private final View.OnClickListener stopListener = new View.OnClickListener() {

    @Override
    public void onClick(View arg0) {
        status = false;
        recorder.release();
        Log.d("VS", "Recorder released");
    }

};
private final View.OnClickListener receiveListener = new View.OnClickListener() {

    @Override
    public void onClick(View arg0) {
        status = true;
       Log.d("Note","RecordAudio");
        startReceiving();
     RecordAudio t=new RecordAudio();
        t.execute();
    }

};

private final View.OnClickListener startListener = new View.OnClickListener() {

    @Override
    public void onClick(View arg0) {
        status = true;
        startStreaming();
        PlayAudio a=new PlayAudio();
        a.execute();

    }

};

public void startStreaming() {


    Thread streamThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {


                int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
                DatagramSocket socket = new DatagramSocket();
                Log.d("VS", "Socket Created");
                byte[] buffer = new byte[minBufSize*10];

                Log.d("VS","Buffer created of size " + minBufSize);
                DatagramPacket packet;

                final InetAddress destination = InetAddress.getByName(ip.getText().toString());
                Log.d("VS", "Address retrieved");


                recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRate,channelConfig,audioFormat,minBufSize);
                Log.d("VS", "Recorder initialized");
                //  speaker = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,(minBufSize*10),AudioTrack.MODE_STREAM);



                recorder.startRecording();
                // speaker.play();

                while(status == true) {


                    //reading data from MIC into buffer
                    minBufSize = recorder.read(buffer, 0, buffer.length);
                    Log.d("VS", "Reading");
                    //putting buffer in the packet
                    packet = new DatagramPacket (buffer,buffer.length,destination,sendingport);
                    Log.d("VS", "creating packet");
                    //   speaker.write(buffer, 0, minBufSize);
                    socket.send(packet);
                    //Arrays.fill(buffer, Byte.parseByte(null));

                }



            } catch(UnknownHostException e) {
                Log.e("VS", "UnknownHostException");
            }
            catch (Throwable t) {
                Log.e("AudioTrack", "Playback Failed");
            }
       /*    catch (IOException e) {
            Log.e("VS", "IOException");
           }*/


        }

    });
    streamThread.start();
}
private class PlayAudio extends AsyncTask<Void, Integer, Void> {
    @Override
    protected Void doInBackground(Void... params) {

            try {

                DatagramSocket recievingsocket = new DatagramSocket(recievingport);
                Log.d("VR", "Socket Created");

                int minBufSize1 = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);

                byte[] buffer1 = new byte[minBufSize1*10];


                //minimum buffer size. need to be careful. might cause problems. try setting manually if any problems faced
                //  int minBufSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);

                speaker = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,(minBufSize1*10),AudioTrack.MODE_STREAM);

                speaker.play();
                while (status == true) {
                    try {


                        DatagramPacket packet1 = new DatagramPacket(buffer1,buffer1.length);
                        recievingsocket.receive(packet1);
                        Log.d("VR", "Packet Received");

                        //reading content from packet
                        buffer1=packet1.getData();
                        Log.d("VR", "Packet data read into buffer");

                        //sending data to the Audiotrack obj i.e. speaker
                        speaker.write(buffer1, 0, minBufSize1);
                        Log.d("VR", "Writing buffer content to speaker");
                     //   Arrays.fill(buffer1, Byte.parseByte(null));
                    } catch(IOException e) {
                        Log.e("VR","IOException");
                    }
                    catch(Throwable t){
                        Log.e("AudioTrack", "Playback Failed");
                    }
                }


            } catch (SocketException e) {
                Log.e("VR", "SocketException"+e.getMessage());
            }

            return null;
        }

}

public void startReceiving() {

    Thread receiveThread = new Thread (new Runnable() {

        @Override
        public void run() {

            try {

                DatagramSocket socket = new DatagramSocket(sendingport);
                Log.d("VR", "Socket Created");

                int minBufSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);

                byte[] buffer = new byte[minBufSize*10];


                //minimum buffer size. need to be careful. might cause problems. try setting manually if any problems faced
                //  int minBufSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);

                speaker = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,(minBufSize*10),AudioTrack.MODE_STREAM);

                speaker.play();
                while(status == true) {
                    try {


                        DatagramPacket packet = new DatagramPacket(buffer,buffer.length);
                        socket.receive(packet);
                        Log.d("VR", "Packet Received");

                        //reading content from packet
                        buffer=packet.getData();
                        Log.d("VR", "Packet data read into buffer");

                        //sending data to the Audiotrack obj i.e. speaker
                        speaker.write(buffer, 0, minBufSize);
                        Log.d("VR", "Writing buffer content to speaker");
                      //  Arrays.fill(buffer, Byte.parseByte(null));
                    } catch(IOException e) {
                        Log.e("VR","IOException");
                    }
                    catch(Throwable t){
                        Log.e("AudioTrack", "Playback Failed");
                    }
                }


            } catch (SocketException e) {
                Log.e("VR", "SocketException"+e.getMessage());
            }


        }

    });
    receiveThread.start();
}
private class RecordAudio extends AsyncTask<Void, Integer, Void> {
    @Override
    protected Void doInBackground(Void... params) {
        try {


            int minBufSize1 = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
            DatagramSocket sendingsocket = new DatagramSocket();
            Log.d("VS", "Socket Created");
            byte[] buffer1 = new byte[minBufSize1*10];

            Log.d("VS", "Buffer created of size " + minBufSize1);
            DatagramPacket packet1;

            final InetAddress destination = InetAddress.getByName(ip.getText().toString());
            Log.d("VS", "Address retrieved");


            recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate, channelConfig, audioFormat, minBufSize1);
            Log.d("VS", "Recorder initialized");
            //  speaker = new AudioTrack(AudioManager.STREAM_MUSIC,sampleRate,channelConfig,audioFormat,(minBufSize*10),AudioTrack.MODE_STREAM);


            recorder.startRecording();
            // speaker.play();

            while (status == true) {


                //reading data from MIC into buffer
                minBufSize1 = recorder.read(buffer1, 0, buffer1.length);
                Log.d("VS", "Reading");
                //putting buffer in the packet
                packet1 = new DatagramPacket(buffer1, buffer1.length, destination, recievingport);
                Log.d("VS", "creating packet");
                //   speaker.write(buffer, 0, minBufSize);
                sendingsocket.send(packet1);
               // Arrays.fill(buffer1, Byte.parseByte(null));

            }


        } catch (UnknownHostException e) {
            Log.e("VS", "UnknownHostException");
        } catch (Throwable t) {
            Log.e("AudioTrack", "Playback Failed");
        }
       /*    catch (IOException e) {
            Log.e("VS", "IOException");
           }*/

        return null;
    }
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
}

@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Handle action bar item clicks here. The action bar will
    // automatically handle clicks on the Home/Up button, so long
    // as you specify a parent activity in AndroidManifest.xml.
    int id = item.getItemId();

    //noinspection SimplifiableIfStatement
    if (id == R.id.action_settings) {
        return true;
    }

    return super.onOptionsItemSelected(item);
}
}
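
For reference, here is a minimal, untested sketch of the send/receive loops I am experimenting with. It keeps the same fields as the code above (sampleRate, status) but sends only the bytes actually read from the AudioRecord and plays back only packet.getLength() bytes per datagram, instead of the whole 10x buffer. The MediaRecorder.AudioSource.VOICE_COMMUNICATION source, AudioManager.STREAM_VOICE_CALL stream, and android.media.audiofx.AcousticEchoCanceler (API 16+) are my assumptions for reducing the echo, not something the code above already uses:

    // Extra import needed on top of the ones above: android.media.audiofx.AcousticEchoCanceler

    // Sender side: read from the mic and send only the bytes actually read.
    private void sendLoop(InetAddress destination, int port) throws IOException {
        int minBuf = AudioRecord.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        // VOICE_COMMUNICATION asks the platform for a telephony-tuned capture path (my assumption).
        AudioRecord rec = new AudioRecord(MediaRecorder.AudioSource.VOICE_COMMUNICATION,
                sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf * 4);
        // Hardware echo canceller, where the device provides one (API 16+).
        if (AcousticEchoCanceler.isAvailable()) {
            AcousticEchoCanceler aec = AcousticEchoCanceler.create(rec.getAudioSessionId());
            if (aec != null) aec.setEnabled(true);
        }
        DatagramSocket socket = new DatagramSocket();
        byte[] buf = new byte[minBuf];          // small packets keep latency down
        rec.startRecording();
        while (status) {
            int read = rec.read(buf, 0, buf.length);
            if (read > 0) {
                // Send exactly 'read' bytes, not the whole (mostly stale) buffer.
                socket.send(new DatagramPacket(buf, 0, read, destination, port));
            }
        }
        rec.stop();
        rec.release();
        socket.close();
    }

    // Receiver side: play back only the bytes that arrived in each packet.
    private void receiveLoop(int port) throws IOException {
        int minBuf = AudioTrack.getMinBufferSize(sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_VOICE_CALL, sampleRate,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBuf * 4, AudioTrack.MODE_STREAM);
        DatagramSocket socket = new DatagramSocket(port);
        byte[] buf = new byte[minBuf * 4];
        track.play();
        while (status) {
            DatagramPacket packet = new DatagramPacket(buf, buf.length);
            socket.receive(packet);
            // Write only packet.getLength() bytes; writing a fixed size replays old data as noise.
            track.write(packet.getData(), 0, packet.getLength());
        }
        track.stop();
        track.release();
        socket.close();
    }

Each side would send to the port the other side listens on, the same way the Call/Answer handlers above pair 50005 and 50006.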

0 Answers:

No answers yet