Java - Stream Live Android Audio to Server

I am currently trying to stream live microphone audio from an Android device to a Java program. I started by sending the live audio between two Android devices to confirm that my approach was correct; the audio could be heard perfectly on the receiving device with almost no delay. Next, I sent the same audio stream to a small Java program and verified that the data was also arriving there correctly. What I want to do now is encode this data and somehow play it back on the server that runs the Java program. I would prefer to play it in a web browser using HTML5 or JavaScript, but alternatives such as VLC would also work.

Here is the code for the Android app that sends the live microphone audio:

public class MainActivity extends Activity {


private Button startButton,stopButton;

public byte[] buffer;
public static DatagramSocket socket;
    AudioRecord recorder;

private int sampleRate = 44100;   
private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;    
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;       
int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    private boolean status = true;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

     startButton = (Button) findViewById (R.id.start_button);
     stopButton = (Button) findViewById (R.id.stop_button);

     startButton.setOnClickListener(startListener);
     stopButton.setOnClickListener(stopListener);

     minBufSize += 2048;
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.main, menu);
    return true;
}

private final OnClickListener stopListener = new OnClickListener() {

    @Override
    public void onClick(View arg0) {
                status = false;
                recorder.release();
                Log.d("VS","Recorder released");
    }
};

private final OnClickListener startListener = new OnClickListener() {

    @Override
    public void onClick(View arg0) {
                status = true;
                startStreaming();           
    }
};



public void startStreaming()
{
    Thread streamThread = new Thread(new Runnable(){
        @Override
        public void run()
        {
            try{

                DatagramSocket socket = new DatagramSocket();
                Log.d("VS", "Socket Created");

                byte[] buffer = new byte[minBufSize];

                Log.d("VS","Buffer created of size " + minBufSize);


                Log.d("VS", "Address retrieved");
                recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRate,channelConfig,audioFormat,minBufSize);
                Log.d("VS", "Recorder initialized");


                recorder.startRecording();


                InetAddress IPAddress = InetAddress.getByName("192.168.1.5");
                byte[] sendData = new byte[1024];
                byte[] receiveData = new byte[1024];


                while (status == true)
                {
                    DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 50005);
                    socket.send(sendPacket);
                }

            } catch(UnknownHostException e) {
                Log.e("VS", "UnknownHostException");
            } catch (IOException e) {
                Log.e("VS", "IOException");
                e.printStackTrace();
            } 


        }

    });
    streamThread.start();
}
}

Here is the code for the Java program that reads the data:

class Server
{
   public static void main(String args[]) throws Exception
      {
         DatagramSocket serverSocket = new DatagramSocket(50005);
            byte[] receiveData = new byte[1024];
            byte[] sendData = new byte[1024];
            while(true)
               {
                  DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);



              serverSocket.receive(receivePacket);
              // build the string from the actual packet contents (getData().toString() would only print the array reference)
              String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());

              System.out.println("RECEIVED: " + sentence);
           }
  }
}

I know I should encode the audio on the app side before sending it to the Java program, but I am not sure how to do the encoding while using AudioRecord. I would rather not use the NDK, since I have no experience with it and no real time to learn how to use it.
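For reference, a rough sketch of one NDK-free route: compressing the PCM produced by AudioRecord into AAC with the android.media.MediaCodec API. The class name, bit rate, and timeout values below are illustrative placeholders and not code from the original post; handling of INFO_OUTPUT_FORMAT_CHANGED and errors is omitted.

import java.nio.ByteBuffer;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

public class AacEncoderSketch {

    private MediaCodec encoder;

    public void start(int sampleRate) throws Exception {
        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", sampleRate, 1);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 64000); // placeholder bit rate
        encoder = MediaCodec.createEncoderByType("audio/mp4a-latm");
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();
    }

    /** Feed one buffer of raw PCM (as returned by AudioRecord.read) and drain the encoded AAC frames. */
    public void encode(byte[] pcm, int length) {
        int inIndex = encoder.dequeueInputBuffer(10000);
        if (inIndex >= 0) {
            ByteBuffer in = encoder.getInputBuffer(inIndex); // API 21+; use getInputBuffers() on older versions
            in.clear();
            in.put(pcm, 0, length);
            encoder.queueInputBuffer(inIndex, 0, length, System.nanoTime() / 1000, 0);
        }

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outIndex = encoder.dequeueOutputBuffer(info, 0);
        while (outIndex >= 0) {
            ByteBuffer out = encoder.getOutputBuffer(outIndex);
            byte[] encoded = new byte[info.size];
            out.get(encoded);
            // 'encoded' now holds one compressed frame; this is what would go into the DatagramPacket
            encoder.releaseOutputBuffer(outIndex, false);
            outIndex = encoder.dequeueOutputBuffer(info, 0);
        }
    }
}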

chuckliddell0 asked 2019-11-08T12:47:13Z
3 Answers
56 votes

So I got my problem solved. The problem was mainly on the receiving side. The receiver takes in the audio stream and pushes it out to the PC's speakers. The resulting voice is still quite laggy and broken, but it works nonetheless. Experimenting with the buffer size may improve this.

EDIT: Use a thread to read the audio in order to avoid lag. Also, it is better to use a sample rate of 16,000, since it is meant for voice.

Android code:

package com.example.mictest2;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.UnknownHostException;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;

public class Send extends Activity {
private Button startButton,stopButton;

public byte[] buffer;
public static DatagramSocket socket;
private int port=50005;

AudioRecord recorder;

private int sampleRate = 16000 ; // 44100 for music
private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;    
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;       
int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
private boolean status = true;


@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    startButton = (Button) findViewById (R.id.start_button);
    stopButton = (Button) findViewById (R.id.stop_button);

    startButton.setOnClickListener (startListener);
    stopButton.setOnClickListener (stopListener);

}

private final OnClickListener stopListener = new OnClickListener() {

    @Override
    public void onClick(View arg0) {
                status = false;
                recorder.release();
                Log.d("VS","Recorder released");
    }

};

private final OnClickListener startListener = new OnClickListener() {

    @Override
    public void onClick(View arg0) {
                status = true;
                startStreaming();           
    }

};

public void startStreaming() {


    Thread streamThread = new Thread(new Runnable() {

        @Override
        public void run() {
            try {

                DatagramSocket socket = new DatagramSocket();
                Log.d("VS", "Socket Created");

                byte[] buffer = new byte[minBufSize];

                Log.d("VS","Buffer created of size " + minBufSize);
                DatagramPacket packet;

                final InetAddress destination = InetAddress.getByName("192.168.1.5");
                Log.d("VS", "Address retrieved");


                recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRate,channelConfig,audioFormat,minBufSize*10);
                Log.d("VS", "Recorder initialized");

                recorder.startRecording();


                while(status == true) {


                    //reading data from MIC into buffer
                    minBufSize = recorder.read(buffer, 0, buffer.length);

                    //putting buffer in the packet
                    packet = new DatagramPacket (buffer,buffer.length,destination,port);

                    socket.send(packet);
                    System.out.println("MinBufferSize: " +minBufSize);


                }



            } catch(UnknownHostException e) {
                Log.e("VS", "UnknownHostException");
            } catch (IOException e) {
                e.printStackTrace();
                Log.e("VS", "IOException");
            } 
        }

    });
    streamThread.start();
 }
 }

Android XML:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".MainActivity" >

<TextView
    android:id="@+id/textView1"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:text="@string/hello_world" />

<Button
    android:id="@+id/start_button"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:layout_below="@+id/textView1"
    android:layout_centerHorizontal="true"
    android:layout_marginTop="130dp"
    android:text="Start" />

<Button
    android:id="@+id/stop_button"
    android:layout_width="wrap_content"
    android:layout_height="wrap_content"
    android:layout_alignLeft="@+id/button1"
    android:layout_below="@+id/button1"
    android:layout_marginTop="64dp"
    android:text="Stop" />

</RelativeLayout>

Server code:

package com.datagram;

import java.io.ByteArrayInputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

class Server {

AudioInputStream audioInputStream;
static AudioInputStream ais;
static AudioFormat format;
static boolean status = true;
static int port = 50005;
static int sampleRate = 44100;

public static void main(String args[]) throws Exception {


    DatagramSocket serverSocket = new DatagramSocket(50005);


    byte[] receiveData = new byte[1280]; 
    // ( 1280 for 16 000Hz and 3584 for 44 100Hz (use AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat) to get the correct size)

    format = new AudioFormat(sampleRate, 16, 1, true, false);

    while (status == true) {
        DatagramPacket receivePacket = new DatagramPacket(receiveData,
                receiveData.length);

        serverSocket.receive(receivePacket);

        ByteArrayInputStream baiss = new ByteArrayInputStream(
                receivePacket.getData());

        ais = new AudioInputStream(baiss, format, receivePacket.getLength());

        // playing each packet on its own thread keeps the receive loop from blocking (helps with the chunky audio)
        new Thread(new Runnable() {
            @Override
            public void run() {
                toSpeaker(receivePacket.getData());
            }
        }).start();
    }
}

public static void toSpeaker(byte soundbytes[]) {
    try {

        DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
        SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);

        sourceDataLine.open(format);

        FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
        // keep the gain inside the control's valid range (a fixed 100.0f would throw IllegalArgumentException)
        volumeControl.setValue(volumeControl.getMaximum());

        sourceDataLine.start();

        System.out.println("format? :" + sourceDataLine.getFormat());

        sourceDataLine.write(soundbytes, 0, soundbytes.length);
        System.out.println(soundbytes.toString());
        sourceDataLine.drain();
        sourceDataLine.close();
    } catch (Exception e) {
        System.out.println("Not working in speakers...");
        e.printStackTrace();
    }
}
}

I hope this helps save someone a few hours of pain. :)

chuckliddell0 answered 2019-11-08T12:48:06Z
14 votes

My 2 cents to your code to improve the efficiency. Nice try.

package com.datagram;

import java.io.ByteArrayInputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

class Server {

AudioInputStream audioInputStream;
static AudioInputStream ais;
static AudioFormat format;
static boolean status = true;
static int port = 50005;
static int sampleRate = 44100;

static DataLine.Info dataLineInfo;
static SourceDataLine sourceDataLine;

public static void main(String args[]) throws Exception {

    DatagramSocket serverSocket = new DatagramSocket(port);

    /**
     * Formula for lag = (byte_size/sample_rate)*2
     * Byte size 9728 will produce ~ 0.45 seconds of lag. Voice slightly broken.
     * Byte size 1400 will produce ~ 0.06 seconds of lag. Voice extremely broken.
     * Byte size 4000 will produce ~ 0.18 seconds of lag. Voice slightly more broken then 9728.
     */

    byte[] receiveData = new byte[4096];

    format = new AudioFormat(sampleRate, 16, 1, true, false);
    dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
    sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
    sourceDataLine.open(format);
    sourceDataLine.start();

    FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
    volumeControl.setValue(1.00f);

    DatagramPacket receivePacket = new DatagramPacket(receiveData,
            receiveData.length);
    ByteArrayInputStream baiss = new ByteArrayInputStream(
            receivePacket.getData());
    while (status == true) {
        serverSocket.receive(receivePacket);
        ais = new AudioInputStream(baiss, format, receivePacket.getLength());
        toSpeaker(receivePacket.getData());
    }
    sourceDataLine.drain();
    sourceDataLine.close();
}

    public static void toSpeaker(byte soundbytes[]) {
        try {
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
            e.printStackTrace();
        }
    }
}
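As a quick sanity check of the lag formula quoted in the comment above, this throwaway snippet (not part of the original answer) simply evaluates lag = (byte_size / sample_rate) * 2 for the three buffer sizes mentioned:

public class LagEstimate {
    public static void main(String[] args) {
        int sampleRate = 44100;
        int[] bufferSizes = {1400, 4000, 9728};
        for (int bytes : bufferSizes) {
            // formula as stated in the answer's comment
            double lag = (bytes / (double) sampleRate) * 2;
            System.out.printf("%5d bytes -> ~%.2f s of lag%n", bytes, lag);
        }
    }
}

This prints roughly 0.06 s, 0.18 s, and 0.44 s, matching the values listed in the comment.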
user1729564 answered 2019-11-08T12:48:31Z
4 votes

The sound is broken because of the following line in your Android code:

minBufSize += 2048;

You are just adding empty bytes. Also, use CHANNEL_IN_MONO instead of CHANNEL_CONFIGURATION_MONO.
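A minimal sketch of the question's streaming loop with both suggestions applied (the class name FixedStreamer and the host/port parameters are illustrative, not from the original post):

import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class FixedStreamer {

    private volatile boolean streaming = true;

    public void stream(String host, int port) throws Exception {
        int sampleRate = 16000;
        int channelConfig = AudioFormat.CHANNEL_IN_MONO;   // instead of the deprecated CHANNEL_CONFIGURATION_MONO
        int audioFormat = AudioFormat.ENCODING_PCM_16BIT;

        // use getMinBufferSize as-is, without padding it with 2048 extra bytes
        int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
        byte[] buffer = new byte[minBufSize];

        DatagramSocket socket = new DatagramSocket();
        InetAddress destination = InetAddress.getByName(host);

        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                sampleRate, channelConfig, audioFormat, minBufSize);
        recorder.startRecording();

        while (streaming) {
            int read = recorder.read(buffer, 0, buffer.length);
            // send only the bytes that were actually read, so no empty bytes go on the wire
            socket.send(new DatagramPacket(buffer, read, destination, port));
        }
    }
}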

Tareq answered 2019-11-08T12:49:04Z
Translated from https://stackoverflow.com/questions/15349987/stream-live-android-audio-to-server