android 手机一边录音一边播放 仿yy的试听功能

一 android中AudioRecord采集音频的参数说明

 

android中采集音频的API是android.media.AudioRecord

其中构造器的几个参数就是标准的声音采集参数

以下是参数的含义解释

public AudioRecord (int audioSource, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes)

Since: API Level 3

Class constructor.

Parameters

audioSource

the recording source. See MediaRecorder.AudioSource for recording source definitions.

音频源:指的是从哪里采集音频。这里我们当然是从麦克风采集音频,所以此参数的值为MIC

sampleRateInHz

the sample rate expressed in Hertz. Examples of rates are (but not limited to) 44100, 22050 and 11025.

采样率:音频的采样频率,每秒钟能够采样的次数,采样率越高,音质越高。给出的示例是44100、22050、11025,但不限于这几个参数。例如要采集低质量的音频就可以使用4000、8000等低采样率。

channelConfig

describes the configuration of the audio channels. See CHANNEL_IN_MONO and CHANNEL_IN_STEREO

声道设置:android支持双声道立体声和单声道。MONO单声道,STEREO立体声

audioFormat

the format in which the audio data is represented. See ENCODING_PCM_16BIT and ENCODING_PCM_8BIT

编码制式和采样大小:采集来的数据当然使用PCM编码(脉冲代码调制编码,即PCM编码。PCM通过抽样、量化、编码三个步骤将连续变化的模拟信号转换为数字编码。) android支持的采样大小16bit 或者8bit。当然采样大小越大,那么信息量越多,音质也越高,现在主流的采样大小都是16bit,在低质量的语音传输的时候8bit足够了。

bufferSizeInBytes

the total size (in bytes) of the buffer where audio data is written to during the recording. New audio data can be read from this buffer in smaller chunks than this size. See getMinBufferSize(int, int, int) to determine the minimum required buffer size for the successful creation of an AudioRecord instance. Using values smaller than getMinBufferSize() will result in an initialization failure.

采集数据需要的缓冲区的大小,如果不知道最小需要的大小可以在getMinBufferSize()查看。

二 代码

package com.example.superb.yy4;

import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;

import org.w3c.dom.Text;

public class MainActivity extends Activity {
    // Pipe that carries raw PCM data from the recorder (mAudio) to the pump thread below.
    PipedInputStream in;
    // Whether we are currently recording/playing; also terminates the pump loop.
    volatile boolean isRrcord;
    mAudio mm;
    mAudioPlayer m;

    TextView T1, T2;
    Button btn;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        btn = findViewById(R.id.search_close_btn);

        T1 = findViewById(R.id.dddd);
        isRrcord = false;
    }

    /**
     * Button click handler (wired via android:onClick in the layout).
     * Toggles between "record + live monitor" and stopped.
     */
    public void btnclick(View v) {
        if (isRrcord) {
            isRrcord = false;
            // mm and m are created on background threads in startRecord(); they may
            // still be null if the user taps "stop" immediately after "start".
            if (mm != null) {
                mm.stopRecord();
            }
            if (m != null) {
                m.stopPlay();
            }
            btn.setText("开始");
            T1.setText("点击开始");

        } else {
            isRrcord = true;
            startRecord();

            btn.setText("停止");

            T1.setText("点击停止");

        }
    }

    /**
     * Starts three threads:
     *  1. a recorder thread that captures microphone PCM into the pipe `in`,
     *  2. a pump thread that copies data from `in` into the player's pipe,
     *  3. a playback thread (spawned by the pump) that feeds AudioTrack.
     */
    private void startRecord() {
        in = new PipedInputStream();
        // Recorder thread: blocks inside StartAudioData() until stopRecord().
        new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    mm = new mAudio(MainActivity.this, in);
                    mm.StartAudioData();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }).start();
        // Pump thread: copies recorded bytes into the player's pipe.
        new Thread(new Runnable() {
            @Override
            public void run() {
                byte[] buffer = new byte[1024];
                PipedOutputStream pout = new PipedOutputStream();
                m = new mAudioPlayer();
                try {
                    m.setOutputStream(pout);
                    // Playback thread: blocks inside startPlayAudio() until stopPlay().
                    new Thread(new Runnable() {

                        @Override
                        public void run() {
                            m.startPlayAudio();
                        }
                    }).start();
                } catch (IOException e1) {
                    e1.printStackTrace();
                    return; // without the player pipe there is nothing to pump into
                }
                try {
                    // The original looped on while(true), so this thread never exited
                    // and kept spinning a CPU core; bound it to the recording flag and
                    // yield briefly when the pipe is empty.
                    while (isRrcord) {
                        while (in.available() > 0) {
                            int size = in.read(buffer);
                            if (size > 0) {
                                pout.write(buffer, 0, size);
                            }
                        }
                        Thread.sleep(10);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            }
        }).start();
    }

}

package com.example.superb.yy4;

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

/*
 * Captures audio from the microphone with AudioRecord and streams the raw
 * PCM bytes out through a pipe for a consumer (e.g. a live-monitor player).
 */
public class mAudio {
    private AudioRecord audioRecord;
    private Context context;
    // volatile: set from the UI thread in stopRecord(), read by the recording loop.
    private volatile boolean isRecording = false;
    private PipedOutputStream outstream; // 利用管道传输数据 — pipe used to hand PCM data to the consumer

    /**
     * @param context  caller's context (stored; not used by the current code)
     * @param instream pipe end the consumer will read the recorded PCM data from
     * @throws IOException if the pipe ends cannot be connected
     */
    public mAudio(Context context, PipedInputStream instream) throws IOException {
        this.context = context;
        // Connect our output end of the pipe to the caller's input end.
        outstream = new PipedOutputStream();
        outstream.connect(instream);
    }

    /**
     * Records from the MIC and writes PCM frames into the pipe.
     * Blocks until stopRecord() is called (or the pipe is closed by the reader).
     */
    public void StartAudioData() {
        int frequency = 11025; // sample rate in Hz; low quality is enough for voice

        @SuppressWarnings("deprecation")
        int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO; // 立体声录制通道
        int audioEncoding = AudioFormat.ENCODING_PCM_16BIT; // 16-bit PCM samples

        // Minimum buffer AudioRecord needs for these parameters; smaller values
        // would make the constructor fail to initialize.
        int buffersize = AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);

        // 音频源MIC:从麦克风采集音频
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                frequency, channelConfiguration, audioEncoding, buffersize);

        byte[] buffer = new byte[buffersize];
        audioRecord.startRecording(); // 开始录音
        isRecording = true;
        while (isRecording) {
            // BUGFIX: the original ignored read()'s return value and always wrote a
            // fixed 1024 bytes, which could push stale/garbage bytes into the pipe
            // and silently ignored error codes (read() returns <= 0 on failure).
            int read = audioRecord.read(buffer, 0, buffer.length);
            if (read <= 0) {
                continue; // no data yet, or ERROR_INVALID_OPERATION / ERROR_BAD_VALUE
            }
            try {
                outstream.write(buffer, 0, read);
            } catch (IOException e) {
                e.printStackTrace();
                break; // reader closed the pipe; no point recording further
            }
        }
    }

    /** Stops the recording loop and releases the pipe. 停止录音 */
    public void stopRecord() {
        isRecording = false;
        // Guard: StartAudioData() runs on another thread and may not have
        // created audioRecord yet when the user taps "stop" immediately.
        if (audioRecord != null) {
            audioRecord.stop();
        }
        try {
            outstream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}

package com.example.superb.yy4;

import java.io.IOException;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class mAudioPlayer {
private PipedInputStream instream;
private boolean isPlaying ;
private AudioTrack audioplayer;
private byte[] buffer;
public mAudioPlayer() {
isPlaying = false;
instream = null;
//初始化播音类
@SuppressWarnings(“deprecation”)
int bufsize = AudioTrack.getMinBufferSize(11025, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
audioplayer = new AudioTrack(AudioManager.STREAM_MUSIC, 11025, AudioFormat.CHANNEL_CONFIGURATION_STEREO,
AudioFormat.ENCODING_PCM_16BIT, bufsize,AudioTrack.MODE_STREAM);
}
//设置管道流,用于接受音频数据
public void setOutputStream(PipedOutputStream out) throws IOException{
instream = new PipedInputStream(out);

}
public void startPlayAudio(){ //调用之前先调用setOutputStream 函数
    isPlaying = true;
    audioplayer.play();//开始接受数据流播放
    buffer = new byte[1024];
    while (instream!=null&&isPlaying){
        try {
            while (instream.available()>0){
                int size = instream.read(buffer);
                audioplayer.write(buffer, 0
                        , size);//不断播放数据
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
public void stopPlay(){//停止播放
    isPlaying = false ;
    try {
        instream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
    audioplayer.stop();
}

}
参考:
https://www.cnblogs.com/nanguabing/archive/2012/12/16/2820732.html
http://www.cnblogs.com/mythou/p/3242000.html

下载demo

猜你喜欢

转载自blog.csdn.net/SATTSA/article/details/89360185