Android 推送手机音频数据到RTMP服务器 AACtoRTMP
Android平台上使用AudioRecord采集音频数据,通过MediaCodec对音频实施硬编码,回调aac数据流,然后通过librtmp推送到服务器。本案例仅实现了RTMP推流,RTSP的照搬即可。以下是部分代码,完整代码可以看源码链接:https://github.com/printlybyte/AndroidPCMtoAAC_RTMP_RTSP
推流器:DefaultRtmpPublisher
/**
 * Default {@link RtmpPublisher} implementation.
 *
 * <p>A thin facade that forwards every call, unchanged, to a single
 * {@link RtmpConnection}. Connection state, authentication and packet
 * transport are all handled by the delegate.
 */
public class DefaultRtmpPublisher implements RtmpPublisher {

  /** Delegate that owns the socket and the RTMP session. */
  private final RtmpConnection rtmpConnection;

  /**
   * @param connectCheckerRtmp callback notified of connection/auth events;
   *     handed directly to the underlying {@link RtmpConnection}.
   */
  public DefaultRtmpPublisher(ConnectCheckerRtmp connectCheckerRtmp) {
    rtmpConnection = new RtmpConnection(connectCheckerRtmp);
  }

  @Override
  public boolean connect(String url) {
    return rtmpConnection.connect(url);
  }

  @Override
  public boolean publish(String publishType) {
    return rtmpConnection.publish(publishType);
  }

  @Override
  public void publishAudioData(byte[] data, int size, int dts) {
    rtmpConnection.publishAudioData(data, size, dts);
  }

  @Override
  public void publishVideoData(byte[] data, int size, int dts) {
    rtmpConnection.publishVideoData(data, size, dts);
  }

  @Override
  public void setVideoResolution(int width, int height) {
    rtmpConnection.setVideoResolution(width, height);
  }

  @Override
  public void setAuthorization(String user, String password) {
    rtmpConnection.setAuthorization(user, password);
  }

  @Override
  public void close() {
    rtmpConnection.close();
  }
}
连接器:RtmpConnection
/**
 * RTMP client connection.
 *
 * <p>Performs the RTMP handshake, then the "connect" / "createStream" / "publish"
 * command sequence (with optional Adobe-style authmod authentication), and finally
 * streams audio/video packets to the server.
 *
 * <p>Thread model: {@link #connect(String)} and {@link #publish(String)} block the
 * calling thread; a dedicated rx thread ({@link #handleRxPacketLoop()}) reads server
 * packets and signals progress through {@code connectingLock} / {@code publishLock}.
 */
public class RtmpConnection implements RtmpPublisher {
  private static final String TAG = "RtmpConnection";
  private static final Pattern rtmpUrlPattern =
      Pattern.compile("^rtmp://([^/:]+)(:(\\d+))*/([^/]+)(/(.*))*$");
  private static final Pattern rtmpsUrlPattern =
      Pattern.compile("^rtmps://([^/:]+)(:(\\d+))*/([^/]+)(/(.*))*$");

  private int port;
  private String host;
  private String appName;
  private String streamName;
  private String publishType;
  private String swfUrl;
  private String tcUrl;
  private String pageUrl;
  private Socket socket;
  // Last SocketException message reported; used to avoid spamming the callback
  // once per queued AV frame after the socket dies.
  private String socketExceptionCause = "";
  private RtmpSessionInfo rtmpSessionInfo;
  private RtmpDecoder rtmpDecoder;
  private BufferedInputStream inputStream;
  private BufferedOutputStream outputStream;
  private Thread rxPacketHandler;
  // Written by the rx thread, read by API callers: must be volatile.
  private volatile boolean connected = false;
  private volatile boolean publishPermitted = false;
  private final Object connectingLock = new Object();
  private final Object publishLock = new Object();
  private int currentStreamId = 0;
  private int transactionIdCounter = 0;
  private int videoWidth;
  private int videoHeight;
  private ConnectCheckerRtmp connectCheckerRtmp;
  // for secure transport (rtmps://)
  private boolean tlsEnabled;
  // for Adobe authmod authentication
  private String user = null;
  private String password = null;
  private String salt = null;
  private String challenge = null;
  private String opaque = null;
  private boolean onAuth = false;

  public RtmpConnection(ConnectCheckerRtmp connectCheckerRtmp) {
    this.connectCheckerRtmp = connectCheckerRtmp;
  }

  /** Performs the C0/C1/C2 - S0/S1/S2 RTMP handshake on the given streams. */
  private void handshake(InputStream in, OutputStream out) throws IOException {
    Handshake handshake = new Handshake();
    handshake.writeC0(out);
    handshake.writeC1(out); // Write C1 without waiting for S0
    out.flush();
    handshake.readS0(in);
    handshake.readS1(in);
    handshake.writeC2(out);
    out.flush();
    handshake.readS2(in);
  }

  /**
   * Opens the socket (plain or TLS), handshakes, starts the rx thread and sends the
   * AMF "connect" command. Blocks up to ~8s (3s socket + 5s command timeout).
   *
   * @param url endpoint of the form rtmp(s)://host[:port]/app[/streamName]
   * @return true once the server acknowledged the "connect" command
   */
  @Override
  public boolean connect(String url) {
    Matcher rtmpMatcher = rtmpUrlPattern.matcher(url);
    Matcher rtmpsMatcher = rtmpsUrlPattern.matcher(url);
    Matcher matcher;
    if (rtmpMatcher.matches()) {
      matcher = rtmpMatcher;
      tlsEnabled = false;
    } else if (rtmpsMatcher.matches()) {
      matcher = rtmpsMatcher;
      tlsEnabled = true;
    } else {
      connectCheckerRtmp.onConnectionFailedRtmp(
          "Endpoint malformed, should be: rtmp://ip:port/appname/streamname");
      return false;
    }
    tcUrl = url.substring(0, url.lastIndexOf('/'));
    swfUrl = "";
    pageUrl = "";
    host = matcher.group(1);
    String portStr = matcher.group(3);
    port = portStr != null ? Integer.parseInt(portStr) : 1935; // 1935 = RTMP default
    appName = matcher.group(4);
    streamName = matcher.group(6);
    // socket connection
    Log.d(TAG, "connect() called. Host: "
        + host
        + ", port: "
        + port
        + ", appName: "
        + appName
        + ", publishPath: "
        + streamName);
    rtmpSessionInfo = new RtmpSessionInfo();
    rtmpDecoder = new RtmpDecoder(rtmpSessionInfo);
    try {
      if (!tlsEnabled) {
        socket = new Socket();
        SocketAddress socketAddress = new InetSocketAddress(host, port);
        socket.connect(socketAddress, 3000);
      } else {
        socket = CreateSSLSocket.createSSlSocket(host, port);
        if (socket == null) throw new IOException("Socket creation failed");
      }
      inputStream = new BufferedInputStream(socket.getInputStream());
      outputStream = new BufferedOutputStream(socket.getOutputStream());
      Log.d(TAG, "connect(): socket connection established, doing handshake...");
      handshake(inputStream, outputStream);
      Log.d(TAG, "connect(): handshake done");
    } catch (IOException e) {
      Log.e(TAG, "Error", e);
      connectCheckerRtmp.onConnectionFailedRtmp("Connect error, " + e.getMessage());
      return false;
    }
    // Start the "main" handling thread
    rxPacketHandler = new Thread(new Runnable() {
      @Override
      public void run() {
        try {
          Log.d(TAG, "starting main rx handler loop");
          handleRxPacketLoop();
        } catch (IOException ex) {
          Logger.getLogger(RtmpConnection.class.getName()).log(Level.SEVERE, null, ex);
        }
      }
    });
    rxPacketHandler.start();
    return rtmpConnect();
  }

  /**
   * Sends the AMF "connect" invoke (plain or authmod first-step) and waits up to 5s
   * for the rx thread to flip {@code connected}.
   */
  private boolean rtmpConnect() {
    if (connected) {
      connectCheckerRtmp.onConnectionFailedRtmp("Already connected");
      return false;
    }
    if (user != null && password != null) {
      sendConnectAuthPacketUser(user);
    } else {
      // Mark session timestamp of all chunk stream information on connection.
      ChunkStreamInfo.markSessionTimestampTx();
      Log.d(TAG, "rtmpConnect(): Building 'connect' invoke packet");
      ChunkStreamInfo chunkStreamInfo =
          rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_OVER_CONNECTION);
      Command invoke = new Command("connect", ++transactionIdCounter, chunkStreamInfo);
      invoke.getHeader().setMessageStreamId(0);
      AmfObject args = new AmfObject();
      args.setProperty("app", appName);
      args.setProperty("flashVer", "FMLE/3.0 (compatible; Lavf57.56.101)");
      args.setProperty("swfUrl", swfUrl);
      args.setProperty("tcUrl", tcUrl);
      args.setProperty("fpad", false);
      args.setProperty("capabilities", 239);
      args.setProperty("audioCodecs", 3575);
      args.setProperty("videoCodecs", 252);
      args.setProperty("videoFunction", 1);
      args.setProperty("pageUrl", pageUrl);
      args.setProperty("objectEncoding", 0);
      invoke.addData(args);
      sendRtmpPacket(invoke);
    }
    synchronized (connectingLock) {
      try {
        connectingLock.wait(5000);
      } catch (InterruptedException ex) {
        // do nothing
      }
    }
    if (!connected) {
      shutdown(true);
      connectCheckerRtmp.onConnectionFailedRtmp("Fail to connect, time out");
    }
    return connected;
  }

  /** First step of Adobe authmod auth: "connect" with ?authmod=adobe&user=... */
  private void sendConnectAuthPacketUser(String user) {
    ChunkStreamInfo.markSessionTimestampTx();
    Log.d(TAG, "rtmpConnect(): Building 'connect' invoke packet");
    ChunkStreamInfo chunkStreamInfo =
        rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    Command invoke = new Command("connect", ++transactionIdCounter, chunkStreamInfo);
    invoke.getHeader().setMessageStreamId(0);
    AmfObject args = new AmfObject();
    args.setProperty("app", appName + "?authmod=adobe&user=" + user);
    args.setProperty("flashVer", "FMLE/3.0 (compatible; Lavf57.56.101)");
    args.setProperty("swfUrl", swfUrl);
    args.setProperty("tcUrl", tcUrl + "?authmod=adobe&user=" + user);
    args.setProperty("fpad", false);
    args.setProperty("capabilities", 239);
    args.setProperty("audioCodecs", 3575);
    args.setProperty("videoCodecs", 252);
    args.setProperty("videoFunction", 1);
    args.setProperty("pageUrl", pageUrl);
    args.setProperty("objectEncoding", 0);
    invoke.addData(args);
    sendRtmpPacket(invoke);
  }

  /**
   * Second step of Adobe authmod auth: reconnect with the challenge/response
   * computed from the salt/challenge/opaque the server sent in its "_error".
   */
  private void sendConnectAuthPacketFinal(String user, String password, String salt,
      String challenge, String opaque) {
    String challenge2 = String.format("%08x", new Random().nextInt());
    String response = Util.stringToMD5BASE64(user + salt + password);
    if (!opaque.isEmpty()) {
      response += opaque;
    } else if (!challenge.isEmpty()) {
      response += challenge;
    }
    response = Util.stringToMD5BASE64(response + challenge2);
    String result =
        "?authmod=adobe&user=" + user + "&challenge=" + challenge2 + "&response=" + response;
    if (!opaque.isEmpty()) {
      result += "&opaque=" + opaque;
    }
    ChunkStreamInfo.markSessionTimestampTx();
    Log.d(TAG, "rtmpConnect(): Building 'connect' invoke packet");
    ChunkStreamInfo chunkStreamInfo =
        rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    Command invoke = new Command("connect", ++transactionIdCounter, chunkStreamInfo);
    invoke.getHeader().setMessageStreamId(0);
    AmfObject args = new AmfObject();
    args.setProperty("app", appName + result);
    args.setProperty("flashVer", "FMLE/3.0 (compatible; Lavf57.56.101)");
    args.setProperty("swfUrl", swfUrl);
    args.setProperty("tcUrl", tcUrl + result);
    args.setProperty("fpad", false);
    args.setProperty("capabilities", 239);
    args.setProperty("audioCodecs", 3575);
    args.setProperty("videoCodecs", 252);
    args.setProperty("videoFunction", 1);
    args.setProperty("pageUrl", pageUrl);
    args.setProperty("objectEncoding", 0);
    invoke.addData(args);
    sendRtmpPacket(invoke);
  }

  /**
   * Requests a publish of the previously parsed stream name.
   *
   * @param type RTMP publish type, e.g. "live", "record" or "append"
   */
  @Override
  public boolean publish(String type) {
    if (type == null) {
      connectCheckerRtmp.onConnectionFailedRtmp("Null publish type");
      return false;
    }
    publishType = type;
    return createStream();
  }

  /**
   * Sends releaseStream + FCPublish + createStream and waits up to 5s for
   * "NetStream.Publish.Start" (signalled by the rx thread via publishLock).
   */
  private boolean createStream() {
    if (!connected || currentStreamId != 0) {
      connectCheckerRtmp.onConnectionFailedRtmp(
          "Create stream failed, connected= " + connected + ", StreamId= " + currentStreamId);
      return false;
    }
    Log.d(TAG, "createStream(): Sending releaseStream command...");
    // transactionId == 2
    Command releaseStream = new Command("releaseStream", ++transactionIdCounter);
    releaseStream.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    releaseStream.addData(new AmfNull()); // command object: null for "releaseStream"
    releaseStream.addData(streamName); // stream name argument
    sendRtmpPacket(releaseStream);
    Log.d(TAG, "createStream(): Sending FCPublish command...");
    // transactionId == 3
    Command FCPublish = new Command("FCPublish", ++transactionIdCounter);
    FCPublish.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    FCPublish.addData(new AmfNull()); // command object: null for "FCPublish"
    FCPublish.addData(streamName);
    sendRtmpPacket(FCPublish);
    Log.d(TAG, "createStream(): Sending createStream command...");
    ChunkStreamInfo chunkStreamInfo =
        rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_OVER_CONNECTION);
    // transactionId == 4
    Command createStream = new Command("createStream", ++transactionIdCounter, chunkStreamInfo);
    createStream.addData(new AmfNull()); // command object: null for "createStream"
    sendRtmpPacket(createStream);
    // Waiting for "NetStream.Publish.Start" response.
    synchronized (publishLock) {
      try {
        publishLock.wait(5000);
      } catch (InterruptedException ex) {
        // do nothing
      }
    }
    if (!publishPermitted) {
      shutdown(true);
      connectCheckerRtmp.onConnectionFailedRtmp("Error configure stream, publish permitted failed");
    }
    return publishPermitted;
  }

  /** Sends the "publish" command once the server returned a stream id. */
  private void fmlePublish() {
    if (!connected || currentStreamId == 0) {
      Log.e(TAG, "fmlePublish failed");
      return;
    }
    Log.d(TAG, "fmlePublish(): Sending publish command...");
    Command publish = new Command("publish", 0);
    publish.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    publish.getHeader().setMessageStreamId(currentStreamId);
    publish.addData(new AmfNull()); // command object: null for "publish"
    publish.addData(streamName);
    publish.addData(publishType);
    sendRtmpPacket(publish);
  }

  /** Sends a minimal @setDataFrame/onMetaData so players know the stream parameters. */
  private void onMetaData() {
    if (!connected || currentStreamId == 0) {
      Log.e(TAG, "onMetaData failed");
      return;
    }
    Log.d(TAG, "onMetaData(): Sending empty onMetaData...");
    Data metadata = new Data("@setDataFrame");
    metadata.getHeader().setMessageStreamId(currentStreamId);
    metadata.addData("onMetaData");
    AmfMap ecmaArray = new AmfMap();
    ecmaArray.setProperty("duration", 0);
    ecmaArray.setProperty("width", videoWidth);
    ecmaArray.setProperty("height", videoHeight);
    ecmaArray.setProperty("videodatarate", 0);
    ecmaArray.setProperty("framerate", 0);
    ecmaArray.setProperty("audiodatarate", 0);
    ecmaArray.setProperty("audiosamplerate", 44100);
    ecmaArray.setProperty("audiosamplesize", 16);
    ecmaArray.setProperty("stereo", true);
    ecmaArray.setProperty("filesize", 0);
    metadata.addData(ecmaArray);
    sendRtmpPacket(metadata);
  }

  /** Closes the stream (if any) and tears down the socket and rx thread. */
  @Override
  public void close() {
    if (socket != null) {
      closeStream();
    }
    shutdown(true);
  }

  /** Sends "closeStream" for the current stream id, if publishing was in progress. */
  private void closeStream() {
    if (!connected || currentStreamId == 0 || !publishPermitted) {
      Log.e(TAG, "closeStream failed");
      return;
    }
    Log.d(TAG, "closeStream(): setting current stream ID to 0");
    Command closeStream = new Command("closeStream", 0);
    closeStream.getHeader().setChunkStreamId(ChunkStreamInfo.RTMP_CID_OVER_STREAM);
    closeStream.getHeader().setMessageStreamId(currentStreamId);
    closeStream.addData(new AmfNull());
    sendRtmpPacket(closeStream);
  }

  /**
   * Shuts the socket halves down (unblocking rx/tx), joins the rx thread, closes
   * the socket and, when {@code r} is true, resets all session state.
   */
  private void shutdown(boolean r) {
    if (socket != null) {
      try {
        // It will raise EOFException in handleRxPacketThread
        socket.shutdownInput();
        // It will raise SocketException in sendRtmpPacket
        socket.shutdownOutput();
      } catch (IOException | UnsupportedOperationException e) {
        e.printStackTrace();
      }
      // shutdown rxPacketHandler
      if (rxPacketHandler != null) {
        rxPacketHandler.interrupt();
        try {
          rxPacketHandler.join();
        } catch (InterruptedException ie) {
          rxPacketHandler.interrupt();
        }
        rxPacketHandler = null;
      }
      // shutdown socket as well as its input and output stream
      try {
        socket.close();
        Log.d(TAG, "socket closed");
      } catch (IOException ex) {
        Log.e(TAG, "shutdown(): failed to close socket", ex);
      }
    }
    if (r) {
      reset();
    }
  }

  /** Resets all per-session state so the connection object can be reused. */
  private void reset() {
    connected = false;
    publishPermitted = false;
    tcUrl = null;
    swfUrl = null;
    pageUrl = null;
    appName = null;
    streamName = null;
    publishType = null;
    currentStreamId = 0;
    transactionIdCounter = 0;
    socketExceptionCause = "";
    socket = null;
    rtmpSessionInfo = null;
    user = null;
    password = null;
    salt = null;
    challenge = null;
    opaque = null;
  }

  /**
   * Publishes one AAC audio frame (FLV audio-tag payload).
   * Silently drops frames until the stream is fully published.
   *
   * @param dts decode timestamp in ms, must be >= 0
   */
  @Override
  public void publishAudioData(byte[] data, int size, int dts) {
    if (data == null
        || data.length == 0
        || dts < 0
        || !connected
        || currentStreamId == 0
        || !publishPermitted) {
      return;
    }
    Audio audio = new Audio();
    audio.setData(data, size);
    audio.getHeader().setAbsoluteTimestamp(dts);
    audio.getHeader().setMessageStreamId(currentStreamId);
    sendRtmpPacket(audio);
  }

  /**
   * Publishes one H.264 video frame (FLV video-tag payload).
   * Silently drops frames until the stream is fully published.
   *
   * @param dts decode timestamp in ms, must be >= 0
   */
  @Override
  public void publishVideoData(byte[] data, int size, int dts) {
    if (data == null
        || data.length == 0
        || dts < 0
        || !connected
        || currentStreamId == 0
        || !publishPermitted) {
      return;
    }
    Video video = new Video();
    video.setData(data, size);
    video.getHeader().setAbsoluteTimestamp(dts);
    video.getHeader().setMessageStreamId(currentStreamId);
    sendRtmpPacket(video);
  }

  /** Chunk-encodes and writes one RTMP packet; reports socket failures once. */
  private void sendRtmpPacket(RtmpPacket rtmpPacket) {
    try {
      ChunkStreamInfo chunkStreamInfo =
          rtmpSessionInfo.getChunkStreamInfo(rtmpPacket.getHeader().getChunkStreamId());
      chunkStreamInfo.setPrevHeaderTx(rtmpPacket.getHeader());
      // AV packets carry their own timestamps; everything else is stamped now.
      if (!(rtmpPacket instanceof Video || rtmpPacket instanceof Audio)) {
        rtmpPacket.getHeader()
            .setAbsoluteTimestamp((int) chunkStreamInfo.markAbsoluteTimestampTx());
      }
      rtmpPacket.writeTo(outputStream, rtmpSessionInfo.getTxChunkSize(), chunkStreamInfo);
      Log.d(TAG,
          "wrote packet: " + rtmpPacket + ", size: " + rtmpPacket.getHeader().getPacketLength());
      if (rtmpPacket instanceof Command) {
        rtmpSessionInfo.addInvokedCommand(((Command) rtmpPacket).getTransactionId(),
            ((Command) rtmpPacket).getCommandName());
      }
      outputStream.flush();
    } catch (SocketException se) {
      // Since there are still remaining AV frames in the cache, we set a flag to guarantee the
      // socket exception is only reported one time.
      // se.getMessage() may be null; normalize to avoid an NPE in contentEquals.
      String cause = se.getMessage() != null ? se.getMessage() : "SocketException";
      if (!socketExceptionCause.contentEquals(cause)) {
        socketExceptionCause = cause;
        connectCheckerRtmp.onConnectionFailedRtmp("Error send packet: " + se.getMessage());
        Log.e(TAG, "Caught SocketException during write loop, shutting down: " + se.getMessage());
      }
    } catch (IOException ioe) {
      Log.e(TAG, "Caught IOException during write loop, shutting down: " + ioe.getMessage());
    }
  }

  /**
   * Receive loop run on the rx thread: decodes server packets and dispatches
   * protocol-control messages and AMF command replies until interrupted or EOF.
   */
  private void handleRxPacketLoop() throws IOException {
    // Handle all queued received RTMP packets
    while (!Thread.interrupted()) {
      try {
        // It will be blocked when no data in input stream buffer
        RtmpPacket rtmpPacket = rtmpDecoder.readPacket(inputStream);
        if (rtmpPacket != null) {
          switch (rtmpPacket.getHeader().getMessageType()) {
            case ABORT:
              rtmpSessionInfo.getChunkStreamInfo(((Abort) rtmpPacket).getChunkStreamId())
                  .clearStoredChunks();
              break;
            case USER_CONTROL_MESSAGE:
              UserControl user = (UserControl) rtmpPacket;
              switch (user.getType()) {
                case STREAM_BEGIN:
                  break;
                case PING_REQUEST:
                  ChunkStreamInfo channelInfo =
                      rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_PROTOCOL_CONTROL);
                  Log.d(TAG, "handleRxPacketLoop(): Sending PONG reply..");
                  UserControl pong = new UserControl(user, channelInfo);
                  sendRtmpPacket(pong);
                  break;
                case STREAM_EOF:
                  Log.i(TAG, "handleRxPacketLoop(): Stream EOF reached, closing RTMP writer...");
                  break;
                default:
                  // Ignore...
                  break;
              }
              break;
            case WINDOW_ACKNOWLEDGEMENT_SIZE:
              WindowAckSize windowAckSize = (WindowAckSize) rtmpPacket;
              int size = windowAckSize.getAcknowledgementWindowSize();
              Log.d(TAG, "handleRxPacketLoop(): Setting acknowledgement window size: " + size);
              rtmpSessionInfo.setAcknowledgmentWindowSize(size);
              break;
            case SET_PEER_BANDWIDTH:
              SetPeerBandwidth bw = (SetPeerBandwidth) rtmpPacket;
              rtmpSessionInfo.setAcknowledgmentWindowSize(bw.getAcknowledgementWindowSize());
              int acknowledgementWindowsize = rtmpSessionInfo.getAcknowledgementWindowSize();
              ChunkStreamInfo chunkStreamInfo =
                  rtmpSessionInfo.getChunkStreamInfo(ChunkStreamInfo.RTMP_CID_PROTOCOL_CONTROL);
              Log.d(TAG, "handleRxPacketLoop(): Send acknowledgement window size: "
                  + acknowledgementWindowsize);
              sendRtmpPacket(new WindowAckSize(acknowledgementWindowsize, chunkStreamInfo));
              // Set socket option
              socket.setSendBufferSize(acknowledgementWindowsize);
              break;
            case COMMAND_AMF0:
              handleRxInvoke((Command) rtmpPacket);
              break;
            default:
              Log.w(TAG, "handleRxPacketLoop(): Not handling unimplemented/unknown packet of type: "
                  + rtmpPacket.getHeader().getMessageType());
              break;
          }
        }
      } catch (EOFException eof) {
        Thread.currentThread().interrupt();
      } catch (IOException e) {
        connectCheckerRtmp.onConnectionFailedRtmp("Error reading packet: " + e.getMessage());
        Log.e(TAG, "Caught SocketException while reading/decoding packet, shutting down: "
            + e.getMessage());
      }
    }
  }

  /** Dispatches an AMF command received from the server ("_result", "_error", "onStatus", ...). */
  private void handleRxInvoke(Command invoke) throws IOException {
    String commandName = invoke.getCommandName();
    switch (commandName) {
      case "_error":
        try {
          String description = ((AmfString) ((AmfObject) invoke.getData().get(1)).getProperty(
              "description")).getValue();
          Log.i(TAG, description);
          if (description.contains("reason=authfailed")) {
            connectCheckerRtmp.onAuthErrorRtmp();
            connected = false;
            synchronized (connectingLock) {
              connectingLock.notifyAll();
            }
          } else if (user != null
              && password != null
              && description.contains("challenge=")
              && description.contains("salt=")) {
            // Server replied with auth material: reconnect and send the final auth packet.
            onAuth = true;
            try {
              shutdown(false);
            } catch (Exception e) {
              e.printStackTrace();
            }
            rtmpSessionInfo = new RtmpSessionInfo();
            rtmpDecoder = new RtmpDecoder(rtmpSessionInfo);
            if (!tlsEnabled) {
              socket = new Socket(host, port);
            } else {
              socket = CreateSSLSocket.createSSlSocket(host, port);
              // Match the null check done on the initial connect path.
              if (socket == null) throw new IOException("Socket creation failed");
            }
            inputStream = new BufferedInputStream(socket.getInputStream());
            outputStream = new BufferedOutputStream(socket.getOutputStream());
            Log.d(TAG, "connect(): socket connection established, doing handshake...");
            salt = Util.getSalt(description);
            challenge = Util.getChallenge(description);
            opaque = Util.getOpaque(description);
            handshake(inputStream, outputStream);
            rxPacketHandler = new Thread(new Runnable() {
              @Override
              public void run() {
                try {
                  handleRxPacketLoop();
                } catch (IOException e) {
                  e.printStackTrace();
                }
              }
            });
            rxPacketHandler.start();
            sendConnectAuthPacketFinal(user, password, salt, challenge, opaque);
          } else if (description.contains("code=403") && (user == null || password == null)) {
            // FIX: parenthesized — previously "&&" bound tighter than "||", so a missing
            // password alone triggered the auth error regardless of the 403 check.
            connectCheckerRtmp.onAuthErrorRtmp();
            connected = false;
            synchronized (connectingLock) {
              connectingLock.notifyAll();
            }
          } else {
            connectCheckerRtmp.onConnectionFailedRtmp(description);
            connected = false;
            synchronized (connectingLock) {
              connectingLock.notifyAll();
            }
          }
        } catch (Exception e) {
          connectCheckerRtmp.onConnectionFailedRtmp(e.getMessage());
          connected = false;
          synchronized (connectingLock) {
            connectingLock.notifyAll();
          }
        }
        break;
      case "_result":
        // This is the result of one of the methods invoked by us
        String method = rtmpSessionInfo.takeInvokedCommand(invoke.getTransactionId());
        Log.i(TAG, "handleRxInvoke: Got result for invoked method: " + method);
        // FIX: use equals() instead of the reversed "literal".contains(method), which
        // NPEs when method is null and vacuously matches the empty string.
        if ("connect".equals(method)) {
          if (onAuth) {
            connectCheckerRtmp.onAuthSuccessRtmp();
            onAuth = false;
          }
          // We can now send createStream commands
          connected = true;
          synchronized (connectingLock) {
            connectingLock.notifyAll();
          }
        } else if ("createStream".equals(method)) {
          // Get stream id
          currentStreamId = (int) ((AmfNumber) invoke.getData().get(1)).getValue();
          Log.d(TAG, "handleRxInvoke(): Stream ID to publish: " + currentStreamId);
          if (streamName != null && publishType != null) {
            fmlePublish();
          }
        } else if ("releaseStream".equals(method)) {
          Log.d(TAG, "handleRxInvoke(): 'releaseStream'");
        } else if ("FCPublish".equals(method)) {
          Log.d(TAG, "handleRxInvoke(): 'FCPublish'");
        } else {
          Log.w(TAG, "handleRxInvoke(): '_result' message received for unknown method: " + method);
        }
        break;
      case "onBWDone":
        Log.d(TAG, "handleRxInvoke(): 'onBWDone'");
        break;
      case "onFCPublish":
        Log.d(TAG, "handleRxInvoke(): 'onFCPublish'");
        break;
      case "onStatus":
        String code =
            ((AmfString) ((AmfObject) invoke.getData().get(1)).getProperty("code")).getValue();
        Log.d(TAG, "handleRxInvoke(): onStatus " + code);
        if (code.equals("NetStream.Publish.Start")) {
          onMetaData();
          // We can now publish AV data
          publishPermitted = true;
          synchronized (publishLock) {
            publishLock.notifyAll();
          }
        }
        break;
      default:
        Log.e(TAG, "handleRxInvoke(): Unknown/unhandled server invoke: " + invoke);
        break;
    }
  }

  @Override
  public void setVideoResolution(int width, int height) {
    videoWidth = width;
    videoHeight = height;
  }

  @Override
  public void setAuthorization(String user, String password) {
    this.user = user;
    this.password = password;
  }
}
音频编码器:AudioEncoder
/**
 * Encodes raw PCM (from the microphone or any other source) to AAC-LC using
 * {@link MediaCodec}, delivering encoded buffers through {@link GetAacData}.
 *
 * <p>Usage: {@code prepareAudioEncoder(...)} -> {@code start()} -> feed PCM via
 * {@link #inputPCMData(byte[], int)} -> {@code stop()}.
 */
public class AudioEncoder implements GetMicrophoneData {
  private String TAG = "AudioEncoder";
  private MediaCodec audioEncoder;
  private GetAacData getAacData;
  private MediaCodec.BufferInfo audioInfo = new MediaCodec.BufferInfo();
  // Base timestamp (us) captured at start(); output PTS are relative to it.
  private long mPresentTimeUs;
  private boolean running;
  // default parameters for encoder
  private CodecUtil.Force force = CodecUtil.Force.FIRST_COMPATIBLE_FOUND;
  private int bitRate = 128 * 1024; // in bps (128 kbit/s) — value is bits per second, not kbps
  private int sampleRate = 44100; // in hz
  private boolean isStereo = true;

  public AudioEncoder(GetAacData getAacData) {
    this.getAacData = getAacData;
  }

  /**
   * Prepare encoder with custom parameters.
   *
   * @param bitRate target bitrate in bits per second
   * @param sampleRate PCM sample rate in Hz
   * @param isStereo true for 2 channels, false for mono
   * @return true if the codec was created and configured successfully
   */
  public boolean prepareAudioEncoder(int bitRate, int sampleRate, boolean isStereo) {
    // Remember the chosen configuration so prepareAudioEncoder() (no-arg) reuses it.
    this.bitRate = bitRate;
    this.sampleRate = sampleRate;
    this.isStereo = isStereo;
    try {
      List<MediaCodecInfo> encoders = new ArrayList<>();
      if (force == CodecUtil.Force.HARDWARE) {
        encoders = CodecUtil.getAllHardwareEncoders(CodecUtil.AAC_MIME);
      } else if (force == CodecUtil.Force.SOFTWARE) {
        encoders = CodecUtil.getAllSoftwareEncoders(CodecUtil.AAC_MIME);
      }
      if (force == CodecUtil.Force.FIRST_COMPATIBLE_FOUND) {
        audioEncoder = MediaCodec.createEncoderByType(CodecUtil.AAC_MIME);
      } else {
        if (encoders.isEmpty()) {
          Log.e(TAG, "Valid encoder not found");
          return false;
        } else {
          audioEncoder = MediaCodec.createByCodecName(encoders.get(0).getName());
        }
      }
      int channelCount = isStereo ? 2 : 1;
      MediaFormat audioFormat =
          MediaFormat.createAudioFormat(CodecUtil.AAC_MIME, sampleRate, channelCount);
      audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
      audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 0);
      audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE,
          MediaCodecInfo.CodecProfileLevel.AACObjectLC);
      audioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
      running = false;
      return true;
    } catch (IOException | IllegalStateException e) {
      e.printStackTrace();
      return false;
    }
  }

  public void setForce(CodecUtil.Force force) {
    this.force = force;
  }

  /**
   * Prepare encoder with default (or last configured) parameters.
   */
  public boolean prepareAudioEncoder() {
    return prepareAudioEncoder(bitRate, sampleRate, isStereo);
  }

  /** Starts the codec; must be called after a successful prepareAudioEncoder(). */
  public void start() {
    if (audioEncoder != null) {
      mPresentTimeUs = System.nanoTime() / 1000;
      audioEncoder.start();
      running = true;
      Log.i(TAG, "AudioEncoder started");
    } else {
      Log.e(TAG, "AudioEncoder need be prepared, AudioEncoder not enabled");
    }
  }

  /** Stops and releases the codec. Safe to call repeatedly. */
  public void stop() {
    running = false;
    if (audioEncoder != null) {
      try {
        audioEncoder.stop();
      } catch (IllegalStateException e) {
        // Codec was already in an error/released state; release it anyway.
        e.printStackTrace();
      }
      audioEncoder.release();
      audioEncoder = null;
    }
    Log.i(TAG, "AudioEncoder stopped");
  }

  /**
   * Set custom PCM data.
   * Use it after prepareAudioEncoder(int sampleRate, int channel).
   * Used too with microphone.
   *
   * @param buffer PCM buffer
   * @param size Min PCM buffer size
   */
  @Override
  public void inputPCMData(final byte[] buffer, final int size) {
    // Guard against data arriving before prepare()/start() or after stop(),
    // which would otherwise throw NPE/IllegalStateException.
    if (audioEncoder == null || !running) return;
    if (Build.VERSION.SDK_INT >= 21) {
      getDataFromEncoderAPI21(buffer, size);
    } else {
      getDataFromEncoder(buffer, size);
    }
  }

  /** Feed PCM and drain all available AAC output (API 21+ buffer accessors). */
  @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
  private void getDataFromEncoderAPI21(byte[] data, int size) {
    int inBufferIndex = audioEncoder.dequeueInputBuffer(-1);
    if (inBufferIndex >= 0) {
      ByteBuffer bb = audioEncoder.getInputBuffer(inBufferIndex);
      bb.put(data, 0, size);
      long pts = System.nanoTime() / 1000 - mPresentTimeUs;
      audioEncoder.queueInputBuffer(inBufferIndex, 0, size, pts, 0);
    }
    for (; ; ) {
      int outBufferIndex = audioEncoder.dequeueOutputBuffer(audioInfo, 0);
      if (outBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        getAacData.onAudioFormat(audioEncoder.getOutputFormat());
      } else if (outBufferIndex >= 0) {
        // This ByteBuffer is AAC
        ByteBuffer bb = audioEncoder.getOutputBuffer(outBufferIndex);
        getAacData.getAacData(bb, audioInfo);
        audioEncoder.releaseOutputBuffer(outBufferIndex, false);
      } else {
        break;
      }
    }
  }

  /** Feed PCM and drain all available AAC output (pre-API-21 buffer arrays). */
  private void getDataFromEncoder(byte[] data, int size) {
    ByteBuffer[] inputBuffers = audioEncoder.getInputBuffers();
    ByteBuffer[] outputBuffers = audioEncoder.getOutputBuffers();
    int inBufferIndex = audioEncoder.dequeueInputBuffer(-1);
    if (inBufferIndex >= 0) {
      ByteBuffer bb = inputBuffers[inBufferIndex];
      bb.clear();
      bb.put(data, 0, size);
      long pts = System.nanoTime() / 1000 - mPresentTimeUs;
      audioEncoder.queueInputBuffer(inBufferIndex, 0, size, pts, 0);
    }
    for (; ; ) {
      int outBufferIndex = audioEncoder.dequeueOutputBuffer(audioInfo, 0);
      if (outBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        getAacData.onAudioFormat(audioEncoder.getOutputFormat());
      } else if (outBufferIndex >= 0) {
        // This ByteBuffer is AAC
        ByteBuffer bb = outputBuffers[outBufferIndex];
        getAacData.getAacData(bb, audioInfo);
        audioEncoder.releaseOutputBuffer(outBufferIndex, false);
      } else {
        break;
      }
    }
  }

  public void setSampleRate(int sampleRate) {
    this.sampleRate = sampleRate;
  }

  public boolean isRunning() {
    return running;
  }
}
将H264推流到RTMP服务器,关键在于视频同步包的构造和H264普通数据包的构造,以及在将这些数据封包进RTMP Packet时,要注意各时间戳之间的关系。除此之外,其他方面例如RTMP的初始化、建立连接等流程则与RTMPDUMP中的流程一致。
参考链接:https://github.com/pedroSG94/rtmp-rtsp-stream-client-java