A Java/Android example of RTP packing and unpacking for H.264
阿新 · Published: 2020-10-09
For live streaming and audio/video communication you often need to wrap the audio/video data in RTP packets before sending it. Almost everything you can find online is written in C or C++; Java versions are rare. Even when you finally dig one up, it either doesn't work, is only a fragment, or handles packing but not unpacking.
It is quite frustrating. I went through the same thing: at first I wasn't familiar with the RTP protocol and didn't really understand how to split and reassemble packets. After struggling for a few days I finally got it working, so I'm sharing it here for anyone who needs it. I hope it helps.
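Before diving into the code, a quick recap of what the packer below actually writes by hand: every RTP packet begins with a fixed 12-byte header (version, padding, extension, CSRC count, marker bit, payload type, sequence number, timestamp, SSRC), followed by the payload, which for H.264 is either a whole NALU or an FU-A fragment. The bit twiddling on sendbuf[0..11] in the class below builds exactly this header. As a compact illustration only, here is the same header written with ByteBuffer; the class name RtpHeaderSketch, the method buildRtpHeader and the constant values are mine, not part of the original code:

import java.nio.ByteBuffer;

public class RtpHeaderSketch {
    // Illustrative sketch of the 12-byte RTP header the packer fills manually.
    public static byte[] buildRtpHeader(int seq, long timestamp, boolean marker) {
        final int PAYLOAD_TYPE = 96; // dynamic payload type, the same value the packer ORs into sendbuf[1]
        final int SSRC = 10;         // must be unique within the RTP session
        ByteBuffer header = ByteBuffer.allocate(12);                // big-endian (network byte order) by default
        header.put((byte) 0x80);                                    // V=2, P=0, X=0, CC=0
        header.put((byte) ((marker ? 0x80 : 0x00) | PAYLOAD_TYPE)); // M bit plus payload type
        header.putShort((short) seq);                               // 16-bit sequence number
        header.putInt((int) timestamp);                             // 32-bit timestamp, 90 kHz clock for video
        header.putInt(SSRC);                                        // 32-bit synchronization source id
        return header.array();
    }
}

Because ByteBuffer is big-endian by default, no manual byte swapping is needed here; the class below copies little-endian bytes first and swaps them afterwards, which amounts to the same thing.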
Let's go straight to the code; not much more needs saying.
First, the key class:
package com.imsdk.socket.udp.codec;

public class RtspPacketEncode {
    private static final String TAG = "RtspPacketEncode";

    // ------------ listener for the converted video data -----------
    public interface H264ToRtpLinsener {
        void h264ToRtpResponse(byte[] out, int len);
    }

    private H264ToRtpLinsener h264ToRtpLinsener;

    // invoke the callback
    private void exceuteH264ToRtpLinsener(byte[] out, int len) {
        if (this.h264ToRtpLinsener != null) {
            h264ToRtpLinsener.h264ToRtpResponse(out, len);
        }
    }

    // ------- video --------
    private int framerate = 10;
    private byte[] sendbuf = new byte[1500];
    private int packageSize = 1400;
    private int seq_num = 0;
    private int timestamp_increse = (int) (90000.0 / framerate); // framerate is the frame rate
    private int ts_current = 0;
    private int bytes = 0;
    // ------- video END --------

    public RtspPacketEncode(H264ToRtpLinsener h264ToRtpLinsener) {
        this.h264ToRtpLinsener = h264ToRtpLinsener;
    }

    /**
     * Pack one NALU (start code already removed) into RTP packets.
     *
     * @param r       the NALU, beginning with its one-byte NALU header
     * @param h264len length of the NALU in bytes
     */
    public void h264ToRtp(byte[] r, int h264len) throws Exception {
        CalculateUtil.memset(sendbuf, 0, 1500);
        sendbuf[1] = (byte) (sendbuf[1] | 96);   // payload type 96 -> 0110 0000
        sendbuf[0] = (byte) (sendbuf[0] | 0x80); // version, fixed to 2
        sendbuf[1] = (byte) (sendbuf[1] & 254);  // marker (M) bit cleared for now
        sendbuf[11] = 10; // last byte of the SSRC, arbitrarily set to 10; must be unique within this RTP session
                          // (Java already uses network byte order, no conversion needed)
        if (h264len <= packageSize) {
            // single NAL unit packet
            sendbuf[1] = (byte) (sendbuf[1] | 0x80); // set the marker (M) bit to 1 -> 1110 0000; the low 7 bits stay 96, the payload type
            System.arraycopy(CalculateUtil.intToByte(seq_num++), 0, sendbuf, 2, 2); // sendbuf[2..3]: 16-bit sequence number
            {
                // Java is big-endian (network byte order) on every platform, but intToByte()
                // produces little-endian bytes, so the two bytes must be swapped.
                // see: http://blog.csdn.net/u011068702/article/details/51857557
                //      http://cpjsjxy.iteye.com/blog/1591261
                byte temp = sendbuf[3];
                sendbuf[3] = sendbuf[2];
                sendbuf[2] = temp;
            }
            // NALU header (F, NRI, type) goes into sendbuf[12]
            sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x80)); // F (forbidden_zero_bit)
            sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x60)); // NRI
            sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x1f)); // type
            // The NALU header is already in sendbuf[12], so copy the payload starting from r[1]
            System.arraycopy(r, 1, sendbuf, 13, h264len - 1);
            ts_current = ts_current + timestamp_increse;
            System.arraycopy(CalculateUtil.intToByte(ts_current), 0, sendbuf, 4, 4); // sendbuf[4..7]: timestamp, also swapped to big-endian
            {
                byte temp = sendbuf[4];
                sendbuf[4] = sendbuf[7];
                sendbuf[7] = temp;
                temp = sendbuf[5];
                sendbuf[5] = sendbuf[6];
                sendbuf[6] = temp;
            }
            bytes = h264len + 12; // packet length: the NALU (header included, start code removed) plus the fixed 12-byte RTP header
            // client.send(new DatagramPacket(sendbuf, bytes, addr, port/*9200*/));
            exceuteH264ToRtpLinsener(sendbuf, bytes);
        } else if (h264len > packageSize) {
            // FU-A fragmentation
            int k = h264len / packageSize;
            int l = h264len % packageSize;
            int t = 0;
            ts_current = ts_current + timestamp_increse;
            System.arraycopy(CalculateUtil.intToByte(ts_current), 0, sendbuf, 4, 4); // timestamp, swapped to big-endian
            {
                byte temp = sendbuf[4];
                sendbuf[4] = sendbuf[7];
                sendbuf[7] = temp;
                temp = sendbuf[5];
                sendbuf[5] = sendbuf[6];
                sendbuf[6] = temp;
            }
            while (t <= k) {
                System.arraycopy(CalculateUtil.intToByte(seq_num++), 0, sendbuf, 2, 2); // sequence number, swapped to big-endian
                {
                    byte temp = sendbuf[3];
                    sendbuf[3] = sendbuf[2];
                    sendbuf[2] = temp;
                }
                if (t == 0) { // first fragment
                    sendbuf[1] = (byte) (sendbuf[1] & 0x7F); // M = 0, not the last fragment
                    // FU indicator: one byte right after the RTP header, holding F, NRI and type
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x80)); // F (forbidden bit), normally 0
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x60)); // NRI, importance of this packet
                    sendbuf[12] = (byte) (sendbuf[12] | 28);            // type = 28 marks an FU-A packet
                    // FU header: one byte holding S, E, R and type
                    sendbuf[13] = (byte) (sendbuf[13] & 0xBF); // E = 0, not the last fragment
                    sendbuf[13] = (byte) (sendbuf[13] & 0xDF); // R = 0, reserved, must be 0
                    sendbuf[13] = (byte) (sendbuf[13] | 0x80); // S = 1, this is the first fragment
                    sendbuf[13] = (byte) (sendbuf[13] | (r[0] & 0x1f)); // type taken from the NALU header
                    // Copy the NALU payload (NALU header removed) after byte 14.
                    // The first 14 bytes are: 12-byte RTP header, FU indicator, FU header.
                    System.arraycopy(r, 1, sendbuf, 14, packageSize);
                    // client.send(new DatagramPacket(sendbuf, packageSize + 14, addr, port/*9200*/));
                    exceuteH264ToRtpLinsener(sendbuf, packageSize + 14);
                    t++;
                } else if (t == k) { // last fragment
                    sendbuf[1] = (byte) (sendbuf[1] | 0x80); // M = 1
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x80));
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x60));
                    sendbuf[12] = (byte) (sendbuf[12] | 28);
                    sendbuf[13] = (byte) (sendbuf[13] & 0xDF); // R = 0, reserved, must be 0
                    sendbuf[13] = (byte) (sendbuf[13] & 0x7F); // S = 0, not the first fragment
                    sendbuf[13] = (byte) (sendbuf[13] | 0x40); // E = 1, this is the last fragment
                    sendbuf[13] = (byte) (sendbuf[13] | (r[0] & 0x1f)); // type taken from the NALU header
                    if (0 != l) { // only runs when the NALU size is not an exact multiple of packageSize (1400)
                        System.arraycopy(r, t * packageSize + 1, sendbuf, 14, l - 1); // l - 1: the NALU header is not repeated
                        bytes = l - 1 + 14;
                        // client.send(new DatagramPacket(sendbuf, bytes, addr, port/*9200*/));
                        exceuteH264ToRtpLinsener(sendbuf, bytes);
                    }
                    t++;
                } else if (t < k && 0 != t) { // neither the first nor the last fragment
                    sendbuf[1] = (byte) (sendbuf[1] & 0x7F); // M = 0, not the last fragment
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x80));
                    sendbuf[12] = (byte) (sendbuf[12] | (r[0] & 0x60));
                    sendbuf[12] = (byte) (sendbuf[12] | 28);
                    sendbuf[13] = (byte) (sendbuf[13] & 0xDF); // R = 0
                    sendbuf[13] = (byte) (sendbuf[13] & 0x7F); // S = 0, not the first fragment
                    sendbuf[13] = (byte) (sendbuf[13] & 0xBF); // E = 0, not the last fragment
                    sendbuf[13] = (byte) (sendbuf[13] | (r[0] & 0x1f)); // type taken from the NALU header
                    System.arraycopy(r, t * packageSize + 1, sendbuf, 14, packageSize); // payload only, no NALU header
                    // client.send(new DatagramPacket(sendbuf, packageSize + 14, addr, port/*9200*/));
                    exceuteH264ToRtpLinsener(sendbuf, packageSize + 14);
                    t++;
                }
            }
        }
    }
}
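One thing the class above assumes but never shows: h264ToRtp() expects one NALU at a time, starting with the one-byte NALU header and without the 00 00 00 01 start code. If your encoder hands you a raw Annex-B stream, you have to split it on the start codes first. Below is a minimal splitter sketch built on the FindStartCode2/FindStartCode3 helpers from the CalculateUtil class that follows; the class AnnexBSplitter and the method splitAnnexB are illustrative names, not part of the original post:

package com.imsdk.socket.udp.codec;

public class AnnexBSplitter {
    // Hypothetical helper: walk an Annex-B buffer, cut it at each start code and
    // hand every NALU (start code removed) to the packer.
    public static void splitAnnexB(byte[] stream, int len, RtspPacketEncode packer) throws Exception {
        int pos = 0;
        int naluStart = -1;
        while (pos + 2 < len) {
            boolean isStart3 = CalculateUtil.FindStartCode2(stream, pos) == 1;                  // 00 00 01
            boolean isStart4 = (pos + 3 < len) && CalculateUtil.FindStartCode3(stream, pos) == 1; // 00 00 00 01
            if (isStart3 || isStart4) {
                if (naluStart >= 0) { // the previous NALU ends here
                    byte[] nalu = new byte[pos - naluStart];
                    System.arraycopy(stream, naluStart, nalu, 0, nalu.length);
                    packer.h264ToRtp(nalu, nalu.length);
                }
                pos += isStart4 ? 4 : 3;
                naluStart = pos; // the first byte after the start code is the NALU header
            } else {
                pos++;
            }
        }
        if (naluStart >= 0) { // the last NALU runs to the end of the buffer
            byte[] nalu = new byte[len - naluStart];
            System.arraycopy(stream, naluStart, nalu, 0, nalu.length);
            packer.h264ToRtp(nalu, nalu.length);
        }
    }
}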
The helper (calculation) class:
package com.imsdk.socket.udp.codec;

/**
 * Helper class for byte/int conversion and buffer handling.
 *
 * @author kokJuis
 */
public class CalculateUtil {

    /**
     * Convert an int to a 4-byte array, lowest byte first (little-endian).
     */
    public static byte[] intToByte(int number) {
        int temp = number;
        byte[] b = new byte[4];
        for (int i = 0; i < b.length; i++) {
            b[i] = (byte) (temp & 0xff); // store the lowest byte first
            temp = temp >> 8;            // shift right by 8 bits
        }
        return b;
    }

    public static int byteToInt(byte b) {
        // Java always treats byte as signed; AND with 0xFF to get the unsigned value
        return b & 0xFF;
    }

    // byte array <-> int conversion (big-endian)
    public static int byteArrayToInt(byte[] b) {
        return b[3] & 0xFF | (b[2] & 0xFF) << 8 | (b[1] & 0xFF) << 16 | (b[0] & 0xFF) << 24;
    }

    public static byte[] intToByteArray(int a) {
        return new byte[]{
                (byte) ((a >> 24) & 0xFF),
                (byte) ((a >> 16) & 0xFF),
                (byte) ((a >> 8) & 0xFF),
                (byte) (a & 0xFF)
        };
    }

    // fill the first size bytes of buf with value
    public static void memset(byte[] buf, int value, int size) {
        for (int i = 0; i < size; i++) {
            buf[i] = (byte) value;
        }
    }

    // NALU_t is a simple NALU holder (len, nal_unit_type, ...) used elsewhere; it is not shown in this post
    public static void dump(NALU_t n) {
        System.out.println("len: " + n.len + " nal_unit_type:" + n.nal_unit_type);
    }

    // returns 1 if the 3 bytes at off are the start code 0x000001
    public static int FindStartCode2(byte[] Buf, int off) {
        if (Buf[0 + off] != 0 || Buf[1 + off] != 0 || Buf[2 + off] != 1)
            return 0;
        else
            return 1;
    }

    // returns 1 if the 4 bytes at off are the start code 0x00000001
    public static int FindStartCode3(byte[] Buf, int off) {
        if (Buf[0 + off] != 0 || Buf[1 + off] != 0 || Buf[2 + off] != 0 || Buf[3 + off] != 1)
            return 0;
        else
            return 1;
    }
}
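A quick note on why the packer swaps bytes after every arraycopy: intToByte() stores the lowest byte first (little-endian), while RTP fields go on the wire in network byte order (big-endian); intToByteArray() already produces big-endian output. The tiny check below shows the two orders side by side; the class name ByteOrderDemo is purely illustrative and the class is assumed to sit in the same package as CalculateUtil:

public class ByteOrderDemo {
    public static void main(String[] args) {
        int seq = 0x0102;
        byte[] le = CalculateUtil.intToByte(seq);      // {0x02, 0x01, 0x00, 0x00} - lowest byte first
        byte[] be = CalculateUtil.intToByteArray(seq); // {0x00, 0x00, 0x01, 0x02} - network byte order
        // The packer copies le[0..1] into sendbuf[2..3] and then swaps them,
        // which gives the same two bytes as be[2..3].
        System.out.printf("le: %02x %02x, be: %02x %02x%n", le[0], le[1], be[2], be[3]);
    }
}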
To use it, just implement the listener:
@Override
public void h264ToRtpResponse(byte[] out, int len) {
    // listener callback: out is one finished RTP packet
    if (out != null) {
        Log.v(TAG, "---sending data---" + len);
        netSendTask.pushBuf(out, len);
    }
}

// feed one NALU (ret bytes, start code already stripped) into the packer:
rtspPacketEncode.h264ToRtp(h264, ret);
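The commented-out client.send(new DatagramPacket(...)) calls in the packer hint at how the packets were originally sent. If you don't have a netSendTask of your own, a plain DatagramSocket is enough; the class UdpSender and the host/port values below are placeholders of mine, not part of the original code:

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;

// A minimal UDP sender sketch; its pushBuf(...) matches the call in the listener above.
public class UdpSender {
    private final DatagramSocket socket;
    private final InetAddress addr;
    private final int port;

    public UdpSender(String host, int port) throws IOException {
        this.socket = new DatagramSocket();
        this.addr = InetAddress.getByName(host);
        this.port = port;
    }

    // call this from h264ToRtpResponse(out, len)
    public void pushBuf(byte[] buf, int len) {
        try {
            socket.send(new DatagramPacket(buf, len, addr, port));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}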
The unpacking (reassembly) class:
package com.imsdk.socket.udp.codec;

public class RtspPacketDecode {

    private byte[] h264Buffer;
    private int h264Len = 0;
    private int h264Pos = 0;
    private static final byte[] start_code = {0, 0, 0, 1}; // H.264 start code

    // pass in the video resolution
    public RtspPacketDecode(int width, int height) {
        h264Buffer = new byte[getYuvBuffer(width, height)];
    }

    /**
     * Unpack RTP into H.264.
     *
     * @param rtpData one RTP packet
     * @param rtpLen  length of the packet in bytes
     * @return a complete NALU (with start code) once one has been reassembled, otherwise null
     */
    public byte[] rtp2h264(byte[] rtpData, int rtpLen) {
        int fu_header_len = 12; // the fixed RTP header is 12 bytes; the payload starts after it
        int extension = (rtpData[0] & (1 << 4)); // X bit: is a header extension present?
        if (extension > 0) {
            // skip the extension header: 2-byte profile id, 2-byte length (in 32-bit words), then the data
            int extLen = ((rtpData[14] & 0xFF) << 8) + (rtpData[15] & 0xFF);
            fu_header_len += (extLen + 1) * 4;
        }
        // parse the first payload byte: the NALU header for single-NALU packets, the FU indicator for FU-A
        byte indicatorType = (byte) (CalculateUtil.byteToInt(rtpData[fu_header_len]) & 0x1f); // low 5 bits: type
        byte nri = (byte) ((CalculateUtil.byteToInt(rtpData[fu_header_len]) >> 5) & 0x03);    // 2 NRI bits
        byte f = (byte) (CalculateUtil.byteToInt(rtpData[fu_header_len]) >> 7);               // F bit
        byte h264_nal_header;
        byte fu_header;
        if (indicatorType == 28) { // FU-A
            fu_header = rtpData[fu_header_len + 1];
            byte s = (byte) (rtpData[fu_header_len + 1] & 0x80);
            byte e = (byte) (rtpData[fu_header_len + 1] & 0x40);
            if (e == 64) { // end of FU-A
                // ZOLogUtil.d("RtpParser", "end of fu-a");
                byte[] temp = new byte[rtpLen - (fu_header_len + 2)];
                System.arraycopy(rtpData, fu_header_len + 2, temp, 0, temp.length);
                writeData2Buffer(temp, temp.length);
                if (h264Pos >= 0) {
                    h264Pos = -1;
                    if (h264Len > 0) {
                        byte[] h264Data = new byte[h264Len];
                        System.arraycopy(h264Buffer, 0, h264Data, 0, h264Len);
                        h264Len = 0;
                        return h264Data;
                    }
                }
            } else if (s == -128) { // start of FU-A
                h264Pos = 0; // reset the write position
                writeData2Buffer(start_code, 4); // write the H.264 start code
                // rebuild the NALU header from F, NRI and the type carried in the FU header
                h264_nal_header = (byte) ((fu_header & 0x1f) | (nri << 5) | (f << 7));
                writeData2Buffer(new byte[]{h264_nal_header}, 1);
                byte[] temp = new byte[rtpLen - (fu_header_len + 2)];
                System.arraycopy(rtpData, fu_header_len + 2, temp, 0, temp.length); // payload
                writeData2Buffer(temp, temp.length);
            } else { // middle of FU-A
                byte[] temp = new byte[rtpLen - (fu_header_len + 2)];
                System.arraycopy(rtpData, fu_header_len + 2, temp, 0, temp.length);
                writeData2Buffer(temp, temp.length);
            }
        } else { // single NALU packet
            h264Pos = 0;
            writeData2Buffer(start_code, 4);
            byte[] temp = new byte[rtpLen - fu_header_len];
            System.arraycopy(rtpData, fu_header_len, temp, 0, temp.length);
            writeData2Buffer(temp, temp.length);
            if (h264Pos >= 0) {
                h264Pos = -1;
                if (h264Len > 0) {
                    byte[] h264Data = new byte[h264Len];
                    System.arraycopy(h264Buffer, 0, h264Data, 0, h264Len);
                    h264Len = 0;
                    return h264Data;
                }
            }
        }
        return null;
    }

    private void writeData2Buffer(byte[] data, int len) {
        if (h264Pos >= 0) {
            System.arraycopy(data, 0, h264Buffer, h264Pos, len);
            h264Pos += len;
            h264Len += len;
        }
    }

    // estimate a buffer size large enough for one frame, based on the YUV420 size of the given resolution
    public int getYuvBuffer(int width, int height) {
        // stride = ALIGN(width, 16)
        int stride = (int) Math.ceil(width / 16.0) * 16;
        // y_size = stride * height
        int y_size = stride * height;
        // c_stride = ALIGN(stride / 2, 16)
        int c_stride = (int) Math.ceil(width / 32.0) * 16;
        // c_size = c_stride * height / 2
        int c_size = c_stride * height / 2;
        // size = y_size + c_size * 2
        return y_size + c_size * 2;
    }
}
Usage:
byte[] tmp = rtspPacketDecode.rtp2h264(out,len);
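On the receiving side, rtp2h264() returns null until a complete NALU has been reassembled, so a typical loop keeps feeding packets and only forwards non-null results, for example to MediaCodec or into a .h264 file for testing. A rough sketch follows; the port (taken from the 9200 in the packer's commented-out send), the resolution and the handleFrame(...) call are placeholders, and the java.net imports plus your own exception handling are assumed:

// Receive loop sketch: port, resolution and handleFrame(...) are placeholders.
void receiveLoop() throws IOException {
    DatagramSocket socket = new DatagramSocket(9200);
    RtspPacketDecode rtspPacketDecode = new RtspPacketDecode(1280, 720);
    byte[] buf = new byte[2048];
    while (true) {
        DatagramPacket packet = new DatagramPacket(buf, buf.length);
        socket.receive(packet); // blocks until one RTP packet arrives
        byte[] frame = rtspPacketDecode.rtp2h264(packet.getData(), packet.getLength());
        if (frame != null) {
            // a complete NALU with start code has been reassembled
            handleFrame(frame, frame.length);
        }
    }
}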
That's all for this Java/Android RTP packing and unpacking example for H.264. I hope it gives you a useful reference, and thanks for your support.