Android手機的h264硬編碼測試
阿新 • • 發佈:2019-02-11
對網上的原始碼進行了一點小改動，程式碼如下:
/** * oppo r7s(android 4.4.4)測試通過, 紅米3(android 5.1.1)測試未通過 * @author Administrator */ @SuppressLint("NewApi") public class MainActivity extends Activity implements SurfaceHolder.Callback, PreviewCallback { DatagramSocket socket; InetAddress address; AvcEncoder avcCodec; public Camera m_camera; SurfaceView m_prevewview; SurfaceHolder m_surfaceHolder; int width = 1280; int height = 720; int framerate = 20; int bitrate = 2500000; byte[] h264 = new byte[width*height*3/2]; @Override protected void onCreate(Bundle savedInstanceState) { StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder() .detectDiskReads() .detectDiskWrites() .detectAll() .penaltyLog() .build()); StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder() .detectLeakedSqlLiteObjects() .detectLeakedClosableObjects() .penaltyLog() .penaltyDeath() .build()); super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); avcCodec = new AvcEncoder(width,height,framerate,bitrate); m_prevewview = (SurfaceView) findViewById(R.id.SurfaceViewPlay); m_surfaceHolder = m_prevewview.getHolder(); m_surfaceHolder.setFixedSize(width, height); m_surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); m_surfaceHolder.addCallback((Callback) this); try { socket = new DatagramSocket(); address = InetAddress.getByName("192.168.1.101"); } catch (SocketException e) { e.printStackTrace(); } catch (UnknownHostException e) { e.printStackTrace(); } } @Override public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) { } @Override public void surfaceCreated(SurfaceHolder arg0) { try { m_camera = Camera.open(); m_camera.setPreviewDisplay(m_surfaceHolder); Camera.Parameters parameters = m_camera.getParameters(); parameters.setPreviewSize(width, height); parameters.setPictureSize(width, height); parameters.setPreviewFormat(ImageFormat.YV12); m_camera.setParameters(parameters); m_camera.setPreviewCallback((PreviewCallback) this); //m_camera.setPreviewCallbackWithBuffer((PreviewCallback) 
this); m_camera.startPreview(); } catch (IOException e){ e.printStackTrace(); } } @Override public void surfaceDestroyed(SurfaceHolder arg0) { m_camera.setPreviewCallback(null); m_camera.stopPreview(); m_camera.release(); m_camera = null; avcCodec.close(); } @Override public void onPreviewFrame(byte[] data, Camera camera) { Log.v("h264", "onPreviewFrame"); int ret = avcCodec.offerEncoder(data, h264); if(ret > 0){ try { /** * [vlc] * 1. 工具->首選項->顯示設定->全部->輸入/編解碼器->去複用器->右邊:去複用模組 -> H264視訊去複用器 * 2. 媒體->開啟網路串流-> udp://@:5000 */ DatagramPacket packet=new DatagramPacket(h264, ret, address,5000); socket.send(packet); Log.v("h264", "send packet"); } catch (IOException e){ Log.e("h264", "udp send err:"+e); } }else{ Log.v("h264", "ret ="+ret); } } } @SuppressLint("InlinedApi") class AvcEncoder { private MediaCodec mediaCodec; private int m_width; private int m_height; private byte[] m_info = null; private byte[] yuv420 = null; private static boolean isRecognizedFormat(int colorFormat) { switch (colorFormat) { case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar: case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar: case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar: return true; default: return false; } } private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) { MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType); for (int i = 0; i < capabilities.colorFormats.length; i++) { int colorFormat = capabilities.colorFormats[i]; if (isRecognizedFormat(colorFormat)) { return colorFormat; } } Log.e("test","error format:" + codecInfo.getName() + " / " + mimeType); return 0; } private static MediaCodecInfo selectCodec(String mimeType) { int numCodecs = MediaCodecList.getCodecCount(); for (int i = 0; i < numCodecs; i++) { 
MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i); if (!codecInfo.isEncoder()) continue; String[] types = codecInfo.getSupportedTypes(); for (int j = 0; j < types.length; j++) { if (types[j].equalsIgnoreCase(mimeType)) return codecInfo; } } return null; } public AvcEncoder(int width, int height, int framerate, int bitrate) { m_width = width; m_height = height; yuv420 = new byte[width*height*3/2]; String mime = "video/avc"; int colorFormat = selectColorFormat(selectCodec(mime), mime); mediaCodec = MediaCodec.createEncoderByType(mime); MediaFormat mediaFormat = MediaFormat.createVideoFormat(mime, width, height); mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate); mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate); mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5); mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); mediaCodec.start(); } public void close() { try { mediaCodec.stop(); mediaCodec.release(); } catch (Exception e){ e.printStackTrace(); } } public int offerEncoder(byte[] input, byte[] output){ int pos = 0; swapYV12toI420(input, yuv420, m_width, m_height); try { ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers(); ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers(); int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1); Log.v("chenqy","inputBufferIndex="+inputBufferIndex); if (inputBufferIndex >= 0){ ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; inputBuffer.clear(); inputBuffer.put(yuv420); mediaCodec.queueInputBuffer(inputBufferIndex, 0, yuv420.length, 0, 0); } MediaCodec.BufferInfo bufferInfo =new MediaCodec.BufferInfo(); Log.v("chenqy","bufferInfo="+bufferInfo); int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0); Log.v("chenqy","outputBufferIndex="+outputBufferIndex); while (outputBufferIndex >= 0){ ByteBuffer outputBuffer = outputBuffers[outputBufferIndex]; byte[] outData = new 
byte[bufferInfo.size]; outputBuffer.get(outData); if(m_info != null){ System.arraycopy(outData, 0, output, pos, outData.length); pos += outData.length; }else{ ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData); if (spsPpsBuffer.getInt() == 0x00000001){ m_info = new byte[outData.length]; System.arraycopy(outData, 0, m_info, 0, outData.length); }else { return -1; } } mediaCodec.releaseOutputBuffer(outputBufferIndex, false); outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0); } if(output[4] == 0x65){ //key frame System.arraycopy(output, 0, yuv420, 0, pos); System.arraycopy(m_info, 0, output, 0, m_info.length); System.arraycopy(yuv420, 0, output, m_info.length, pos); pos += m_info.length; } } catch (Throwable t) { t.printStackTrace(); } return pos; } private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height) { System.arraycopy(yv12bytes, 0, i420bytes, 0,width*height); System.arraycopy(yv12bytes, width*height+width*height/4, i420bytes, width*height,width*height/4); System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/4,width*height/4); } }
佈局:
<!-- Full-screen SurfaceView hosting the camera preview.
     Fix: fill_parent has been deprecated since API 8; use match_parent. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="horizontal"
    tools:context=".MainActivity" >

    <SurfaceView
        android:id="@+id/SurfaceViewPlay"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

</RelativeLayout>
許可權:
<!-- Permissions/features for camera capture and UDP streaming.
     NOTE(review): android.permission.RECORD_VIDEO does not appear in the
     standard Android permission list (video capture is covered by CAMERA);
     confirm whether it can be removed. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> <uses-permission android:name="android.permission.INTERNET"/> <uses-permission android:name="android.permission.RECORD_VIDEO"/> <uses-permission android:name="android.permission.RECORD_AUDIO"/> <uses-permission android:name="android.permission.CAMERA" /> <uses-feature android:name="android.hardware.camera" /> <uses-feature android:name="android.hardware.camera.autofocus" /> <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" /> <uses-feature android:name="android.hardware.wifi" android:required="true" />