Receiving an FFmpeg UDP Stream in Unity
Published: 2021-01-02
- Attach the script below to an empty GameObject in the Unity scene:
// proof of concept: ffmpeg raw video into a Unity Texture2D via UDP streaming
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using UnityEngine;
using Debug = UnityEngine.Debug;

namespace UnityCoder.RawVideoUDP
{
    public class RawVideoReceiver : MonoBehaviour
    {
        public Material targetMat;

        UdpClient client;
        int port = 8888;
        int receiveBufferSize = 1472 * 1000;
        IPEndPoint ipEndPoint;
        private object obj = null;
        private AsyncCallback AC;
        byte[] receivedBytes;

        Texture2D tex;
        public int size = 256;
        int imageSize = 0;

        byte[] dump;
        int bufferSize = 0;
        int bufferIndex = 0;
        int bufferFrameStart = 0;
        byte[] temp;
        bool frameReady = false;

        void Start()
        {
            //tex = new Texture2D(size, size, TextureFormat.RGB24, false, false);
            tex = new Texture2D(size, size, TextureFormat.RGBA32, false, false);
            tex.filterMode = FilterMode.Point;
            tex.wrapMode = TextureWrapMode.Clamp;

            imageSize = size * size * 4;
            temp = new byte[imageSize];

            // init pixels with a bright color so the texture is visibly "alive" before data arrives
            for (int i = 0; i < imageSize; i += 4)
            {
                temp[i] = 255;
                temp[i + 1] = 0;
                temp[i + 2] = 255;
            }
            tex.LoadRawTextureData(temp);
            tex.Apply(false);

            bufferSize = imageSize * 100;
            dump = new byte[bufferSize];

            targetMat.mainTexture = tex;

            InitializeUDPClient();
        }

        Queue<int> frameIndex = new Queue<int>();
        int frameBufferCount = 0;

        void FixedUpdate()
        {
            // if we have queued frames, copy the next one into the texture
            if (frameBufferCount > 0)
            {
                Buffer.BlockCopy(dump, frameIndex.Dequeue(), temp, 0, imageSize);
                frameBufferCount--;
                tex.LoadRawTextureData(temp);
                tex.Apply(false);
            }
        }

        void ReceivedUDPPacket(IAsyncResult result)
        {
            try
            {
                receivedBytes = client.EndReceive(result, ref ipEndPoint);
                var len = receivedBytes.Length;

                // the dump buffer is only used once until the end; it should wrap around,
                // but for now reception simply stops when it fills up
                if (bufferIndex + len > bufferSize)
                {
                    Debug.LogError("Buffer finished, should fix this..");
                    return;
                }

                Buffer.BlockCopy(receivedBytes, 0, dump, bufferIndex, len);
                bufferIndex += len;

                // once a full frame's worth of bytes has accumulated, queue its start offset
                if (bufferIndex - bufferFrameStart >= imageSize)
                {
                    frameIndex.Enqueue(bufferFrameStart);
                    frameBufferCount++;
                    bufferFrameStart += imageSize;
                }
            }
            catch (Exception e)
            {
                Debug.LogException(e);
            }
            client.BeginReceive(AC, obj);
        }

        public void InitializeUDPClient()
        {
            ipEndPoint = new IPEndPoint(IPAddress.Any, port);
            client = new UdpClient();
            client.Client.ReceiveBufferSize = receiveBufferSize;
            client.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, optionValue: true);
            client.ExclusiveAddressUse = false;
            client.EnableBroadcast = true;
            client.Client.Bind(ipEndPoint);
            client.DontFragment = true;
            client.Client.ReceiveBufferSize = 1472 * 100000;
            AC = new AsyncCallback(ReceivedUDPPacket);
            client.BeginReceive(AC, obj);
            Debug.Log("Started UDP listener..");
        }

        private void OnDestroy()
        {
            if (client != null)
            {
                client.Close();
            }
        }
    }
}
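A point that is easy to miss: the receiver assumes every incoming frame is exactly size × size × 4 bytes of raw RGBA data, so the ffmpeg output resolution and pixel format must match the texture exactly. The small sketch below only spells out the numbers implied by the script's defaults (the class name FrameSizeCheck is illustrative and not part of the original script):

using UnityEngine;

// Back-of-the-envelope numbers implied by the receiver's defaults.
public class FrameSizeCheck : MonoBehaviour
{
    void Start()
    {
        int size = 256;
        int imageSize = size * size * 4;    // 262,144 bytes per 256x256 RGBA32 frame
        int bufferSize = imageSize * 100;   // the 'dump' buffer holds up to 100 frames (~25 MB)
        Debug.Log($"frame: {imageSize} bytes, buffer: {bufferSize} bytes");
        // FixedUpdate (default 0.02 s timestep) displays at most 50 frames per second,
        // comfortably ahead of the 5 fps ffmpeg push used later in this article.
    }
}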
- Create a Plane or Quad and give it a material that uses an Unlit shader (e.g. Unlit/Texture) to display the incoming image;
- Assign the Unlit material used on that Plane or Quad to the Target Mat field of the Raw Video Receiver component on the empty GameObject created in the first step (a script-based alternative is sketched right after this step);
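If you prefer to wire this up from code rather than in the editor, a minimal sketch follows. The class name StreamSetup and the Shader.Find lookup of the built-in Unlit/Texture shader are illustrative choices, not part of the original article:

using UnityEngine;

namespace UnityCoder.RawVideoUDP
{
    // Optional: the same scene wiring done from code instead of the editor.
    public class StreamSetup : MonoBehaviour
    {
        void Start()
        {
            // Quad that will display the incoming video
            var quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
            var mat = new Material(Shader.Find("Unlit/Texture"));
            quad.GetComponent<MeshRenderer>().material = mat;

            // Empty holder object with the receiver component, fed the same material
            var receiverGO = new GameObject("RawVideoReceiver");
            var receiver = receiverGO.AddComponent<RawVideoReceiver>();
            receiver.targetMat = mat;
        }
    }
}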
- Position the camera so it can see the Plane or Quad;
- On the same machine, use ffmpeg to push a raw-video stream of the desktop. Since the receiving texture is RGBA32 (4 bytes per pixel), the output pixel format must be rgba; the command is as follows (an optional in-Unity launcher is sketched below it):
ffmpeg -f gdigrab -framerate 5 -i desktop -vf scale=256:256 -pix_fmt rgba -f rawvideo udp://127.0.0.1:8888
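For convenience, the push can also be started from inside Unity with System.Diagnostics.Process. This is an optional sketch not covered by the original article; it assumes ffmpeg is available on the system PATH, and the class name FFmpegPushLauncher is illustrative:

using System.Diagnostics;
using UnityEngine;
using Debug = UnityEngine.Debug;

namespace UnityCoder.RawVideoUDP
{
    // Optional: launch the ffmpeg desktop push when the scene starts and stop it on teardown.
    public class FFmpegPushLauncher : MonoBehaviour
    {
        Process ffmpeg;

        void Start()
        {
            ffmpeg = new Process();
            ffmpeg.StartInfo.FileName = "ffmpeg";
            ffmpeg.StartInfo.Arguments =
                "-f gdigrab -framerate 5 -i desktop -vf scale=256:256 " +
                "-pix_fmt rgba -f rawvideo udp://127.0.0.1:8888";
            ffmpeg.StartInfo.UseShellExecute = false;
            ffmpeg.StartInfo.CreateNoWindow = true;
            ffmpeg.Start();
            Debug.Log("Started ffmpeg push..");
        }

        void OnDestroy()
        {
            if (ffmpeg != null && !ffmpeg.HasExited)
            {
                ffmpeg.Kill();
            }
        }
    }
}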
- Press Play in Unity; the streamed desktop should appear on the Plane or Quad.
Author: 艾孜爾江