
Exporting Unity Particle Effects as PNG Sequence Frames

This feature is not especially practical, but our artists asked for it, so I spent a little time working it out.

Our game does not use the Unity engine, but the effects artists collected a batch of Unity particle effects and wanted them exported as PNG sequence frames for our game to consume. In effect, Unity serves here as an effects editor. This is not as odd as it sounds: the workflow with Particle Illusion or 3ds Max follows much the same idea, except those tools provide a proper export feature and Unity does not.

First, the code:

using UnityEngine;
using UnityEditor;
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;

public class ParticleExporter : MonoBehaviour
{
    // Default folder name where you want the animations to be output
    public string folder = "PNG_Animations";

    // Framerate at which you want to play the animation
    public int frameRate = 25;                  // export frame rate; setting Time.captureFramerate makes Unity ignore real time and step at this fixed rate
    public float frameCount = 100;              // number of frames to export; 100 frames at 25 fps covers 5 seconds of the effect. Capturing each frame is slow, so the export takes far longer than the effect's playback time
    public int screenWidth = 960;               // not used yet; ideally this would set the screen size directly (i.e. the size of the effect's canvas)
    public int screenHeight = 640;
    public Vector3 cameraPosition = Vector3.zero;
    public Vector3 cameraRotation = Vector3.zero;

    private string realFolder = ""; // real folder where the output files will be
    private float originaltimescaleTime; // track the original time scale so we can freeze the animation between frames
    private float currentTime = 0;
    private bool over = false;
    private int currentIndex = 0;
    private Camera exportCamera;    // camera used for the export; renders into a RenderTexture

    public void Start()
    {
        // set frame rate
        Time.captureFramerate = frameRate;

        // Build the output folder path: <folder>/<object name>
        realFolder = Path.Combine(folder, name);

        // Create the folder
        if (!Directory.Exists(realFolder)) {
            Directory.CreateDirectory(realFolder);
        }

        originaltimescaleTime = Time.timeScale;

        GameObject goCamera = Camera.main.gameObject;
        if (cameraPosition != Vector3.zero) {
            goCamera.transform.position = cameraPosition;
        }

        if (cameraRotation != Vector3.zero) {
            goCamera.transform.rotation = Quaternion.Euler(cameraRotation);
        }

        GameObject go = Instantiate(goCamera) as GameObject;
        exportCamera = go.GetComponent<Camera>();

        currentTime = 0;
    }

    void Update()
    {
        currentTime += Time.deltaTime;
        if (!over && currentIndex >= frameCount) {
            over = true;
            Cleanup();
            Debug.Log("Finish");
            return;
        }

        // capture a screenshot every frame
        StartCoroutine(CaptureFrame());
    }

    void Cleanup()
    {
        DestroyImmediate(exportCamera);
        DestroyImmediate(gameObject);
    }

    IEnumerator CaptureFrame()
    {
        // Stop time
        Time.timeScale = 0;
        // Yield until the end of the frame before reading pixels;
        // this is important, otherwise ReadPixels will raise an error
        yield return new WaitForEndOfFrame();

        string filename = String.Format("{0}/{1:D04}.png", realFolder, ++currentIndex);
        Debug.Log(filename);

        int width = Screen.width;
        int height = Screen.height;

        //Initialize and render textures
        RenderTexture blackCamRenderTexture = new RenderTexture(width, height, 24, RenderTextureFormat.ARGB32);
        RenderTexture whiteCamRenderTexture = new RenderTexture(width, height, 24, RenderTextureFormat.ARGB32);

        exportCamera.targetTexture = blackCamRenderTexture;
        exportCamera.backgroundColor = Color.black;
        exportCamera.Render();
        RenderTexture.active = blackCamRenderTexture;
        Texture2D texb = GetTex2D();

        //Now do it for Alpha Camera
        exportCamera.targetTexture = whiteCamRenderTexture;
        exportCamera.backgroundColor = Color.white;
        exportCamera.Render();
        RenderTexture.active = whiteCamRenderTexture;
        Texture2D texw = GetTex2D();

        // If we have both textures then create final output texture
        if (texw && texb) {
            Texture2D outputtex = new Texture2D(width, height, TextureFormat.ARGB32, false);

            // We need to compute alpha ourselves, because the particles use additive shaders.
            // Create Alpha from the difference between black and white camera renders
            for (int y = 0; y < outputtex.height; ++y) { // each row
                for (int x = 0; x < outputtex.width; ++x) { // each column
                    float alpha;
                    alpha = texw.GetPixel(x, y).r - texb.GetPixel(x, y).r;
                    alpha = 1.0f - alpha;
                    Color color;
                    if (alpha == 0) {
                        color = Color.clear;
                    } else {
                        color = texb.GetPixel(x, y);
                    }
                    color.a = alpha;
                    outputtex.SetPixel(x, y, color);
                }
            }


            // Encode the resulting output texture to a byte array then write to the file
            byte[] pngShot = outputtex.EncodeToPNG();
            File.WriteAllBytes(filename, pngShot);

            // cleanup; otherwise memory will leak
            pngShot = null;
            RenderTexture.active = null;
            DestroyImmediate(outputtex);
            outputtex = null;
            DestroyImmediate(blackCamRenderTexture);
            blackCamRenderTexture = null;
            DestroyImmediate(whiteCamRenderTexture);
            whiteCamRenderTexture = null;
            DestroyImmediate(texb);
            texb = null;
            DestroyImmediate(texw);
            texw = null;

            System.GC.Collect();

            // Reset the time scale, then move on to the next frame.
            Time.timeScale = originaltimescaleTime;
        }
    }

    // Read the current screen contents into a Texture2D
    private Texture2D GetTex2D()
    {
        // Create a texture the size of the screen, ARGB32 format
        int width = Screen.width;
        int height = Screen.height;
        Texture2D tex = new Texture2D(width, height, TextureFormat.ARGB32, false);
        // Read screen contents into the texture
        tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        tex.Apply();
        return tex;
    }
}

Notes on the key points:

1. The overall approach: set up the camera in Unity, play the effect normally, capture a screenshot every frame, and save the shots as the PNG sequence we need. This works for more than just effects; models work too. For example, to show hundreds or thousands of characters on screen at once, or for unimportant monsters and scenery objects, you can export them as 2D sequence frames this way, greatly improving performance and making otherwise impossible scenes possible.

2. Controlling time and frame rate. Capturing a frame takes far longer than the frame interval, so an effect that plays for 1 second can take over a minute to export. Time.captureFramerate sets the frame rate; once set, Unity ignores real time, and effects and models advance by that fixed step each frame. This API exists precisely for video recording (a minimal sketch of the behavior follows).
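
A minimal sketch of this behavior (the class name CaptureFramerateDemo is illustrative, not part of the exporter): with captureFramerate set, each rendered frame should advance game time by the same fixed step, no matter how long the frame really took.

using UnityEngine;

public class CaptureFramerateDemo : MonoBehaviour
{
    void Start()
    {
        // Fix the game-time step per rendered frame, ignoring real time
        Time.captureFramerate = 25;
    }

    void Update()
    {
        // With timeScale == 1, this should log a constant 0.04s step,
        // even if writing a screenshot makes the real frame take seconds
        Debug.Log("deltaTime: " + Time.deltaTime);
    }
}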

3. Controlling the effect's canvas. I have not found a good way to do this yet: since we take a full-screen capture, the size of the Game window is the size of the effect's canvas (one possible direction is sketched below).
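
An untested sketch of one alternative: since the export camera already renders into a RenderTexture, the RenderTexture and the ReadPixels rect could be sized by the otherwise-unused screenWidth/screenHeight fields, decoupling the output size from the Game window. The class and method names here are illustrative, not part of the original tool.

using UnityEngine;

public class CustomSizeCapture : MonoBehaviour
{
    public int screenWidth = 960;
    public int screenHeight = 640;
    public Camera exportCamera;

    public Texture2D CaptureAtCustomSize()
    {
        // Size the render target independently of the Game window
        RenderTexture rt = new RenderTexture(screenWidth, screenHeight, 24, RenderTextureFormat.ARGB32);
        exportCamera.targetTexture = rt;
        exportCamera.Render();
        RenderTexture.active = rt;

        // ReadPixels reads from RenderTexture.active, so the rect can use
        // the RenderTexture's size instead of Screen.width/Screen.height
        Texture2D tex = new Texture2D(screenWidth, screenHeight, TextureFormat.ARGB32, false);
        tex.ReadPixels(new Rect(0, 0, screenWidth, screenHeight), 0, 0);
        tex.Apply();

        RenderTexture.active = null;
        exportCamera.targetTexture = null;
        DestroyImmediate(rt);
        return tex;
    }
}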

4. The effect's framing is controlled by adjusting the camera's position and rotation.

5. The screenshot function is GetTex2D(), whose core is the ReadPixels call. Note that CaptureFrame must run as a coroutine, because of the line yield return new WaitForEndOfFrame(); without it, Unity reports an error to the effect that ReadPixels was not called inside a drawing frame (the minimal pattern is sketched below).
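
The minimal required pattern, reduced from the exporter above (the class name SafeReadPixels is illustrative): wrap the capture in a coroutine and wait for the end of the frame before touching the frame buffer.

using System.Collections;
using UnityEngine;

public class SafeReadPixels : MonoBehaviour
{
    IEnumerator CaptureOnce()
    {
        // Without this line, Unity complains that ReadPixels was called
        // outside of a drawing frame
        yield return new WaitForEndOfFrame();

        Texture2D tex = new Texture2D(Screen.width, Screen.height, TextureFormat.ARGB32, false);
        tex.ReadPixels(new Rect(0, 0, Screen.width, Screen.height), 0, 0);
        tex.Apply();
        // ... use tex, then release it with DestroyImmediate(tex)
    }

    void Start()
    {
        StartCoroutine(CaptureOnce());
    }
}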

6. Capturing is expensive, so pause time with Time.timeScale = 0 before the capture and restore it afterwards.

7. Release every resource and force a GC after each capture; otherwise you will likely run out of memory. Capturing 100 frames can easily push memory usage to two or three gigabytes.

8. The capture uses two RenderTextures to render the image over a black background and over a white background, then computes the alpha from the two images (a short derivation follows). For anything other than effects this is unnecessary: simply set the alpha of the camera's backgroundColor to 0. But effects use special shaders such as Additive, which involve alpha blending; if you render an effect against a transparent background that way, the exported image contains nothing. A solid background is therefore required.
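
Why the black/white pair recovers alpha (a short derivation, assuming the common "source over background" model R = C + (1 - a) * B, where C is the shader's own color contribution and a its opacity): on a black background (B = 0) the camera sees Rb = C; on a white background (B = 1) it sees Rw = C + (1 - a). Subtracting gives Rw - Rb = 1 - a, hence a = 1 - (Rw - Rb), which is exactly what the pixel loop computes from the red channels of the two captures, while the black capture Rb supplies the output color. Additive shaders fit the same formula because the white render saturates at 1, so the computed alpha becomes the pixel's brightness and brighter additive pixels come out more opaque.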