
Capturing Video with Metal & Processing Video with Metal

You can capture video with AVFoundation and play it back with AVPlayer. To process video with Metal, however, every frame has to be processed individually and then displayed, which means working with CMSampleBuffer. The filter in this article uses Metal's built-in MetalPerformanceShaders (MPS); MPS provides many common filters, and Apple's official documentation lists them all.
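Before the full example, here is the core MPS pattern both sections below rely on: an MPS kernel is an ordinary object that encodes its work into a command buffer, reading from a source texture and writing to a destination texture. A minimal sketch (the function and texture names here are placeholders, not part of the project):

import Metal
import MetalPerformanceShaders

// Minimal sketch: run one MPS kernel over a texture.
// `inTexture`/`outTexture` stand in for textures you already own.
func applyGaussianBlur(device: MTLDevice, queue: MTLCommandQueue,
                       inTexture: MTLTexture, outTexture: MTLTexture) {
    let blur = MPSImageGaussianBlur(device: device, sigma: 10) // sigma controls blur strength
    guard let buffer = queue.makeCommandBuffer() else { return }
    blur.encode(commandBuffer: buffer,
                sourceTexture: inTexture,
                destinationTexture: outTexture)
    buffer.commit()
}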

1. Processing Live Camera Video with Metal

import UIKit
import AVFoundation
import MetalKit
import MetalPerformanceShaders

class ViewController: UIViewController {

    // Buttons
    var captureButton:UIButton!
    var recodButton:UIButton!
    
    var session: AVCaptureSession = AVCaptureSession()
    var queue = DispatchQueue(label: "queue")
    var input: AVCaptureDeviceInput?
    lazy var previewLayer = AVCaptureVideoPreviewLayer(session: self.session)
    lazy var recordOutput = AVCaptureMovieFileOutput()

    // Metal-related
    var device: MTLDevice!
    var mtkView: MTKView!
    var texture: MTLTexture?
    var tetureCache: CVMetalTextureCache?

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white

        captureButton = UIButton(frame: CGRect(x: 10, y: view.bounds.size.height - 60, width: 150, height: 50))
        captureButton.backgroundColor = .gray
        captureButton.setTitle("start capture", for: .normal)
        captureButton.addTarget(self, action: #selector(capture(btn:)), for: .touchUpInside)
        view.addSubview(captureButton)

        recodButton = UIButton(frame: CGRect(x: view.bounds.size.width - 160, y: view.bounds.size.height - 60, width: 150, height: 50))
        recodButton.backgroundColor = .gray
        recodButton.setTitle("play movie", for: .normal)
        recodButton.addTarget(self, action: #selector(recordAction(btn:)), for: .touchUpInside)
        view.addSubview(recodButton)
    }

    func setMetalConfig() {
        guard let device1 = MTLCreateSystemDefaultDevice() else {
            return
        }
        self.device = device1
        mtkView = MTKView(frame: view.bounds, device: device)
        mtkView.delegate = self
        mtkView.framebufferOnly = false
        // Create the texture cache
        CVMetalTextureCacheCreate(nil, nil, device1, nil, &tetureCache)
    }

    @objc func recordAction(btn: UIButton) {
        btn.isSelected = !btn.isSelected
        if session.isRunning {
            if btn.isSelected {
                btn.setTitle("stop record", for: .normal)
                if !session.isRunning {
                    session.startRunning()
                }
                if session.canAddOutput(recordOutput) {
                    session.addOutput(recordOutput)
                }
                let connection = recordOutput.connection(with: .video)
                connection?.preferredVideoStabilizationMode = .auto
                guard let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else {
                    return
                }
                let url = URL(fileURLWithPath: "\(path)/test.mp4")
                recordOutput.startRecording(to: url, recordingDelegate: self)
            } else {
                btn.setTitle("start record", for: .normal)
                recordOutput.stopRecording()
            }
        } else {
            // btn.setTitle("play movie", for: .normal)
            let moVC = MovieViewController()
            self.navigationController?.pushViewController(moVC, animated: true)
        }
    }

    @objc func capture(btn: UIButton) {
        btn.isSelected = !btn.isSelected
        if btn.isSelected {
            // recodButton.isHidden = false
            recodButton.setTitle("start record", for: .normal)
            btn.setTitle("stop capture", for: UIControl.State.normal)
            guard let device = getCamera(postion: .back) else {
                return
            }
            guard let input = try? AVCaptureDeviceInput(device: device) else {
                return
            }
            self.input = input
            if session.canAddInput(input) {
                session.addInput(input)
            }
            let output = AVCaptureVideoDataOutput()
            output.setSampleBufferDelegate(self, queue: queue)
            if session.canAddOutput(output) {
                session.addOutput(output)
            }
            // Use BGRA here rather than a YUV color space, so no shader conversion is needed.
            // Note: this must match the pixel format passed to CVMetalTextureCacheCreateTextureFromImage
            // later, otherwise the rendered video will look wrong.
            output.videoSettings = [String(kCVPixelBufferPixelFormatTypeKey): NSNumber(value: kCVPixelFormatType_32BGRA)]
            let connection: AVCaptureConnection = output.connection(with: .video)!
            connection.videoOrientation = .portrait
            // previewLayer.frame = view.bounds
            // view.layer.insertSublayer(previewLayer, at: 0)
            setMetalConfig()
            view.insertSubview(mtkView, at: 0)
            session.startRunning()
        } else {
            // recodButton.isHidden = true
            btn.setTitle("start capture", for: .normal)
            if recordOutput.isRecording {
                recordOutput.stopRecording()
            }
            recodButton.isSelected = false
            recodButton.setTitle("play movie", for: .normal)
            session.stopRunning()
            // previewLayer.removeFromSuperlayer()
            mtkView.removeFromSuperview()
        }
    }

    // Get the camera device at the given position
    func getCamera(postion: AVCaptureDevice.Position) -> AVCaptureDevice? {
        var devices = [AVCaptureDevice]()
        if #available(iOS 10.0, *) {
            let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
            devices = discoverySession.devices
        } else {
            devices = AVCaptureDevice.devices(for: AVMediaType.video)
        }
        for device in devices {
            if device.position == postion {
                return device
            }
        }
        return nil
    }

    // Switch between the front and back cameras
    func swapFrontAndBackCameras() {
        if let input = input {
            var newDevice: AVCaptureDevice?
            if input.device.position == .front {
                newDevice = getCamera(postion: AVCaptureDevice.Position.back)
            } else {
                newDevice = getCamera(postion: AVCaptureDevice.Position.front)
            }
            if let new = newDevice {
                do {
                    let newInput = try AVCaptureDeviceInput(device: new)
                    session.beginConfiguration()
                    session.removeInput(input)
                    session.addInput(newInput)
                    self.input = newInput
                    session.commitConfiguration()
                } catch let error as NSError {
                    print("AVCaptureDeviceInput(): \(error)")
                }
            }
        }
    }

    // Keep the video orientation in sync with the interface orientation
    func setupVideoPreviewLayerOrientation() {
        if let connection = previewLayer.connection, connection.isVideoOrientationSupported {
            if #available(iOS 13.0, *) {
                if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation {
                    switch orientation {
                    case .portrait:
                        connection.videoOrientation = .portrait
                    case .landscapeLeft:
                        connection.videoOrientation = .landscapeLeft
                    case .landscapeRight:
                        connection.videoOrientation = .landscapeRight
                    case .portraitUpsideDown:
                        connection.videoOrientation = .portraitUpsideDown
                    default:
                        connection.videoOrientation = .portrait
                    }
                }
            } else {
                switch UIApplication.shared.statusBarOrientation {
                case .portrait:
                    connection.videoOrientation = .portrait
                case .landscapeRight:
                    connection.videoOrientation = .landscapeRight
                case .landscapeLeft:
                    connection.videoOrientation = .landscapeLeft
                case .portraitUpsideDown:
                    connection.videoOrientation = .portraitUpsideDown
                default:
                    connection.videoOrientation = .portrait
                }
            }
        }
    }
}

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate, MTKViewDelegate {

    // MTKViewDelegate
    func draw(in view: MTKView) {
        guard let queue = device.makeCommandQueue() else {
            return
        }
        guard let buffer = queue.makeCommandBuffer() else {
            return
        }
        // guard let descriptor = mtkView.currentRenderPassDescriptor else { return }
        // guard let encode = buffer.makeRenderCommandEncoder(descriptor: descriptor) else {
        //     return
        // }
        // Metal ships many built-in filters in MetalPerformanceShaders
        let blurFilter = MPSImageGaussianBlur(device: device, sigma: 10)
        guard let texture = self.texture else {
            return
        }
        blurFilter.encode(commandBuffer: buffer, sourceTexture: texture, destinationTexture: view.currentDrawable!.texture)
        buffer.present(view.currentDrawable!)
        buffer.commit()
        self.texture = nil
    }

    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    }

    // Recording finished
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
    }

    // A new frame was captured
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        var metalTexture: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, self.tetureCache!, imageBuffer, nil, MTLPixelFormat.bgra8Unorm, CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer), 0, &metalTexture)
        if status == kCVReturnSuccess {
            mtkView.drawableSize = CGSize(width: CVPixelBufferGetWidth(imageBuffer), height: CVPixelBufferGetHeight(imageBuffer))
            self.texture = CVMetalTextureGetTexture(metalTexture!)
        }
    }

    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    }
}
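The Gaussian blur in draw(in:) is just one MPS kernel; any other MPSUnaryImageKernel can be dropped into the same spot. A sketch (not part of the original project) that swaps in Sobel edge detection:

import MetalPerformanceShaders

// Sketch: an alternative kernel for draw(in:). MPSImageSobel uses the same
// encode(commandBuffer:sourceTexture:destinationTexture:) call as the blur.
func encodeSobel(device: MTLDevice, commandBuffer: MTLCommandBuffer,
                 source: MTLTexture, destination: MTLTexture) {
    let sobel = MPSImageSobel(device: device)
    sobel.encode(commandBuffer: commandBuffer,
                 sourceTexture: source,
                 destinationTexture: destination)
}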

2. Processing an Existing Video File

import UIKit
import AVFoundation
import MetalKit
import MetalPerformanceShaders

struct ConvertMatrix {
    var matrix :float3x3
    var verctor :SIMD3<Float>
    
}
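
// Note (illustrative): this struct must mirror the MSL struct CCConvertMatrix
// in Section 4. float3x3 stores three 16-byte columns (48 bytes) and
// SIMD3<Float> is padded to 16 bytes, so MemoryLayout<ConvertMatrix>.size is 64,
// exactly the layout the fragment shader reads from buffer(0).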

class MovieViewController: UIViewController {

   
    var device :MTLDevice!
    var mtkView : MTKView!
    
    var reader: DQAssetReader?
    
    var texture : MTLTexture?
    var textureUV:MTLTexture?
    
    var tetureCache : CVMetalTextureCache?
    
    var state : MTLRenderPipelineState?
    var commendQueue: MTLCommandQueue?
    
    var vertexbuffer :MTLBuffer?
    var cmatrixBuffer :MTLBuffer?
    
    var useYUV = true // true reads 420v (YUV) frames; false reads BGRA
    
    var timeRange : CMTimeRange?
    
    var pauseButton:UIButton!
    override func viewDidLoad() {
        super.viewDidLoad()

        self.title = "movie"
        self.view.backgroundColor = .white
        
        let path = Bundle.main.path(forResource: "123", ofType: "mp4")
        let url1 = URL(fileURLWithPath: path!)
        
        reader = DQAssetReader(url: url1,valueYUV: useYUV)
        // start = 2 s (flags 1 = .valid); duration flags 5 = .valid | .positiveInfinity,
        // i.e. read from the 2-second mark to the end of the file.
        reader?.timeRange = CMTimeRange(start: CMTime(value: 2, timescale: 1, flags: CMTimeFlags(rawValue: 1), epoch: 0), duration: CMTime(value: 0, timescale: 0, flags: CMTimeFlags(rawValue: 5), epoch: 0))
        setMetalConfig()
        vertexData()
        yuvToRGBmatrix()
        
        pauseButton = UIButton(frame: CGRect(x: 0, y: view.frame.size.height - 100, width: 100, height: 50))
        pauseButton.center.x = view.center.x
        
        pauseButton.setTitle("Pause", for:.normal)
        pauseButton.setTitle("Resume", for:.selected)
        pauseButton.backgroundColor = .gray
        view.addSubview(pauseButton)
        pauseButton.addTarget(self, action: #selector(pauseAction(btn:)), for: .touchUpInside)
        
    }
    
    @objc func pauseAction(btn:UIButton){
        btn.isSelected = !btn.isSelected
        
        if !btn.isSelected {
            // If the reader has run out of frames, rebuild it so playback restarts.
            if reader?.readBuffer() == nil {
                reader?.setUpAsset()
                pauseButton.setTitle("Resume", for:.selected)
            }
        }
    }
    
    func setMetalConfig()  {
        guard let device1 = MTLCreateSystemDefaultDevice() else{
            return
        }
        self.device = device1
        mtkView = MTKView(frame: view.bounds, device: device)
        
        mtkView.delegate = self
        
        mtkView.framebufferOnly = false
        
        // Create the texture cache
        CVMetalTextureCacheCreate(nil, nil, device1, nil, &tetureCache)
        
        view.addSubview(mtkView)
        let library = device.makeDefaultLibrary()
        let verFunc = library?.makeFunction(name: "vertexShader")
        let fragFunc = library?.makeFunction(name: "samplingShader")
        
        let descriptor =  MTLRenderPipelineDescriptor()
        descriptor.fragmentFunction = fragFunc
        descriptor.vertexFunction = verFunc
        descriptor.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
        state = try? device.makeRenderPipelineState(descriptor: descriptor)
        
        commendQueue = device.makeCommandQueue()
        
    }

    func vertexData() {
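        // Each vertex is 8 floats (32 bytes): position x, y, z, w, then texture s, t,
        // plus two floats of padding so the stride matches the MSL CCVertex struct
        // (vector_float4 + vector_float2, padded to 16-byte alignment).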
        var vertex:[Float] = [
             1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
            -1.0, -1.0, 0.0, 1.0,  0.0, 1.0,1.0,1.0,
            -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
             1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
            -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
             1.0,  1.0, 0.0, 1.0,  1.0, 0.0,1.0,1.0
        ]
        
        vertexbuffer = device.makeBuffer(bytes: &vertex, length: MemoryLayout<Float>.size * vertex.count, options: MTLResourceOptions.storageModeShared)
    }
    
    func changeVertex(sampleBuffer:CMSampleBuffer) {
        
            var vertexs:[Float] = [
                1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
               -1.0, -1.0, 0.0, 1.0,  0.0, 1.0,1.0,1.0,
               -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
               -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                1.0,  1.0, 0.0, 1.0,  1.0, 0.0,1.0,1.0
            ]
            
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                return
            }
            let width = CVPixelBufferGetWidth(imageBuffer)
            let height = CVPixelBufferGetHeight(imageBuffer)
                   
            // Aspect-fit: compare the view's aspect ratio with the frame's and
            // shrink one axis so the video fits the view without distortion.
            let scaleF = CGFloat(view.frame.height)/CGFloat(view.frame.width)
            let scaleI = CGFloat(height)/CGFloat(width)

            let imageScale = scaleF > scaleI ? (1, scaleI/scaleF) : (scaleF/scaleI, 1)

            // Scale the x (i % 8 == 0) and y (i % 8 == 1) component of every vertex.
            for (i,v) in vertexs.enumerated(){
                if i % 8 == 0 {
                    vertexs[i] = v * Float(imageScale.0)
                }
                if i % 8 == 1{
                    vertexs[i] = v * Float(imageScale.1)
                }
            }
        
            vertexbuffer = device.makeBuffer(bytes: vertexs, length: MemoryLayout<Float>.size * vertexs.count, options: MTLResourceOptions.storageModeShared)
        
    }
    
    func yuvToRGBmatrix() {
        
        /*
         YUV <-> RGB conversion.
         YUV is used for transmission because it saves bandwidth:
         4:4:4  keeps every Y, U and V sample                      (no saving)
         4:2:2  samples U/V at every other pixel                   (saves 1/3)
         4:2:0  U on one row, V on the next, still every other pixel (saves 1/2)

         Y =  0.299 * R + 0.587 * G + 0.114 * B
         U = -0.174 * R - 0.289 * G + 0.436 * B
         V =  0.615 * R - 0.515 * G - 0.100 * B

         R = Y + 1.14 * V
         G = Y - 0.390 * U - 0.58 * V
         B = Y + 2.03 * U
         */

        // 1. Conversion matrices
        // BT.601, which is the standard for SDTV.
        let kColorConversion601DefaultMatrix = float3x3(
            SIMD3<Float>(1.164,1.164, 1.164),
            SIMD3<Float>(0.0, -0.392, 2.017),
            SIMD3<Float>(1.596, -0.813, 0.0))
        
        // BT.601 full range
        let kColorConversion601FullRangeMatrix = float3x3(
            SIMD3<Float>(1.0,    1.0,    1.0),
            SIMD3<Float>(0.0,  -0.343, 1.765),
            SIMD3<Float>(1.4,    -0.711, 0.0))
        
        // BT.709, which is the standard for HDTV.
        let kColorConversion709DefaultMatrix = float3x3(
            SIMD3<Float>(1.164, 1.164, 1.164),
            SIMD3<Float>(0.0,  -0.213, 2.112),
            SIMD3<Float>(1.793, -0.533,  0.0))
        
        
        let offset = SIMD3<Float>(-(16.0/255.0), -0.5, -0.5)
        
        var cMatrix = ConvertMatrix(matrix: kColorConversion601FullRangeMatrix, verctor: offset)
        
        self.cmatrixBuffer = device.makeBuffer(bytes: &cMatrix, length: MemoryLayout<ConvertMatrix>.size, options: .storageModeShared)
        
        
    }
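
    // Illustrative sketch (not in the original project): apply the same
    // full-range BT.601 conversion on the CPU for a single sample, mirroring
    // what the fragment shader in Section 4 computes per pixel:
    // rgb = matrix * (yuv + offset).
    func convertSampleToRGB(_ yuv: SIMD3<Float>) -> SIMD3<Float> {
        let offset = SIMD3<Float>(-(16.0/255.0), -0.5, -0.5)
        let matrix = float3x3(
            SIMD3<Float>(1.0,  1.0,    1.0),
            SIMD3<Float>(0.0, -0.343,  1.765),
            SIMD3<Float>(1.4, -0.711,  0.0))
        // e.g. convertSampleToRGB(SIMD3<Float>(0.5, 0.5, 0.5)) ≈ (0.44, 0.44, 0.44), a mid grey
        return matrix * (yuv + offset)
    }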

}

extension MovieViewController:MTKViewDelegate {
    
    func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
        
    }
    
    
    func draw(in view: MTKView) {
        
        if pauseButton.isSelected  {
            return
        }
        guard let commandBuffer = commendQueue?.makeCommandBuffer() else {
            return
        }
        //texture
        guard let sample = self.reader?.readBuffer() else {
            pauseButton.isSelected = true
            pauseButton.setTitle("Replay", for: UIControl.State.selected)
            return
            
        }
        
        //encode
        guard let passDescriptor = view.currentRenderPassDescriptor else{return}
        passDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.3, 0.1, 0.4, 1)
        guard let encode = commandBuffer.makeRenderCommandEncoder(descriptor: passDescriptor) else{return}
        guard let pipeState = self.state else {return}
        encode.setRenderPipelineState(pipeState)
        encode.setViewport(MTLViewport(originX: 0, originY: 0, width: Double(view.drawableSize.width), height: Double(view.drawableSize.height), znear: -1, zfar: 1))
        
        
        changeVertex(sampleBuffer: sample)
        encode.setVertexBuffer(vertexbuffer, offset: 0, index: 0)
        encode.setFragmentBuffer(cmatrixBuffer, offset: 0, index: 0)
        setTextureWithEncoder(encoder: encode,sampleBuffer: sample,yuv: useYUV)
        
        if let blendTex = ImageTool.setUpImageTexture(imageName: "image.jpg", device: device) {
            encode.setFragmentTexture(blendTex, index: 2)
        }
    
        encode.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
        encode.endEncoding()
        
        commandBuffer.present(view.currentDrawable!)
        commandBuffer.commit()
        self.texture = nil
    }
    
    func setTextureWithEncoder(encoder:MTLRenderCommandEncoder,sampleBuffer:CMSampleBuffer,yuv:Bool = false) {
        
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        
        // index == -1: single-plane BGRA; 0: Y plane (r8Unorm); 1: UV plane (rg8Unorm)
        func settexture(index:Int){
            var pixelFormat:MTLPixelFormat = .bgra8Unorm
            
            if index == -1{
                pixelFormat = .bgra8Unorm
            }else if index == 0{
                pixelFormat = .r8Unorm
            }else if index == 1{
                pixelFormat = .rg8Unorm
            }
            var metalTexture:CVMetalTexture?
            let width =  CVPixelBufferGetWidthOfPlane(imageBuffer, index == -1 ? 0 : index)
            let height = CVPixelBufferGetHeightOfPlane(imageBuffer, index == -1 ? 0 : index)
            let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                   self.tetureCache!,
                                                                   imageBuffer,
                                                                   nil,
                                                                   pixelFormat,
                                                                   width,
                                                                   height,
                                                                   index == -1 ? 0 : index,
                                                                   &metalTexture)
            if  status == kCVReturnSuccess{
                if index == 1 {
                    self.textureUV = CVMetalTextureGetTexture(metalTexture!)
                    encoder.setFragmentTexture(self.textureUV, index: 1)
                }else{
                    self.texture = CVMetalTextureGetTexture(metalTexture!)
                    encoder.setFragmentTexture(self.texture, index: 0)
                }
                
                
            }
        }
        
        if yuv {
            
            settexture(index: 0)
            settexture(index: 1)
        }else{
            settexture(index: -1)
        }
        
        
    }
    
}
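One caveat worth noting (an addition, not in the original): a texture vended by CVMetalTextureCacheCreateTextureFromImage should stay alive until the GPU has finished reading it, so clearing the per-frame textures is safest from a completion handler rather than immediately after commit(). A sketch of how the tail of draw(in:) could change:

// Release the frame's textures only once the GPU is done with them.
commandBuffer.addCompletedHandler { [weak self] _ in
    self?.texture = nil
    self?.textureUV = nil
}
commandBuffer.present(view.currentDrawable!)
commandBuffer.commit()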

3. A Utility Class for Reading a Video File Frame by Frame

class DQAssetReader: NSObject {
    
    var readerVideoTrackOutput:AVAssetReaderTrackOutput?
    
    var assetReader:AVAssetReader!
    
    var lockObjc = NSObject()
    
    var videoUrl:URL
    var inputAsset :AVAsset!
    var YUV : Bool = false
    var timeRange:CMTimeRange?
    var loop: Bool = false
    
    init(url:URL,valueYUV:Bool = false) {
        videoUrl = url
        YUV = valueYUV
        super.init()
        setUpAsset()
    }
    
    func setUpAsset(startRead:Bool = true) {
        // Create an AVURLAsset, which initializes the resource from a local or remote URL.
        // AVURLAssetPreferPreciseDurationAndTimingKey defaults to NO; YES requests precise duration and timing.
        inputAsset = AVURLAsset(url: videoUrl, options: [AVURLAssetPreferPreciseDurationAndTimingKey:true])
        
        // Perform the standard asynchronous loading for the keys the asset needs, so accessing the tracks property later is not blocked.
        inputAsset.loadValuesAsynchronously(forKeys: ["tracks"]) {[weak self] in
            
            guard let `self` = self else{
                return
            }
            
            // Process the loaded asset asynchronously on a global background queue.
            DispatchQueue.global().async {[weak self] in
                
                guard let `self` = self else{
                    return
                }
                
                var error: NSError?
                let tracksStatus = self.inputAsset.statusOfValue(forKey: "tracks", error: &error)
                // If the tracks failed to load, log the error and bail out.
                if tracksStatus != .loaded{
                    
                    print(error?.description as Any)
                    return
                }
                self.processAsset(asset: self.inputAsset,startRead: startRead)
                
            }
            
            
        }
        
    }
    
    func processAsset(asset:AVAsset,startRead:Bool = true) {
        // Lock
        objc_sync_enter(lockObjc)
        
        // Create the AVAssetReader
        guard let assetReader1 = try? AVAssetReader(asset: asset) else {
            return
        }
        assetReader = assetReader1
        // Apply the optional time range; it must be set before startReading().
        if let timeRange = timeRange {
            assetReader.timeRange = timeRange
        }
        /*
         kCVPixelBufferPixelFormatTypeKey selects the pixel format:
         kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : 420v (YUV)
         kCVPixelFormatType_32BGRA : iOS converts YUV to BGRA internally
         AVAssetReaderTrackOutput(track:outputSettings:)
         parameter 1: which track of the asset to read
         parameter 2: the video output settings
         */
        let pixelFormat = YUV ? kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : kCVPixelFormatType_32BGRA
        
        readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset.tracks(withMediaType: .video).first!, outputSettings:[String(kCVPixelBufferPixelFormatTypeKey) :NSNumber(value: pixelFormat)])
        // alwaysCopiesSampleData: whether vended buffers are copies that you may freely modify. The default is YES; NO avoids the copy.
        readerVideoTrackOutput?.alwaysCopiesSampleData = false
        
        
        if assetReader.canAdd(readerVideoTrackOutput!){
            assetReader.add(readerVideoTrackOutput!)
        }
        
        // Start reading
        if startRead {
            if assetReader.startReading() == false {
                print("reading file error")
            }
        }
        
        // Unlock
        objc_sync_exit(lockObjc)
        
    }
    
    // Read the next frame
    func readBuffer() -> CMSampleBuffer? {
        
        objc_sync_enter(lockObjc)
        var sampleBuffer:CMSampleBuffer?
        
        
        if let readerTrackout = self.readerVideoTrackOutput  {
            sampleBuffer = readerTrackout.copyNextSampleBuffer()
        }
        
        // When the reader has finished, clear readerVideoTrackOutput/assetReader and re-create them if looping.
        if assetReader != nil,assetReader.status == .completed {
            readerVideoTrackOutput = nil
            assetReader = nil
            if loop {
                self.setUpAsset()
            }
        }
        // Timestamp
//        print(sampleBuffer?.presentationTimeStamp.value as Any)
        objc_sync_exit(lockObjc)
        return sampleBuffer
    }
    
    
    
}
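
Using the reader is straightforward; a minimal sketch (the resource name matches the one MovieViewController loads):

let url = Bundle.main.url(forResource: "123", withExtension: "mp4")!
let reader = DQAssetReader(url: url, valueYUV: true)
reader.loop = true // re-create the reader automatically when the file ends
// Pull one frame per render pass, e.g. inside MTKViewDelegate.draw(in:):
if let sample = reader.readBuffer() {
    // create Metal textures from the CMSampleBuffer and encode the draw
}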

4. The Metal Shader File

#include <metal_stdlib>

using namespace metal;
// Vertex data structure
typedef struct
{
    // Vertex position (x, y, z, w)
    vector_float4 position;
    // Texture coordinate (s, t)
    vector_float2 textureCoordinate;
} CCVertex;

// Conversion matrix
typedef struct {
    // 3x3 matrix
    float3x3 matrix;
    // Offset
    vector_float3 offset;
} CCConvertMatrix;

// Struct returned by the vertex function and consumed by the fragment function
typedef struct
{
    float4 clipSpacePosition [[position]]; // [[position]] marks the clip-space vertex position
    
    float2 textureCoordinate; // texture coordinate
    
} RasterizerData;

// RasterizerData: the output type handed to the fragment function.
// vertex_id is the index of the vertex currently being processed.
// buffer(0) is the vertex buffer bound with setVertexBuffer(_:offset:index: 0).
vertex RasterizerData
vertexShader(uint vertexID [[ vertex_id ]],
             constant CCVertex *vertexArray [[ buffer(0) ]])
{
    RasterizerData out;
    // Vertex position
    out.clipSpacePosition = vertexArray[vertexID].position;
    // Texture coordinate
    out.textureCoordinate = vertexArray[vertexID].textureCoordinate;
    return out;
}


// YUV -> RGB. Reference: https://mp.weixin.qq.com/s/KKfkS5QpwPAdYcEwFAN9VA
// stage_in: the input comes from the rasterizer (the stage after vertex processing).
// texture(0)/texture(1): the Y and UV textures bound in setTextureWithEncoder.
// texture(2): the blend texture; buffer(0): the YUV->RGB conversion matrix.
fragment float4
samplingShader(RasterizerData input [[stage_in]],
               texture2d<float> textureY [[ texture(0) ]],
               texture2d<float> textureUV [[ texture(1) ]],
               texture2d<float> textureBlend [[ texture(2) ]],
               constant CCConvertMatrix *convertMatrix [[ buffer(0) ]]
               )
{
    // 1. Create the texture sampler
    constexpr sampler textureSampler (mag_filter::linear,
                                      min_filter::linear);
    /*
     2. Read the YUV values:
        textureY.sample(textureSampler, input.textureCoordinate).r
        reads the R channel at the texture coordinate (the Y value);
        textureUV.sample(textureSampler, input.textureCoordinate).rg
        reads the RG channels at the texture coordinate (the UV values).
     */
    
    
    float3 yuv = float3(textureY.sample(textureSampler, input.textureCoordinate).r,
                        textureUV.sample(textureSampler, input.textureCoordinate).rg);
    
    float Y = textureY.sample(textureSampler, input.textureCoordinate).r;
    
    float3 rgb1 = float3(Y, Y, Y); // grayscale (luma only; left unused)

    // 3. Convert YUV to RGB: convertMatrix->matrix * (yuv + convertMatrix->offset)
    float3 rgb = convertMatrix->matrix * (yuv + convertMatrix->offset);
    
    // Blend with the filter texture
    float4 blend = textureBlend.sample(textureSampler, input.textureCoordinate);
    return float4(rgb, 1.0) * 0.4 + blend * 0.6;
    
    // 4. Or simply return the converted color (RGBA):
//    return float4(rgb, 1.0);
}

Full source code: https://github.com/duzhaoquan/CaptureUseMetal