  • Capturing Video with Metal & Processing Video with Metal

    AVFoundation handles video capture and AVPlayer handles playback. To process video with Metal, however, you have to process and display each frame yourself, which means working with CMSampleBuffer and its related APIs.

    The filter in this article comes from MetalPerformanceShaders, Metal's built-in filter framework. MPS ships many common filters; see Apple's official documentation for details.
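
    As an example of how little code an MPS kernel needs, here is a minimal sketch, assuming a Metal device, a command buffer, and source/destination textures are already set up (the identifiers are placeholders, not from the project below):

    import MetalPerformanceShaders

    // Sobel edge detection; every MPS unary image kernel is encoded the same way:
    // encode(commandBuffer:sourceTexture:destinationTexture:)
    let sobel = MPSImageSobel(device: device)
    sobel.encode(commandBuffer: commandBuffer,
                 sourceTexture: sourceTexture,
                 destinationTexture: destinationTexture)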

    1. Processing the Live Camera Feed with Metal

    class ViewController: UIViewController {
    
        //Buttons
        var captureButton:UIButton!
        var recordButton:UIButton!
        
        var session : AVCaptureSession = AVCaptureSession()
        var queue = DispatchQueue(label: "queue")
        var input: AVCaptureDeviceInput?
        lazy var previewLayer  = AVCaptureVideoPreviewLayer(session: self.session)
        lazy var recordOutput = AVCaptureMovieFileOutput()
        
        //Metal-related
        var device :MTLDevice!
        var mtkView : MTKView!
        
        var texture : MTLTexture?
        
        var textureCache : CVMetalTextureCache?
        
        override func viewDidLoad() {
            super.viewDidLoad()
            view.backgroundColor = .white
            captureButton = UIButton(frame: CGRect(x: 10, y: view.bounds.size.height - 60, width: 150, height: 50))
            captureButton.backgroundColor = .gray
            captureButton.setTitle("start capture", for: .normal)
            captureButton.addTarget(self, action: #selector(capture(btn:)), for: .touchUpInside)
            view.addSubview(captureButton)
            
            recordButton = UIButton(frame: CGRect(x: view.bounds.size.width - 160, y: view.bounds.size.height - 60, width: 150, height: 50))
            recordButton.backgroundColor = .gray

            recordButton.setTitle("play movie", for: .normal)
            recordButton.addTarget(self, action: #selector(recordAction(btn:)), for: .touchUpInside)
            view.addSubview(recordButton)
            
            
        }
        
        func setMetalConfig()  {
            guard let device1 = MTLCreateSystemDefaultDevice() else{
                return
            }
            self.device = device1
            mtkView = MTKView(frame: view.bounds, device: device)
            
            mtkView.delegate = self
            
            mtkView.framebufferOnly = false
            
            //Create the texture cache
            CVMetalTextureCacheCreate(nil, nil, device1, nil, &textureCache)
        }
    
        @objc func recordAction(btn:UIButton){
            btn.isSelected = !btn.isSelected
            if session.isRunning {
                if btn.isSelected {
                    btn.setTitle("stop record", for: .normal)
                    
                    if session.canAddOutput(recordOutput){
                        session.addOutput(recordOutput)
                    }
                    let connection = recordOutput.connection(with: .video)
                    connection?.preferredVideoStabilizationMode = .auto
                    
                    guard let path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first else { return  }
                    let url = URL(fileURLWithPath: "\(path)/test.mp4")
                    recordOutput.startRecording(to: url, recordingDelegate: self)
                    
                    
                }else{
                    btn.setTitle("start record", for: .normal)
                    
                    recordOutput.stopRecording()
                    
                }
            }else{
    //            btn.setTitle("play movie", for: .normal)
                let moVC = MovieViewController()
                self.navigationController?.pushViewController(moVC, animated: true)
            }
            
        }
        @objc func capture(btn:UIButton){
            btn.isSelected = !btn.isSelected
            
            if btn.isSelected {
    //            recordButton.isHidden = false
                recordButton.setTitle("start record", for: .normal)
                btn.setTitle("stop capture", for: UIControl.State.normal)
                guard let device = getCamera(position: .back) else{
                    return
                }
                
                guard let input = try? AVCaptureDeviceInput(device: device) else{
                    return
                }
                self.input = input
                if session.canAddInput(input) {
                    session.addInput(input)
                }
                
                let output = AVCaptureVideoDataOutput()
                
                output.setSampleBufferDelegate(self, queue: queue)
                if session.canAddOutput(output){
                    session.addOutput(output)
                }
                //Use BGRA here rather than a YUV color space, to avoid a conversion shader
                //Note: this must match the pixel format passed to CVMetalTextureCacheCreateTextureFromImage later, otherwise the video will render incorrectly.
                output.videoSettings = [String(kCVPixelBufferPixelFormatTypeKey)  :NSNumber(value: kCVPixelFormatType_32BGRA) ]
                let connection: AVCaptureConnection = output.connection(with: .video)!
                connection.videoOrientation = .portrait
    //            previewLayer.frame = view.bounds
    //            view.layer.insertSublayer(previewLayer, at: 0)
                setMetalConfig()
                view.insertSubview(mtkView, at: 0)
                session.startRunning()
            }else{
    //            recordButton.isHidden = true
                btn.setTitle("start capture", for: .normal)
                if recordOutput.isRecording {
                    recordOutput.stopRecording()
                }
                recordButton.isSelected = false
                recordButton.setTitle("play movie", for: .normal)
                session.stopRunning()
    //            previewLayer.removeFromSuperlayer()
                mtkView.removeFromSuperview()
            }
            
            
            
        }
        //Get the camera device
        func getCamera(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
            var devices = [AVCaptureDevice]()
            
            if #available(iOS 10.0, *) {
                let discoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
                devices = discoverySession.devices
            } else {
                devices = AVCaptureDevice.devices(for: AVMediaType.video)
            }
            
            for device in devices {
                if device.position == position {
                    return device
                }
            }
            return nil
        }
        //Switch between front and rear cameras
        func swapFrontAndBackCameras() {
            if let input = input {
                
                var newDevice: AVCaptureDevice?
                
                if input.device.position == .front {
                    newDevice = getCamera(position: AVCaptureDevice.Position.back)
                } else {
                    newDevice = getCamera(position: AVCaptureDevice.Position.front)
                }
                
                if let new = newDevice {
                    do{
                        let newInput = try AVCaptureDeviceInput(device: new)
                        
                        session.beginConfiguration()
                        
                        session.removeInput(input)
                        session.addInput(newInput)
                        self.input = newInput
                        
                        session.commitConfiguration()
                    }
                    catch let error as NSError {
                        print("AVCaptureDeviceInput(): (error)")
                    }
                }
            }
        }
        //Handle interface orientation for the preview layer
        func setupVideoPreviewLayerOrientation() {
            
            if let connection = previewLayer.connection, connection.isVideoOrientationSupported {
                if #available(iOS 13.0, *) {
                    if let orientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation{
                        switch orientation {
                        case .portrait:
                            connection.videoOrientation = .portrait
                        case .landscapeLeft:
                            connection.videoOrientation = .landscapeLeft
                        case .landscapeRight:
                            connection.videoOrientation = .landscapeRight
                        case .portraitUpsideDown:
                            connection.videoOrientation = .portraitUpsideDown
                        default:
                            connection.videoOrientation = .portrait
                        }
                    }
                }else{
                    switch UIApplication.shared.statusBarOrientation {
                    case .portrait:
                        connection.videoOrientation = .portrait
                    case .landscapeRight:
                        connection.videoOrientation = .landscapeRight
                    case .landscapeLeft:
                        connection.videoOrientation = .landscapeLeft
                    case .portraitUpsideDown:
                        connection.videoOrientation = .portraitUpsideDown
                    default:
                        connection.videoOrientation = .portrait
                    }
                }
            }
        }
    }
    
    extension ViewController : AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureFileOutputRecordingDelegate,MTKViewDelegate {
        //MTKViewDelegate
        func draw(in view: MTKView) {
            
            //Note: ideally the command queue is created once and reused; it is created per frame here for brevity
            guard let queue = device.makeCommandQueue() else { return }
            guard let buffer = queue.makeCommandBuffer() else { return }
            guard let drawable = view.currentDrawable else { return }
            //Metal ships many built-in filters in MetalPerformanceShaders
            let blurFilter = MPSImageGaussianBlur.init(device: device, sigma: 10)
            guard let texture = self.texture else {
                return
            }
            
            blurFilter.encode(commandBuffer: buffer, sourceTexture: texture, destinationTexture: drawable.texture)
            
            buffer.present(drawable)
            buffer.commit()
            self.texture = nil
        }
        
        func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
            
        }
        
        //Recording finished
        func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
            
        }
        //Capture callback, called once per frame
        func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            var metalTexture:CVMetalTexture?
            
            let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                      self.textureCache!,
                                                      imageBuffer,
                                                      nil,
                                                      MTLPixelFormat.bgra8Unorm,
                                                      CVPixelBufferGetWidth(imageBuffer),
                                                      CVPixelBufferGetHeight(imageBuffer),
                                                      0,
                                                      &metalTexture)
            if status == kCVReturnSuccess {
                mtkView.drawableSize = CGSize(width: CVPixelBufferGetWidth(imageBuffer), height: CVPixelBufferGetHeight(imageBuffer))
                self.texture = CVMetalTextureGetTexture(metalTexture!)
                
                
            }
        }
        func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
            
        }
        
    }
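
    Note that the capture code above assumes camera permission has already been granted. On first launch you would normally check authorization before starting the session; a minimal sketch (the Info.plist must also contain an NSCameraUsageDescription entry, and the function name here is my own):

    import AVFoundation

    func startCaptureIfAuthorized(_ start: @escaping () -> Void) {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            start()
        case .notDetermined:
            //First launch: ask the user, then start on the main queue if granted
            AVCaptureDevice.requestAccess(for: .video) { granted in
                if granted { DispatchQueue.main.async { start() } }
            }
        default:
            break //denied or restricted: direct the user to Settings
        }
    }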

    2. Processing an Existing Video File

    import UIKit
    import AVFoundation
    import MetalKit
    import MetalPerformanceShaders
    
    struct ConvertMatrix {
        var matrix :float3x3
        var vector :SIMD3<Float>
    }
    
    class MovieViewController: UIViewController {
    
       
        var device :MTLDevice!
        var mtkView : MTKView!
        
        var reader: DQAssetReader?
        
        var texture : MTLTexture?
        var textureUV:MTLTexture?
        
        var textureCache : CVMetalTextureCache?
        
        var state : MTLRenderPipelineState?
        var commandQueue: MTLCommandQueue?
        
        var vertexbuffer :MTLBuffer?
        var cmatrixBuffer :MTLBuffer?
        
        var useYUV = true
        
        var timeRange : CMTimeRange?
        
        var pauseButton:UIButton!
        override func viewDidLoad() {
            super.viewDidLoad()
    
            self.title = "movie"
            self.view.backgroundColor = .white
            
            let path = Bundle.main.path(forResource: "123", ofType: "mp4")
            let url1 = URL(fileURLWithPath: path!)
            
            reader = DQAssetReader(url: url1,valueYUV: useYUV)
        //Start 2s in; the duration's flags value 5 (= valid | positiveInfinity) means "read to the end"
        reader?.timeRange = CMTimeRange(start: CMTime(value: 2, timescale: 1, flags: CMTimeFlags(rawValue: 1), epoch: 0), duration: CMTime(value: 0, timescale: 0, flags: CMTimeFlags(rawValue: 5), epoch: 0))
            setMetalConfig()
            vertexData()
            yuvToRGBmatrix()
            
        pauseButton = UIButton(frame: CGRect(x: 0, y: view.frame.size.height - 100, width: 100, height: 50))
            pauseButton.center.x = view.center.x
            
            pauseButton.setTitle("暂停", for:.normal)
            pauseButton.setTitle("继续", for:.selected)
            pauseButton.backgroundColor = .gray
            view.addSubview(pauseButton)
            pauseButton.addTarget(self, action: #selector(pauseAction(btn:)), for: .touchUpInside)
            
        }
        
        @objc func pauseAction(btn:UIButton){
            btn.isSelected = !btn.isSelected
            
            if !btn.isSelected {
                if reader?.readBuffer() == nil {
                    reader?.setUpAsset()
                    pauseButton.setTitle("继续", for:.selected)
                }
            }
        }
        
        func setMetalConfig()  {
            guard let device1 = MTLCreateSystemDefaultDevice() else{
                return
            }
            self.device = device1
            mtkView = MTKView(frame: view.bounds, device: device)
            
            mtkView.delegate = self
            
            mtkView.framebufferOnly = false
            
            //Create the texture cache
            CVMetalTextureCacheCreate(nil, nil, device1, nil, &textureCache)
            
            view.addSubview(mtkView)
            let library = device.makeDefaultLibrary()
            let verFunc = library?.makeFunction(name: "vertexShader")
            let fragFunc = library?.makeFunction(name: "samplingShader")
            
            let descriptor =  MTLRenderPipelineDescriptor()
            descriptor.fragmentFunction = fragFunc
            descriptor.vertexFunction = verFunc
            descriptor.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
            state = try? device.makeRenderPipelineState(descriptor: descriptor)
            
            commandQueue = device.makeCommandQueue()
            
        }
    
        func vertexData() {
            //8 floats per vertex: position(x,y,z,w) + texture coordinate(s,t) + 2 padding floats,
            //matching the 32-byte stride of the shader's CCVertex (float4 + float2, 16-byte aligned)
            var vertex:[Float] = [
                 1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
                -1.0, -1.0, 0.0, 1.0,  0.0, 1.0,1.0,1.0,
                -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                 1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
                -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                 1.0,  1.0, 0.0, 1.0,  1.0, 0.0,1.0,1.0
            ]
            
            vertexbuffer = device.makeBuffer(bytes: &vertex, length: MemoryLayout<Float>.size * vertex.count, options: MTLResourceOptions.storageModeShared)
        }
        
        func changeVertex(sampleBuffer:CMSampleBuffer) {
            //Rebuild the quad so the video keeps its aspect ratio (aspect-fit) inside the view
                var vertexs:[Float] = [
                    1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
                   -1.0, -1.0, 0.0, 1.0,  0.0, 1.0,1.0,1.0,
                   -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                    1.0, -1.0, 0.0, 1.0,  1.0, 1.0,1.0,1.0,
                   -1.0,  1.0, 0.0, 1.0,  0.0, 0.0,1.0,1.0,
                    1.0,  1.0, 0.0, 1.0,  1.0, 0.0,1.0,1.0
                ]
                
                guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                    return
                }
                let width = CVPixelBufferGetWidth(imageBuffer)
                let height = CVPixelBufferGetHeight(imageBuffer)
                       
                let scaleF = CGFloat(view.frame.height)/CGFloat(view.frame.width)
                let scaleI = CGFloat(height)/CGFloat(width)
                           
                let imageScale = scaleF>scaleI ? (1,scaleI/scaleF) : (scaleF/scaleI,1)
                
                for (i,v) in vertexs.enumerated(){
                    if i % 8 == 0 {
                        vertexs[i] = v * Float(imageScale.0)
                    }
                    if i % 8 == 1{
                        vertexs[i] = v * Float(imageScale.1)
                    }
    
                }
            
                vertexbuffer = device.makeBuffer(bytes: vertexs, length: MemoryLayout<Float>.size * vertexs.count, options: MTLResourceOptions.storageModeShared)
            
        }
        
        func yuvToRGBmatrix() {
            
            /*
             YUV <-> RGB conversion
             YUV is used for transmission because chroma subsampling saves bandwidth:
             4:4:4  full sampling, no saving
             4:2:2  U/V sampled every other pixel, saves 1/3
             4:2:0  U on one row, V on the next, still every other pixel, saves 1/2
             
             Y = 0.299 * R + 0.587 * G + 0.114 * B
             U = -0.147 * R - 0.289 * G + 0.436 * B
             V = 0.615 * R - 0.515 * G - 0.100 * B
             
             R = Y + 1.14 * V
             G = Y - 0.39 * U - 0.58 * V
             B = Y + 2.03 * U
             */
            
            //1. Conversion matrices
            // BT.601, which is the standard for SDTV.
            let kColorConversion601DefaultMatrix = float3x3(
                SIMD3<Float>(1.164,1.164, 1.164),
                SIMD3<Float>(0.0, -0.392, 2.017),
                SIMD3<Float>(1.596, -0.813, 0.0))
            
            // BT.601 full range
            let kColorConversion601FullRangeMatrix = float3x3(
                SIMD3<Float>(1.0,    1.0,    1.0),
                SIMD3<Float>(0.0,  -0.343, 1.765),
                SIMD3<Float>(1.4,    -0.711, 0.0))
            
            // BT.709, which is the standard for HDTV.
            let kColorConversion709DefaultMatrix = float3x3(
                SIMD3<Float>(1.164, 1.164, 1.164),
                SIMD3<Float>(0.0,  -0.213, 2.112),
                SIMD3<Float>(1.793, -0.533,  0.0))
            
            //Offsets: Y is shifted by 16/255 (video range), U/V are centered around 0.5
            let offset = SIMD3<Float>(-(16.0/255.0), -0.5, -0.5)
            
            var cMatrix = ConvertMatrix(matrix: kColorConversion601FullRangeMatrix, vector: offset)
            
            self.cmatrixBuffer = device.makeBuffer(bytes: &cMatrix, length: MemoryLayout<ConvertMatrix>.size, options: .storageModeShared)
            
            
        }
    
    }
    
    extension MovieViewController:MTKViewDelegate {
        
        func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
            
        }
        
        
        func draw(in view: MTKView) {
            
            if pauseButton.isSelected  {
                return
            }
            guard let commandBuffer = commandQueue?.makeCommandBuffer() else {
                return
            }
            //texture
            guard let sample = self.reader?.readBuffer() else {
                pauseButton.isSelected = true
                pauseButton.setTitle("Replay", for: UIControl.State.selected)
                return
            }
            
            //encode
            guard let passDescriptor = view.currentRenderPassDescriptor else{return}
            passDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0.3, 0.1, 0.4, 1)
            guard let encode = commandBuffer.makeRenderCommandEncoder(descriptor: passDescriptor) else{return}
            guard let pipeState = self.state else {return}
            encode.setRenderPipelineState(pipeState)
            encode.setViewport(MTLViewport(originX: 0, originY: 0, width: Double(view.drawableSize.width), height: Double(view.drawableSize.height), znear: -1, zfar: 1))
            
            
            changeVertex(sampleBuffer: sample)
            encode.setVertexBuffer(vertexbuffer, offset: 0, index: 0)
            encode.setFragmentBuffer(cmatrixBuffer, offset: 0, index: 0)
            setTextureWithEncoder(encoder: encode,sampleBuffer: sample,yuv: useYUV)
            
            //Blend texture (loading it every frame is wasteful; caching it once would be better)
            if let blendTex = ImageTool.setUpImageTexture(imageName: "image.jpg", device: device) {
                encode.setFragmentTexture(blendTex, index: 2)
            }
        
            encode.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
            encode.endEncoding()
            
            commandBuffer.present(view.currentDrawable!)
            commandBuffer.commit()
            self.texture = nil
        }
        
        func setTextureWithEncoder(encoder:MTLRenderCommandEncoder,sampleBuffer:CMSampleBuffer,yuv:Bool = false) {
            
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                return
            }
            
            //index -1: single-plane BGRA; index 0: Y plane (r8Unorm); index 1: UV plane (rg8Unorm)
            func settexture(index:Int){
                var pixelFormat:MTLPixelFormat = .bgra8Unorm
                
                if index == -1{
                    pixelFormat = .bgra8Unorm
                }else if index == 0{
                    pixelFormat = .r8Unorm
                }else if index == 1{
                    pixelFormat = .rg8Unorm
                }
                var metalTexture:CVMetalTexture?
                let width =  CVPixelBufferGetWidthOfPlane(imageBuffer, index == -1 ? 0 : index)
                let height = CVPixelBufferGetHeightOfPlane(imageBuffer, index == -1 ? 0 : index)
                let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                       self.textureCache!,
                                                                       imageBuffer,
                                                                       nil,
                                                                       pixelFormat,
                                                                       width,
                                                                       height,
                                                                       index == -1 ? 0 : index,
                                                                       &metalTexture)
                if  status == kCVReturnSuccess{
                    if index == 1 {
                        self.textureUV = CVMetalTextureGetTexture(metalTexture!)
                        encoder.setFragmentTexture(self.textureUV, index: 1)
                    }else{
                        self.texture = CVMetalTextureGetTexture(metalTexture!)
                        encoder.setFragmentTexture(self.texture, index: 0)
                    }
                    
                    
                }
            }
            
            if yuv {
                
                settexture(index: 0)
                settexture(index: 1)
            }else{
                settexture(index: -1)
            }
            
            
        }
        
    }
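
    One detail worth checking when passing ConvertMatrix to the fragment shader is that its Swift memory layout matches the CCConvertMatrix struct on the Metal side. A small sanity check (my own addition, not from the original project):

    import simd

    //Both fields are 16-byte aligned: float3x3 is 3 padded columns of 16 bytes (48 total),
    //SIMD3<Float> occupies 16 bytes, giving a 64-byte struct that mirrors CCConvertMatrix
    assert(MemoryLayout<ConvertMatrix>.size == 64)
    assert(MemoryLayout<ConvertMatrix>.stride == 64)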

    3. A Utility Class for Reading a Video File Frame by Frame

    class DQAssetReader: NSObject {
        
        var readerVideoTrackOutput:AVAssetReaderTrackOutput?
        
        var assetReader:AVAssetReader!
        
        var lockObjc = NSObject()
        
        var videoUrl:URL
        var inputAsset :AVAsset!
        var YUV : Bool = false
        var timeRange:CMTimeRange?
        var loop: Bool = false
        
        init(url:URL,valueYUV:Bool = false) {
            videoUrl = url
            YUV = valueYUV
            super.init()
            setUpAsset()
        }
        
        func setUpAsset(startRead:Bool = true) {
            //Create an AVURLAsset, which loads the resource from a local or remote URL
            //AVURLAssetPreferPreciseDurationAndTimingKey defaults to NO; YES asks for precise duration and timing
            inputAsset = AVURLAsset(url: videoUrl, options: [AVURLAssetPreferPreciseDurationAndTimingKey:true])
            
            //Load the keys we need asynchronously, so accessing the asset's tracks property later does not block
            inputAsset.loadValuesAsynchronously(forKeys: ["tracks"]) {[weak self] in
                
                guard let `self` = self else{
                    return
                }
                
               //Hop to a background concurrent queue to process the loaded inputAsset
                DispatchQueue.global().async {[weak self] in
                    
                    guard let `self` = self else{
                        return
                    }
                    
                    var error: NSError?
                    let tracksStatus = self.inputAsset.statusOfValue(forKey: "tracks", error: &error)
                    //If loading did not succeed, print the error and bail
                    if tracksStatus != .loaded{
                        
                        print(error?.description as Any)
                        return
                    }
                    self.processAsset(asset: self.inputAsset,startRead: startRead)
                    
                }
                
                
            }
            
        }
        
        func processAsset(asset:AVAsset,startRead:Bool = true) {
            //Lock; defer guarantees the matching unlock on every exit path, including the early return below
            objc_sync_enter(lockObjc)
            defer { objc_sync_exit(lockObjc) }
            
            //Create the AVAssetReader
            guard let assetReader1 = try? AVAssetReader(asset: asset) else {
                return
            }
            assetReader = assetReader1
            //Apply the requested time range, if any, before reading starts
            if let timeRange = timeRange {
                assetReader.timeRange = timeRange
            }
            /*
             2. kCVPixelBufferPixelFormatTypeKey selects the pixel format:
                kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : 420v (YUV)
                kCVPixelFormatType_32BGRA : iOS converts YUV to BGRA internally
             3. Configure readerVideoTrackOutput with
                AVAssetReaderTrackOutput(track:outputSettings:)
                parameter 1: which track of the asset to read
                parameter 2: the video output settings
             */
            let pixelFormat = YUV ? kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : kCVPixelFormatType_32BGRA
            
            readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset.tracks(withMediaType: .video).first!, outputSettings:[String(kCVPixelBufferPixelFormatTypeKey) :NSNumber(value: pixelFormat)])
            //alwaysCopiesSampleData: whether the buffer data is copied before being vended. The default is YES (you may freely modify the copied buffers); NO avoids the copy.
            readerVideoTrackOutput?.alwaysCopiesSampleData = false
            
            
            if assetReader.canAdd(readerVideoTrackOutput!){
                assetReader.add(readerVideoTrackOutput!)
            }
            
            //Start reading
            if startRead {
                if assetReader.startReading() == false {
                    print("reading file error")
                }
            }
            
            
        }
        
        //Read the next frame
        func readBuffer() -> CMSampleBuffer? {
            
            objc_sync_enter(lockObjc)
            var sampleBuffer:CMSampleBuffer?
            
            
            if let readerTrackout = self.readerVideoTrackOutput  {
                sampleBuffer = readerTrackout.copyNextSampleBuffer()
            }
            
            //Once the reader has finished reading, tear down readerVideoTrackOutput/assetReader; re-create them when looping
            if assetReader != nil,assetReader.status == .completed {
                readerVideoTrackOutput = nil
                assetReader = nil
                if loop {
                    self.setUpAsset()
                }
            }
            //Presentation timestamp
    //        print(sampleBuffer?.presentationTimeStamp.value as Any)
            objc_sync_exit(lockObjc)
            return sampleBuffer
        }
        
        
        
    }
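
    For reference, this is how the reader is driven. A minimal sketch (movieURL is a placeholder); keep in mind that setUpAsset() loads the asset asynchronously, so readBuffer() can return nil until reading has actually started; the view controller above simply polls it from MTKView's draw loop:

    import CoreMedia

    let reader = DQAssetReader(url: movieURL, valueYUV: true)
    //Give the asynchronous setup a moment before pulling frames
    DispatchQueue.global().asyncAfter(deadline: .now() + 0.5) {
        while let sample = reader.readBuffer() {
            //Each CMSampleBuffer carries one decoded frame plus its timing
            let pts = CMSampleBufferGetPresentationTimeStamp(sample)
            print("frame at \(pts.seconds)s")
        }
    }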

    4. The Metal Shader Code

    #include <metal_stdlib>
    
    using namespace metal;
    //Vertex data structure
    typedef struct
    {
        //Vertex position (x,y,z,w)
        vector_float4 position;
        //Texture coordinate (s,t)
        vector_float2 textureCoordinate;
    } CCVertex;
    
    //Conversion matrix
    typedef struct {
        //3x3 conversion matrix
        float3x3 matrix;
        //Offset
        vector_float3 offset;
    } CCConvertMatrix;
    
    //Struct used as vertex-function output / fragment-function input
    typedef struct
    {
        float4 clipSpacePosition [[position]]; // [[position]] marks this as the clip-space position
        
        float2 textureCoordinate; // texture coordinate
        
    } RasterizerData;
    
    //Vertex function: returns RasterizerData to the fragment stage
    // vertex_id is the index of the vertex currently being processed
    // buffer(0) marks the argument as buffer data at index 0
    vertex RasterizerData
    vertexShader(uint vertexID [[ vertex_id ]],
                 constant CCVertex *vertexArray [[ buffer(0) ]])
    {
        RasterizerData out;
        //vertex position
        out.clipSpacePosition = vertexArray[vertexID].position;
        //texture coordinate
        out.textureCoordinate = vertexArray[vertexID].textureCoordinate;
        return out;
    }
    
    
    //YUV->RGB 参考学习链接: https://mp.weixin.qq.com/s/KKfkS5QpwPAdYcEwFAN9VA
    // stage_in表示这个数据来自光栅化。(光栅化是顶点处理之后的步骤,业务层无法修改)
    // texture表明是纹理数据,CCFragmentTextureIndexTextureY是索引
    // texture表明是纹理数据,CCFragmentTextureIndexTextureUV是索引
    // buffer表明是缓存数据, CCFragmentInputIndexMatrix是索引
    fragment float4
    samplingShader(RasterizerData input [[stage_in]],
                   texture2d<float> textureY [[ texture(0) ]],
                   texture2d<float> textureUV [[ texture(1) ]],
                   texture2d<float> textureBlend [[ texture(2) ]],
                   constant CCConvertMatrix *convertMatrix [[ buffer(0) ]]
                   )
    {
        //1. Create the texture sampler
        constexpr sampler textureSampler (mag_filter::linear,
                                          min_filter::linear);
        /*
         2. Read the YUV values:
            textureY.sample(textureSampler, input.textureCoordinate).r
            samples the R channel of textureY at the texture coordinate (Y)
            textureUV.sample(textureSampler, input.textureCoordinate).rg
            samples the RG channels of textureUV at the texture coordinate (UV)
         */
        
        
        float3 yuv = float3(textureY.sample(textureSampler, input.textureCoordinate).r,
                            textureUV.sample(textureSampler, input.textureCoordinate).rg);
        
        float Y = textureY.sample(textureSampler, input.textureCoordinate).r;
        
        float3 rgb1 = float3(Y,Y,Y);//grayscale version (unused; handy for debugging)
    
        //3. Convert YUV to RGB: convertMatrix->matrix * (yuv + convertMatrix->offset)
        float3 rgb = convertMatrix->matrix * (yuv + convertMatrix->offset);
        
        //Blend with the overlay texture: 40% video, 60% overlay
        float4 blend = textureBlend.sample(textureSampler, input.textureCoordinate);
        return float4(rgb,1.0) * 0.4 + blend * 0.6;
        
        //4. Or return the plain converted color (RGBA) without blending:
    //    return float4(rgb, 1.0);
    }

    Full source code: https://github.com/duzhaoquan/CaptureUseMetal
