  • Poking at CVPixelBufferRef with Tencent's live-streaming SDK (TRTC)

    https://developer.apple.com/library/archive/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html?language=objc#//apple_ref/doc/filter/ci/CISourceInCompositing  — reference documentation for the various Core Image filters

    CVPixelBufferRef is simply another name for CVImageBufferRef.
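
    For reference, CoreVideo declares CVPixelBufferRef as a typedef of CVImageBufferRef, so the image buffer pulled out of a CMSampleBuffer can be used as a pixel buffer without a cast. A minimal sketch (the helper name is made up for illustration):

    #import <CoreMedia/CoreMedia.h>
    #import <CoreVideo/CoreVideo.h>
    
    static size_t PixelBufferWidthFromSample(CMSampleBufferRef sampleBuffer) {
        // CVPixelBufferRef and CVImageBufferRef are the same underlying type,
        // so the result of CMSampleBufferGetImageBuffer needs no conversion.
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferRef pixelBuffer = imageBuffer;
        return CVPixelBufferGetWidth(pixelBuffer);
    }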

    Tencent's live streaming (TRTC) takes custom frames through sendCustomVideoData:. As long as the frame format you send is correct, you do not need to worry about the frame size at all (it is almost foolproof).
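
    A minimal sketch of handing one frame to TRTC, assuming custom capture has already been enabled on the TRTCCloud instance (enableCustomVideoCapture: in the SDK versions this post works against); the helper name is made up:

    // Assumption: [trtc enableCustomVideoCapture:YES] was called before feeding frames.
    static void SendPixelBufferToTRTC(TRTCCloud *trtc, CVPixelBufferRef pixelBuffer) {
        TRTCVideoFrame *frame = [[TRTCVideoFrame alloc] init];
        frame.pixelFormat = TRTCVideoPixelFormat_NV12;       // for 420YpCbCr8BiPlanar buffers
        frame.bufferType  = TRTCVideoBufferType_PixelBuffer;
        frame.pixelBuffer = pixelBuffer;
        [trtc sendCustomVideoData:frame];
    }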

    If you want to process the frames captured from the camera, the CIContext + CIImage combination is the first choice; it offers plenty of APIs for all kinds of transforms. The code is below.

    Key function:

    - (void)createBuffer:(CVPixelBufferRef)originalBuffer size:(CGSize)size
    
    
    // Preview view: a UIView subclass that hosts the AVCaptureVideoPreviewLayer.
    // The @interface/@implementation headers were missing in the original; the class name is assumed.
    @interface CameraPreviewView : UIView
    
    @property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
    
    @end
    
    @implementation CameraPreviewView
    
    - (void)layoutSubviews {
        [super layoutSubviews];
        [_previewLayer setFrame:self.layer.bounds];
    }
    
    @end
    
    
    // Class extension; the @interface header plus the previewView / videoSize / frameRate properties were missing in the original and are reconstructed from later usage (class name assumed).
    @interface CameraCapture () <AVCaptureVideoDataOutputSampleBufferDelegate>
    
    @property (nonatomic, strong) AVCaptureSession *session;
    @property (nonatomic, strong) AVCaptureDeviceInput       *input;
    @property (nonatomic, strong) AVCaptureVideoDataOutput   *videoDataOutput;
    @property (nonatomic, weak)   TRTCCloud *trtc;
    @property (nonatomic, weak)   UIImageView *trtcPreviewView;
    @property (nonatomic, strong) CIContext *ciContext;
    @property (nonatomic, strong) CameraPreviewView *previewView;
    @property (nonatomic, assign) CGSize videoSize;
    @property (nonatomic, assign) int frameRate;
    
    @end
    
    @implementation CameraCapture
    
    - (void)startCapture:(TRTCCloud*)trtc {
        self.trtc = trtc;
        [self.session startRunning];
        [self refreshCaptureSessionConfigure];
    }
    
    -(void)setupDisplayView:(UIView *)view {
        self.trtcPreviewView = (UIImageView*)view;
        [self.trtcPreviewView setContentMode:UIViewContentModeScaleAspectFit];
        /*
        [self.previewView removeFromSuperview];
        [view addSubview:self.previewView];
        [self.previewView mas_remakeConstraints:^(MASConstraintMaker *make) {
            make.edges.mas_equalTo(view);
        }];*/
    }
    
    -(void)setVideoSize:(CGSize)videoSize {
        _videoSize = videoSize;
        AVCaptureSessionPreset sessionPreset = [self sessionPresetForVideoSize:self.videoSize];
        if (![self.session.sessionPreset isEqualToString:sessionPreset]) {
            dispatch_async(dispatch_get_main_queue(), ^{
                self.session.sessionPreset = sessionPreset;
            });
        }
    }
    
    -(void)setFrameRate:(int)frameRate {
        BOOL needUpdate = _frameRate != frameRate;
        _frameRate = frameRate;
        if (needUpdate) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [self configureFrameRate:frameRate];
            });
        }
    }
    
    - (void)configureFrameRate:(int)frameRate {
        // Clamp the requested frame rate to the device's supported range and apply it
        AVCaptureDevice *captureDevice = self.input.device;
        AVFrameRateRange *frameRateRange = captureDevice.activeFormat.videoSupportedFrameRateRanges.firstObject;
        if (frameRate >= frameRateRange.maxFrameRate) {
            frameRate = frameRateRange.maxFrameRate;
        } else if (frameRate <= frameRateRange.minFrameRate) {
            frameRate = frameRateRange.minFrameRate;
        }
        self.frameRate = frameRate;
        if ([captureDevice lockForConfiguration:NULL]){
            [captureDevice setActiveVideoMinFrameDuration:CMTimeMake(1, frameRate)];
            [captureDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, frameRate)];
            [captureDevice unlockForConfiguration];
        }
    }
    
    - (void)configureSession {
        if (!self.session) {
            self.session = [[AVCaptureSession alloc] init];
        }
        [self setCaptureDevicePosition:AVCaptureDevicePositionFront];
        [self setupCaptureVideoDataOutput];
    }
    
    - (void)refreshCaptureSessionConfigure {
        dispatch_async(dispatch_get_main_queue(), ^{
            [self configureFrameRate:self.frameRate];
            self.session.sessionPreset = [self sessionPresetForVideoSize:self.videoSize];
            [self.previewView.previewLayer removeFromSuperlayer];
            self.previewView.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
            [self.previewView.previewLayer setFrame:self.previewView.layer.bounds];
            [self.previewView.layer addSublayer:self.previewView.previewLayer];
            [self.previewView.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
        });
    }
    
    - (AVCaptureSessionPreset)sessionPresetForVideoSize:(CGSize)size {
        
        if (size.width <= 352 && size.height <= 288 && [self.session canSetSessionPreset:AVCaptureSessionPreset352x288]) {
            return AVCaptureSessionPreset352x288;
        } else if (size.width <= 640 && size.height <= 480 && [self.session canSetSessionPreset:AVCaptureSessionPreset640x480]) {
            return AVCaptureSessionPreset640x480;
        } else if (size.width <= 960 && size.height <= 540 && [self.session canSetSessionPreset:AVCaptureSessionPresetiFrame960x540]) {
            return AVCaptureSessionPresetiFrame960x540;
        } else if (size.width <= 1280 && size.height <= 720 && [self.session canSetSessionPreset:AVCaptureSessionPreset1280x720]) {
            return AVCaptureSessionPreset1280x720;
        } else if (size.width <= 1920 && size.height <= 1080 && [self.session canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
            return AVCaptureSessionPreset1920x1080;
        } else if (size.width <= 3840 && size.height <= 2160 && [self.session canSetSessionPreset:AVCaptureSessionPreset3840x2160]) {
            return AVCaptureSessionPreset3840x2160;
        }
        return AVCaptureSessionPresetHigh;
    }
    
    - (void)setupCaptureVideoDataOutput {
        if (!self.videoDataOutput) {
            // Configure the output before adding it to the session
            AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
            videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                        forKey:(id)kCVPixelBufferPixelFormatTypeKey];
            videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
            // Use a serial queue to receive video data
            dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);
            [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
            if ([self.session canAddOutput:videoDataOutput]) {
                [self.session addOutput:videoDataOutput];
            }
            self.videoDataOutput = videoDataOutput;
        }
    }
    
    - (void)setCaptureDevicePosition:(AVCaptureDevicePosition)position {
        NSError *error = nil;
        AVCaptureDevice *device = [self getCaptureDevicePosition:position];
        // Add input
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
        if (error != nil || input == nil) {
            NSLog(@"Configure device input failed:%@",error.localizedDescription);
            return;
        }
        if (self.input) {
            [self.session removeInput:self.input];
        }
        [self.session addInput:input];
        self.input = input;
        self.session.sessionPreset = [self sessionPresetForVideoSize:self.videoSize];
    }
    
    
    #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
    
    - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        [self createBuffer:imageBuffer size:self.videoSize];
        
    }
    
    #pragma mark - Cropping a CVPixelBufferRef
    /*
    /// aspectRatio (width / height)
    -(CVPixelBufferRef)createBuffer:(CVPixelBufferRef)buffer aspectRatio:(CGFloat)aspectRatio {
        
        CVPixelBufferLockBaseAddress(buffer, 0);
        size_t num = 2;//CVPixelBufferGetPlaneCount(buffer);
        void * address[num];
        size_t width[num];
        size_t height[num];
        size_t bytes[num];
        size_t rWidth = 0;
        size_t rHeight = 0;
        for (size_t i = 0; i != num; i++) {
            address[i] = CVPixelBufferGetBaseAddressOfPlane(buffer, i);
            width[i] = CVPixelBufferGetWidthOfPlane(buffer, i);
            height[i] = CVPixelBufferGetHeightOfPlane(buffer, i);
            bytes[i] = CVPixelBufferGetBytesPerRowOfPlane(buffer, i);
            size_t w = 0;
            size_t h = 0;
            w = ceil(height[i] * aspectRatio);
            if (w > width[i]) {
                w = width[i];
                h = ceil(w / aspectRatio);
            } else {
                h = height[i];
            }
            if (rWidth == 0 && rHeight == 0) {
                rWidth = w;
                rHeight = h;
            }
            size_t x = (width[i] - w) / 2;
            size_t y = (height[i] - h) / 2;
            size_t startpos = y * bytes[i] + x * (bytes[i] / width[i]);
            address[i] = address[i] + startpos / (bytes[i] / width[i]);
        }
        CVPixelBufferRef re = NULL;
        CVPixelBufferCreateWithPlanarBytes(kCFAllocatorDefault, rWidth, rHeight, CVPixelBufferGetPixelFormatType(buffer), NULL, CVPixelBufferGetDataSize(buffer), num, address, width, height, bytes, NULL, NULL, NULL, &re);
        
        CVPixelBufferUnlockBaseAddress(buffer, 0);
        
        return re;
    }*/
    - (void)createBuffer:(CVPixelBufferRef)originalBuffer size:(CGSize)size
    {
        if (self.ciContext == nil) {
            EAGLContext *eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];
            self.ciContext = [CIContext contextWithEAGLContext:eaglContext options:nil];
        }
        CVPixelBufferLockBaseAddress(originalBuffer, 0);
        CIImage *ciImage = [CIImage imageWithCVImageBuffer:originalBuffer];
        /*
        // An extra CGImageRef is used here to rebuild ciImage, only because using the CIImage directly made the later size scaling misbehave
        CGImageRef originalImageRef = [self.ciContext createCGImage:ciImage fromRect:ciImage.extent];
    //    CGColorSpaceRef colorSpace = CGImageGetColorSpace(originalImageRef);
        CGImageRef fixedImageRef = [self image:originalImageRef sizeToFitThat:size];
        CGImageRelease(originalImageRef);
        ciImage = [CIImage imageWithCGImage:fixedImageRef];*/
        
        CGFloat width = ciImage.extent.size.width;
        CGFloat height = ciImage.extent.size.height;
        CGFloat ws = width / size.width;
        CGFloat hs = height / size.height;
        CGFloat aspectRatio = (ws > hs) ? hs : ws;
        CGFloat w = floor(size.width * aspectRatio);
        CGFloat h = floor(size.height * aspectRatio);
        CGFloat x = ((width - w) / 2);
        CGFloat y = ((height - h) / 2);
        CGRect cropRect = CGRectMake(x, y, w, h);
        ciImage = [ciImage imageByCroppingToRect:cropRect];
        ciImage = [ciImage imageByApplyingTransform:CGAffineTransformMakeTranslation(-x, -y)];
        CGFloat scale = 1.0 / aspectRatio;
        ciImage = [ciImage imageByApplyingTransform:CGAffineTransformMakeScale(scale, scale)];
        if (self.input.device.position == AVCaptureDevicePositionFront) {
            ciImage = [ciImage imageByApplyingOrientation:kCGImagePropertyOrientationDownMirrored];
        }
        
        CVPixelBufferRef re = NULL;
        
        NSMutableDictionary *outputPixelBufferAttributes = [NSMutableDictionary dictionary];
        [outputPixelBufferAttributes setObject:@(CVPixelBufferGetPixelFormatType(originalBuffer)) forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
        [outputPixelBufferAttributes setObject:@(size.width) forKey:(__bridge NSString *) kCVPixelBufferWidthKey];
        [outputPixelBufferAttributes setObject:@(size.height) forKey:(__bridge NSString *) kCVPixelBufferHeightKey];
        [outputPixelBufferAttributes setObject:@{} forKey:(__bridge NSString *) kCVPixelBufferIOSurfacePropertiesKey];
        
        CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, CVPixelBufferGetPixelFormatType(originalBuffer), (__bridge CFDictionaryRef)outputPixelBufferAttributes, &re);
    //    CVOpenGLESTextureCacheCreateTextureFromImage
        [self.ciContext render:ciImage toCVPixelBuffer:re];
        
        ciImage = [CIImage imageWithCVImageBuffer:re];
        
        UIImage *image = [UIImage imageWithCIImage:ciImage];
    //    CGImageRelease(fixedImageRef);
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.trtcPreviewView setImage:image];
        });
    //    CVOpenGLESTextureCacheCreateTextureFromImage -6683
        CVPixelBufferUnlockBaseAddress(originalBuffer, 0);
        
        
        
        NSLog(@"[PixelBuffer] %ld  height:%ld",CVPixelBufferGetWidth(re),CVPixelBufferGetHeight(re));
    
        
        // TRTCVideoPixelFormat_NV12 corresponds to kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
        TRTCVideoFrame *frame = [[TRTCVideoFrame alloc] init];
        frame.pixelFormat = TRTCVideoPixelFormat_NV12;
        frame.bufferType = TRTCVideoBufferType_PixelBuffer;
        frame.pixelBuffer = re;
        [self.trtc sendCustomVideoData:frame];
        // Release the buffer created above only after it has been handed to TRTC; releasing it
        // inside the async preview block could free it before the frame is sent.
        CVPixelBufferRelease(re);
        /*
        CVPixelBufferLockBaseAddress(originalBuffer, 0);
    //    if (width >= size.width && height >= size.height) {
            CGFloat ws = width / size.width;
            CGFloat hs = height / size.height;
            CGFloat aspectRatio = (ws > hs) ? hs : ws;
            CGFloat w = floor(size.width * aspectRatio);
            CGFloat h = floor(size.height * aspectRatio);
            CGFloat x = ((width - w) / 2);
            CGFloat y = ((height - h) / 2);
            CGRect cropRect = CGRectMake(x, y, w, h);
            ciImage = [ciImage imageByCroppingToRect:cropRect];
            ciImage = [ciImage imageByApplyingTransform:CGAffineTransformMakeTranslation(-x, -y)];
            CGFloat scale = 1.0 / aspectRatio;
            ciImage = [ciImage imageByApplyingTransform:CGAffineTransformMakeScale(scale, scale)];
            [ciContext render:ciImage toCVPixelBuffer:originalBuffer];
            if (self.input.device.position == AVCaptureDevicePositionFront) {
                ciImage = [ciImage imageByApplyingOrientation:kCGImagePropertyOrientationDownMirrored];
            }
            UIImage *image = [UIImage imageWithCIImage:ciImage];
            dispatch_async(dispatch_get_main_queue(), ^{
                [self.trtcPreviewView setImage:image];
            });
    //        CGImageRelease(imageRef);
            CVPixelBufferUnlockBaseAddress(originalBuffer, 0);
    //      TRTCVideoPixelFormat_NV12 corresponds to kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            TRTCVideoFrame *frame = [[TRTCVideoFrame  alloc] init];
            frame.pixelFormat = TRTCVideoPixelFormat_NV12;
            frame.bufferType = TRTCVideoBufferType_PixelBuffer;
            frame.pixelBuffer = originalBuffer;
            [self.trtc sendCustomVideoData:frame];
    //    } else {
    //        CVPixelBufferUnlockBaseAddress(originalBuffer, 0);
    //    }*/
    }
    
    - (CGImageRef)image:(CGImageRef)image sizeToFitThat:(CGSize)size {
        size_t oWidth = CGImageGetWidth(image);
        size_t oHeight = CGImageGetHeight(image);
        CGFloat ws = 1.0 * size.width / (CGFloat)oWidth;
        CGFloat hs = 1.0 * size.height / (CGFloat)oHeight;
        CGFloat aspectRatio = (ws > hs) ? ws : hs;
        CGFloat w = floor(size.width / aspectRatio);
        CGFloat h = floor(size.height / aspectRatio);
        CGFloat x = ((oWidth - w) / 2);
        CGFloat y = ((oHeight - h) / 2);
        CGRect cropRect = CGRectMake(x, y, w, h);
        CGImageRef cropImage = CGImageCreateWithImageInRect(image, cropRect);
        CGAffineTransform transform = CGAffineTransformIdentity;
            
        CGContextRef ctx = CGBitmapContextCreate(NULL, size.width, size.height,
                                                 CGImageGetBitsPerComponent(cropImage), 0,
                                                 CGImageGetColorSpace(cropImage),
                                                 CGImageGetBitmapInfo(cropImage));
        CGContextConcatCTM(ctx, transform);
        CGContextDrawImage(ctx, CGRectMake(0, 0, size.width, size.height), cropImage);
        // Create a new CGImage from the drawing context
        CGImageRef scaleImage = CGBitmapContextCreateImage(ctx);
        CGContextRelease(ctx);
        CGImageRelease(cropImage);
        return scaleImage;
    }
    
    /*
     CVPixelBufferRef buffer = [self createBuffer:originalBuffer aspectRatio:needRotate?(size.height / size.width):(size.width / size.height)];
     // CVPixelBufferCreateWithPlanarBytes for YUV input
     CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(buffer), CVPixelBufferGetHeight(buffer));
     CVPixelBufferLockBaseAddress(buffer, 0);
     GLubyte *sourceImageBytes =  CVPixelBufferGetBaseAddressOfPlane(buffer, 0);
     CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes, CVPixelBufferGetBytesPerRow(buffer) * originalSize.height, NULL);
     CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
     CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width, (int)originalSize.height, 8, 32, CVPixelBufferGetBytesPerRow(buffer), genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
     GLubyte *imageData = (GLubyte *) calloc(1, (int)size.width * (int)size.height * 4);
     CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)size.width, (int)size.height, 8, (int)size.width * 4, genericRGBColorspace,  kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
     CGAffineTransform transform = CGAffineTransformIdentity;
     if (needRotate) {
         transform = CGAffineTransformTranslate(transform, size.height, 0);
         transform = CGAffineTransformRotate(transform, M_PI_2);
     }
     CGContextConcatCTM(imageContext, transform);
     if (needRotate) {
         CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, size.height, size.width), cgImageFromBytes);
     } else {
         CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, size.width, size.height), cgImageFromBytes);
     }
     CGImageRef cgimg = CGBitmapContextCreateImage(imageContext);
     UIImage *img = [UIImage imageWithCGImage:cgimg];
     CGImageRelease(cgimg);
     dispatch_async(dispatch_get_main_queue(), ^{
         [self.trtcPreviewView setImage:img];
     });
     CGImageRelease(cgImageFromBytes);
     CGContextRelease(imageContext);
     CGColorSpaceRelease(genericRGBColorspace);
     CGDataProviderRelease(dataProvider);
     CVPixelBufferRef pixel_buffer = NULL;
     CVPixelBufferCreateWithBytes(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA, imageData, size.width * 4, NULL, NULL, NULL, &pixel_buffer);
     CVPixelBufferUnlockBaseAddress(buffer, 0);
     CVPixelBufferRelease(buffer);
     if (pixel_buffer) {
         TRTCVideoFrame *frame = [[TRTCVideoFrame  alloc] init];
         frame.pixelFormat = TRTCVideoPixelFormat_32BGRA;
         frame.bufferType = TRTCVideoBufferType_PixelBuffer;
         frame.pixelBuffer = pixel_buffer;
         [self.trtc sendCustomVideoData:frame];
         CVBufferRelease(pixel_buffer);
         dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
             free(imageData);
         });
     }
     */
    
    #pragma mark - priv
    
    - (AVCaptureDevice *)getCaptureDevicePosition:(AVCaptureDevicePosition)position {
        NSArray *devices = nil;
        
        if (@available(iOS 10.0, *)) {
            AVCaptureDeviceDiscoverySession *deviceDiscoverySession =  [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
            devices = deviceDiscoverySession.devices;
        } else {
    #pragma clang diagnostic push
    #pragma clang diagnostic ignored "-Wdeprecated-declarations"
            devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    #pragma clang diagnostic pop
        }
        
        for (AVCaptureDevice *device in devices) {
            if (position == device.position) {
                return device;
            }
        }
        return nil;
    }
    
    @end

    The flow is as follows:

    1. Create a CIImage from the CVPixelBufferRef.

    2. Apply whatever operations you need to the CIImage: rotation, cropping, scaling, filters and so on (a filter sketch follows the step-3 snippet below).

    3. Use the CIContext to render the processed CIImage into a new CVPixelBufferRef (because the processing can include scaling, the dimensions may no longer match the original CVPixelBufferRef, so a new one has to be created):

        CVPixelBufferRef re = NULL;
        
        NSMutableDictionary *outputPixelBufferAttributes = [NSMutableDictionary dictionary];
        [outputPixelBufferAttributes setObject:@(CVPixelBufferGetPixelFormatType(originalBuffer)) forKey:(__bridge NSString *) kCVPixelBufferPixelFormatTypeKey];
        [outputPixelBufferAttributes setObject:@(size.width) forKey:(__bridge NSString *) kCVPixelBufferWidthKey];
        [outputPixelBufferAttributes setObject:@(size.height) forKey:(__bridge NSString *) kCVPixelBufferHeightKey];
        [outputPixelBufferAttributes setObject:@{} forKey:(__bridge NSString *) kCVPixelBufferIOSurfacePropertiesKey];
        
        CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, CVPixelBufferGetPixelFormatType(originalBuffer), (__bridge CFDictionaryRef)outputPixelBufferAttributes, &re);
    //    CVOpenGLESTextureCacheCreateTextureFromImage
        [self.ciContext render:ciImage toCVPixelBuffer:re];
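
    Step 2 mentions filters; as a minimal sketch, a Core Image filter (CISepiaTone, picked arbitrarily from the filter reference linked at the top) could be dropped in right before the render call above. This fragment reuses the ciImage and re variables from that snippet:

        CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
        [filter setValue:ciImage forKey:kCIInputImageKey];
        [filter setValue:@(0.8) forKey:kCIInputIntensityKey];
        // Fall back to the unfiltered image if the filter could not produce output
        CIImage *filtered = filter.outputImage ?: ciImage;
        [self.ciContext render:filtered toCVPixelBuffer:re];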

    Notes:

    1. Whether you can get a CGImage or a CIImage back out of a UIImage depends entirely on whether the UIImage was created from a CIImage or from a CGImage (see the sketch after these notes).

    2. If you are not comfortable processing images with CIImage, you can also convert to CGImage first and work there instead, but... why would you?

    3. Pay close attention to size changes and write the processed result into a new buffer; otherwise the output is very likely not what you want.
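
    A quick sketch for note 1, reusing re and self.ciContext from the code above: each accessor only returns something when the UIImage was built from that representation.

    CIImage *ci = [CIImage imageWithCVImageBuffer:re];
    UIImage *fromCI = [UIImage imageWithCIImage:ci];
    // fromCI.CIImage is non-nil, fromCI.CGImage is NULL
    
    CGImageRef cg = [self.ciContext createCGImage:ci fromRect:ci.extent];
    UIImage *fromCG = [UIImage imageWithCGImage:cg];
    // fromCG.CGImage is non-NULL, fromCG.CIImage is nil
    CGImageRelease(cg);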

  • Original post: https://www.cnblogs.com/yuxiaoyiyou/p/12464740.html