  • Using AVCaptureSession to capture each frame's sampleBuffer

    Define the global variables:

      /// Session that coordinates device input and output

        AVCaptureSession *_captureSession;

        /// Capture device

        AVCaptureDevice *_captureDevice;

        /// Input sources

        AVCaptureDeviceInput *_videoCaptureDeviceInput;

        AVCaptureDeviceInput *_audioCaptureDeviceInput;

        

        /// Video data output

        AVCaptureVideoDataOutput *_captureVideoDataOutput;

        /// Audio data output

        AVCaptureAudioDataOutput *_captureAudioDataOutput;

        /// Delegate callback queue

        dispatch_queue_t my_Queue;

        /// Video connection

        AVCaptureConnection *_videoConnection;

        /// Audio connection

        AVCaptureConnection *_audioConnection; 

       // Image view used to display each frame

        UIImageView *bufferImageView;
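
        /// Assumed flag, not declared in the original post: YES while recording
        BOOL _isWriting;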

    // Output file path

    @property(nonatomic,copy)NSString *path;

    /// Asset writer

    @property(nonatomic,strong)AVAssetWriter *assetWriter;

    @property(nonatomic,strong) AVAssetWriterInputPixelBufferAdaptor *adaptor;

    /// Video writer input

    @property(nonatomic,strong)AVAssetWriterInput *videoInput;

    /// Audio writer input

    @property(nonatomic,strong)AVAssetWriterInput *audioInput;

    - (void)initDevice {

        bufferImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 64, 375, 375)];

        [self.view addSubview:bufferImageView];

        _captureSession = [[AVCaptureSession alloc] init];

        if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) {

            [_captureSession setSessionPreset:AVCaptureSessionPreset640x480];

        }

        // Get the back camera (helper sketched after this method)

        _captureDevice = [self backCamera];

      // Audio input

        AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];

        

        _audioCaptureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:nil];

        // Video input

        _videoCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:nil];

        

        [_captureSession addInput:_videoCaptureDeviceInput];

        [_captureSession addInput:_audioCaptureDeviceInput];

        

        // Cap capture at 15 fps; check the lock result instead of ignoring failure
        NSError *lockError = nil;
        if ([_captureDevice lockForConfiguration:&lockError]) {

            [_captureDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, 15)];

            [_captureDevice setActiveVideoMinFrameDuration:CMTimeMake(1, 15)];

            [_captureDevice unlockForConfiguration];
        }

        

        

        

        // Video data output

        _captureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];

        _captureVideoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]

                                                                            forKey:(id)kCVPixelBufferPixelFormatTypeKey];

        [_captureSession addOutput:_captureVideoDataOutput];

        my_Queue = dispatch_queue_create("myqueue", NULL);

        [_captureVideoDataOutput setSampleBufferDelegate:self queue:my_Queue];

        _captureVideoDataOutput.alwaysDiscardsLateVideoFrames = YES;

        

        

        // Audio data output

        _captureAudioDataOutput = [[AVCaptureAudioDataOutput alloc] init];

        [_captureAudioDataOutput setSampleBufferDelegate:self queue:my_Queue];

        [_captureSession addOutput:_captureAudioDataOutput];

        

        /// Video connection

        _videoConnection = [_captureVideoDataOutput connectionWithMediaType:AVMediaTypeVideo];

        _videoConnection.videoOrientation = AVCaptureVideoOrientationPortrait;

        /// Audio connection

        _audioConnection = [_captureAudioDataOutput connectionWithMediaType:AVMediaTypeAudio];

        [_captureSession startRunning];

    /// Note: right after setup it's best to toggle the front/back camera once; otherwise every image converted from a sampleBuffer comes out rotated 90° to the left. This seems to be because the camera's native orientation is landscape; a rotation sketch follows imageFromSampleBuffer: below.

    }
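
    The `backCamera` helper used above isn't shown in the original post; a minimal sketch, assuming the same pre-iOS-10 device-enumeration API used elsewhere in this code:

        // Hypothetical helper: return the back-facing camera, or nil if none exists
        - (AVCaptureDevice *)backCamera {
            for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
                if (device.position == AVCaptureDevicePositionBack) {
                    return device;
                }
            }
            return nil;
        }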

     Implement the delegate:

    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {

        UIImage *image = nil;

        if (captureOutput == _captureVideoDataOutput) { // Only process video frames here

            CFRetain(sampleBuffer);

            // Convert the sampleBuffer to an image; cropping can be done inside the method
            image = [self imageFromSampleBuffer:sampleBuffer];

            // At this point you can run any image-processing algorithm on the frame
            dispatch_async(dispatch_get_main_queue(), ^{
                bufferImageView.image = image;
            });

            CFRelease(sampleBuffer);
        }

        // Then write the sample buffer to file
        // (`_isWriting` is the assumed recording flag declared with the ivars above)
        if (_isWriting == NO) {
            return;
        }

        BOOL isVideo = YES;

        

        CFRetain(sampleBuffer);

        

        if (captureOutput != _captureVideoDataOutput) {

            isVideo = NO;

        }

        // Create the writer lazily on the first audio frame, since the channel
        // count and sample rate come from the audio format description
        if (_assetWriter == nil && !isVideo) {

            self.path = [NSString stringWithFormat:@"%@/Documents/movie.mp4", NSHomeDirectory()];

            CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);

            const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(fmt);

            [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil];

            [self createWriter:self.path Width:480 Height:480 channels:asbd->mChannelsPerFrame samples:asbd->mSampleRate];
        }

        if (CMSampleBufferDataIsReady(sampleBuffer)) {

            if (_assetWriter.status == AVAssetWriterStatusUnknown && isVideo) {
                // Start the session at the first video frame's presentation timestamp
                [_assetWriter startWriting];
                [_assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            }

            if (_assetWriter.status == AVAssetWriterStatusFailed) {
                NSLog(@"Writing failed: %@", _assetWriter.error);
            }

            if (isVideo) {
                if (image && self.videoInput.readyForMoreMediaData) {

                    CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

                    // Turn the (possibly processed) frame image back into a pixel
                    // buffer and append it through the adaptor
                    CVPixelBufferRef pixelBuffer = [self pixelBufferFromCGImage:image.CGImage size:CGSizeMake(480, 480)];
                    if (pixelBuffer) {
                        [self.adaptor appendPixelBuffer:pixelBuffer withPresentationTime:startTime];
                        CVPixelBufferRelease(pixelBuffer);
                    }
                }
            }
            else if (_assetWriter.status == AVAssetWriterStatusWriting) {
                [self.audioInput appendSampleBuffer:sampleBuffer];
            }
        }

        // Balance the CFRetain above even when the buffer wasn't ready
        CFRelease(sampleBuffer);
    }
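
    The post never shows how recording starts or stops; a hedged sketch of a stop method, assuming the `_isWriting` flag and the properties declared above (the method name is hypothetical):

        // Hypothetical stop: finish the writer and log the output path
        - (void)stopRecording {
            _isWriting = NO;
            [self.videoInput markAsFinished];
            [self.audioInput markAsFinished];
            [self.assetWriter finishWritingWithCompletionHandler:^{
                NSLog(@"Finished writing movie to %@", self.path);
            }];
        }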

    - (void)createWriter:(NSString *)path Width:(NSInteger)width Height:(NSInteger)height channels:(int)channels samples:(Float64)samples {

        /// Create the writer

        [[NSFileManager defaultManager] removeItemAtPath:path error:nil];

        NSURL *pathUrl = [NSURL fileURLWithPath:path];

        _assetWriter = [AVAssetWriter assetWriterWithURL:pathUrl fileType:AVFileTypeMPEG4 error:nil];

        _assetWriter.shouldOptimizeForNetworkUse = YES;

        

        

        [self initVideoInputHeight:height width:width];

        

        [self initAudioInputChannels:channels samples:samples];

    }

    // Initialize the video input

    - (void)initVideoInputHeight:(NSInteger)cy width:(NSInteger)cx {

        // Recording configuration: resolution, codec, and so on

        NSDictionary* settings = [NSDictionary dictionaryWithObjectsAndKeys:

                                  AVVideoCodecH264, AVVideoCodecKey,

                                  [NSNumber numberWithInteger: cx], AVVideoWidthKey,

                                  [NSNumber numberWithInteger: cy], AVVideoHeightKey,

                                  nil];

        // Create the video writer input

        _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];

        // Tell the input to tailor its processing to a real-time source

        _videoInput.expectsMediaDataInRealTime = YES;

        

      

        

        // Set up the pixel buffer adaptor (add by david)

        [self initVideoInputAdaptor];

        

        NSParameterAssert(_videoInput);

        NSParameterAssert([_assetWriter canAddInput:_videoInput]);

        // Add the video input to the writer

        [_assetWriter addInput:_videoInput];

    }

    // Set up the pixel buffer adaptor

    - (void)initVideoInputAdaptor

    {

        NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:

                                                               [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey, nil];

        

        _adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput

                                                                                    sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

        

    }
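
    Once the writer has started, the adaptor also exposes a pixel buffer pool, which is cheaper than allocating a fresh buffer per frame with CVPixelBufferCreate; a sketch of that alternative (the method name is an assumption):

        // Hypothetical variant: pull a reusable buffer from the adaptor's pool
        // (the pool is non-NULL only after startWriting has been called)
        - (CVPixelBufferRef)dequeuePooledPixelBuffer {
            CVPixelBufferRef buffer = NULL;
            if (self.adaptor.pixelBufferPool != NULL) {
                CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, self.adaptor.pixelBufferPool, &buffer);
            }
            return buffer;
        }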

    // Initialize the audio input

    - (void)initAudioInputChannels:(int)ch samples:(Float64)rate {

        // Audio configuration: format (AAC here), channel count, sample rate, and bit rate

        NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:

                                  [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,

                                  [ NSNumber numberWithInt: ch], AVNumberOfChannelsKey,

                                  [ NSNumber numberWithFloat: rate], AVSampleRateKey,

                                  [ NSNumber numberWithInt: 128000], AVEncoderBitRateKey,

                                  nil];

        // Create the audio writer input

        _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings];

        // Tell the input to tailor its processing to a real-time source

        _audioInput.expectsMediaDataInRealTime = YES;

        // Add the audio input to the writer

        [_assetWriter addInput:_audioInput];

        

    }

    - (CVPixelBufferRef )pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size

    {

        NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:

                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,

                                 [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];

        CVPixelBufferRef pxbuffer = NULL;

        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32BGRA, (__bridge CFDictionaryRef)options, &pxbuffer);

        

        

        NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

        

        CVPixelBufferLockBaseAddress(pxbuffer, 0);

        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        NSParameterAssert(pxdata != NULL);

        

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();

        CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);

        NSParameterAssert(context);

        

        CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);

        

        CGColorSpaceRelease(rgbColorSpace);

        CGContextRelease(context);

        

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        

        return pxbuffer;

    }
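
    A usage note: the buffer returned above carries a +1 retain count, so the caller must release it once appended, as the delegate does. A small hypothetical wrapper showing the pattern:

        // Sketch: convert a frame image and append it at the given timestamp
        - (BOOL)appendImage:(UIImage *)image atTime:(CMTime)time {
            CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage size:CGSizeMake(480, 480)];
            if (buffer == NULL) {
                return NO;
            }
            BOOL ok = [self.adaptor appendPixelBuffer:buffer withPresentationTime:time];
            CVPixelBufferRelease(buffer); // balance the +1 from pixelBufferFromCGImage:
            return ok;
        }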

    // Create a UIImage from the sample buffer data

    - (UIImage *) imageFromSampleBuffer:(CMSampleBufferRef) sampleBuffer

    {

        // Get the CMSampleBuffer's Core Video image buffer for the media data

        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        // Lock the base address of the pixel buffer

        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        

        // Get the base address of the pixel buffer

        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

        

        // Get the number of bytes per row of the pixel buffer

        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

        // Get the width and height of the pixel buffer

        size_t width = CVPixelBufferGetWidth(imageBuffer);

        size_t height = CVPixelBufferGetHeight(imageBuffer);

        if (width == 0 || height == 0) {
            // Unlock before bailing out, to balance the lock above
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
            return nil;
        }

        // Create a device-dependent RGB color space

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        

        // Create a bitmap graphics context backed by the sample buffer data

        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,

                                                     bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);


        

        // Create a Quartz image from the pixel data in the bitmap context

        CGImageRef quartzImage = CGBitmapContextCreateImage(context);

        // Crop the image to a square
        CGImageRef cgImage = CGImageCreateWithImageInRect(quartzImage, CGRectMake(0, 0, height, height));

        // Unlock the pixel buffer

        CVPixelBufferUnlockBaseAddress(imageBuffer,0);

        

        // Release the context and the color space

        CGContextRelease(context);

        CGColorSpaceRelease(colorSpace);

        

        // Create a UIImage from the Quartz image

        UIImage *image = [UIImage imageWithCGImage:cgImage];
        // Alternative that bakes in an orientation instead:
        // UIImage *image = [UIImage imageWithCGImage:quartzImage scale:1.0 orientation:UIImageOrientationRight];

        // Release the Quartz images
        CGImageRelease(cgImage);
        CGImageRelease(quartzImage);

        return image;
    }
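
    For the 90° rotation mentioned after initDevice, one low-cost fix is to re-wrap the CGImage with an explicit orientation rather than rotating pixels; a sketch (the method name is hypothetical):

        // Hypothetical helper: mark the image as rotated instead of redrawing it
        - (UIImage *)orientedImageFromSampleBufferImage:(UIImage *)image {
            return [UIImage imageWithCGImage:image.CGImage
                                       scale:1.0
                                 orientation:UIImageOrientationRight];
        }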

  • Original post: https://www.cnblogs.com/xia0huihui/p/5803488.html