zoukankan      html  css  js  c++  java
  • iOS 4 直接获取摄像头数据 (direct access to camera frame data on iOS 4)

    需要添加的framework:CoreMedia,CoreVideo,QuartzCore,AVFoundation
    MyAVController.h:

    1
    2
    3
    4
    5
    6
    7
    8
    9
    10
    11
    12
    13
    14
    15
    16
    17
    18
    19
    20
    21
    22
    23
    24
    25
    26
    27
    28
    29
    30
    31
    32
    33
    34
    35
    36
    37
    38
    39
    40
    41
    42
    43
    44
    45
    46
    47
    48
    49
    50
    51
    52
    53
    54
    55
    56
    57
    58
    59
    60
    61
    62
    63
    64
    65
    66
    67
    68
    69
    70
    71
    72
    73
    74
    75
    76
    77
    78
    79
    80
    81
    82
    83
    84
    85
    86
    87
    88
    89
    90
    91
    92
    93
    94
    95
    96
    97
    98
    99
    100
    101
    102
    103
    104
    105
    106
    107
    108
    109
    110
    111
    112
    113
    114
    115
    116
    117
    118
    119
    120
    121
    122
    123
    124
    125
    126
    127
    128
    129
    130
    131
    132
    133
    134
    135
    136
    137
    138
    139
    140
    141
    142
    143
    144
    #import <UIKit/UIKit.h>
    #import <AVFoundation/AVFoundation.h>
    #import <CoreGraphics/CoreGraphics.h>
    #import <CoreVideo/CoreVideo.h>
    #import <CoreMedia/CoreMedia.h>
     
    // View controller that captures raw video frames from the camera via the
    // iOS 4 AVFoundation pipeline and displays them three ways at once:
    // as CGImage contents of a plain CALayer, as UIImages in a UIImageView,
    // and through Apple's AVCaptureVideoPreviewLayer.
    // NOTE: this sample predates ARC — properties use `retain` and the
    // implementation is manual-reference-counting (MRC) code.
    @interface MyAVController : UIViewController <
    AVCaptureVideoDataOutputSampleBufferDelegate> {
        AVCaptureSession *_captureSession;       // owns the capture pipeline
        UIImageView *_imageView;                 // sink: frames as UIImage
        CALayer *_customLayer;                   // sink: frames as raw CGImage
        AVCaptureVideoPreviewLayer *_prevLayer;  // sink: built-in live preview
    }
     
    @property (nonatomic, retain) AVCaptureSession *captureSession;
    @property (nonatomic, retain) UIImageView *imageView;
    @property (nonatomic, retain) CALayer *customLayer;
    @property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
    // Builds and starts the capture session; called from -viewDidLoad.
    - (void)initCapture;
     
    @end
     
    MyAVController.m:
     
    #import "MyAVController.h"
     
    @implementation MyAVController
     
    @synthesize captureSession = _captureSession;
    @synthesize imageView = _imageView;
    @synthesize customLayer = _customLayer;
    @synthesize prevLayer = _prevLayer;
     
    #pragma mark -
    #pragma mark Initialization
     
    - (id)init {
        self = [super init];
        // Nothing to initialize here: +alloc already zero-fills every ivar,
        // so the original's explicit nil assignments were redundant (and
        // calling property accessors from -init is discouraged anyway).
        return self;
    }
     
    - (void)viewDidLoad {
        [super viewDidLoad];  // fix: lifecycle overrides must call super
        [self initCapture];
    }
     
    // Builds the AVFoundation capture pipeline: default camera -> 32BGRA
    // frames delivered to this object on a private queue, plus three
    // on-screen sinks (CALayer, UIImageView, AVCaptureVideoPreviewLayer).
    - (void)initCapture {
        // Input: the device's default video capture device.
        AVCaptureDeviceInput *captureInput =
            [AVCaptureDeviceInput deviceInputWithDevice:
                 [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                                                  error:nil];
     
        // Output: uncompressed frames; drop late frames rather than queueing
        // them so the delegate callback never falls behind the camera.
        AVCaptureVideoDataOutput *captureOutput =
            [[AVCaptureVideoDataOutput alloc] init];
        captureOutput.alwaysDiscardsLateVideoFrames = YES;
        //captureOutput.minFrameDuration = CMTimeMake(1, 10);
     
        // Frames must be delivered on a serial background queue, never main.
        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [captureOutput setSampleBufferDelegate:self queue:queue];
        dispatch_release(queue);
     
        // Ask for 32-bit BGRA so the bytes map directly onto a CGBitmapContext.
        NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
        NSNumber *value =
            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
        NSDictionary *videoSettings =
            [NSDictionary dictionaryWithObject:value forKey:key];
        [captureOutput setVideoSettings:videoSettings];
     
        // MRC fix: assign through the retaining property, then balance the
        // +1 from alloc/init — the original over-retained the session.
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        self.captureSession = session;
        [session release];
     
        [self.captureSession addInput:captureInput];
        [self.captureSession addOutput:captureOutput];
        // MRC fix: the session retains its outputs; the original leaked
        // captureOutput by never balancing its alloc.
        [captureOutput release];
     
        // Sink 1: a plain CALayer whose contents are set to each CGImage.
        self.customLayer = [CALayer layer];
        self.customLayer.frame = self.view.bounds;
        // Camera frames arrive rotated; counter-rotate the layer 90 degrees.
        self.customLayer.transform =
            CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
        self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
        [self.view.layer addSublayer:self.customLayer];
     
        // Sink 2: a UIImageView fed a UIImage per frame.
        UIImageView *frameView = [[UIImageView alloc] init];
        frameView.frame = CGRectMake(0, 0, 100, 100);
        self.imageView = frameView;
        [frameView release];  // MRC fix: the retain property already owns it
        [self.view addSubview:self.imageView];
     
        // Sink 3: Apple's built-in preview layer.
        self.prevLayer =
            [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
        self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [self.view.layer addSublayer:self.prevLayer];
     
        // Fix: start capturing only after the whole pipeline is configured
        // (the original called -startRunning before the sinks existed).
        [self.captureSession startRunning];
    }
     
    #pragma mark -
    #pragma mark AVCaptureSession delegate
     
    // Called on the private camera queue for every captured frame. Converts
    // the locked BGRA pixel buffer into a CGImage/UIImage and hands it to
    // the two manual sinks on the main thread.
    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        // Background queue under MRC: we need our own autorelease pool.
        NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
     
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
     
        // Wrap the raw BGRA bytes in a bitmap context and snapshot a CGImage.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(
            baseAddress, width, height, 8, bytesPerRow, colorSpace,
            kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
     
        // waitUntilDone:YES matters: newImage is released just below, so the
        // main thread must consume it before this method proceeds.
        [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                           withObject:(id)newImage
                                        waitUntilDone:YES];
     
        // UIImage retains the CGImage, so releasing newImage after is safe.
        UIImage *image = [UIImage imageWithCGImage:newImage
                                             scale:1.0
                                       orientation:UIImageOrientationRight];
        CGImageRelease(newImage);
     
        [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                         withObject:image
                                      waitUntilDone:YES];
     
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
     
        [pool drain];
    }
     
    #pragma mark -
    #pragma mark Memory management
     
    - (void)viewDidUnload {
        [super viewDidUnload];  // fix: lifecycle overrides must call super
        self.imageView = nil;
        self.customLayer = nil;
        self.prevLayer = nil;
    }
     
    - (void)dealloc {
        // MRC fix: release every retained ivar directly. The original only
        // released the session (leaking the three view objects) and went
        // through the property getter, which is discouraged in -dealloc.
        [_captureSession release];
        [_imageView release];
        [_customLayer release];
        [_prevLayer release];
        [super dealloc];
    }
     
    @end

    原文来自:http://www.benjaminloulier.com/articles/ios4-and-direct-access-to-the-camera

  • 相关阅读:
    python接口自动化测试二十四:上传多个附件,参数化
    python接口自动化测试二十三:文件上传
    python接口自动化测试二十二:文件下载
    python接口自动化测试二十一:类和方法
    Delphi开发ocx插件的调试
    Confluence 6 推荐的更新通知设置和禁用
    Confluence 6 配置推荐更新邮件通知默认的初始化设置
    为 Confluence 6 分发包设置一个邮件会话
    Confluence 6 测试电子邮件设置
    为 Confluence 6 配置发送邮件消息
  • 原文地址:https://www.cnblogs.com/mfryf/p/2388437.html
Copyright © 2011-2022 走看看