  • iOS 4: getting camera frame data directly

    Frameworks you need to link: CoreMedia, CoreVideo, QuartzCore, AVFoundation.
    MyAVController.h:

    #import <UIKit/UIKit.h>
    #import <AVFoundation/AVFoundation.h>
    #import <CoreGraphics/CoreGraphics.h>
    #import <CoreVideo/CoreVideo.h>
    #import <CoreMedia/CoreMedia.h>
     
    @interface MyAVController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate> {
        AVCaptureSession *_captureSession;        // drives the capture pipeline
        UIImageView *_imageView;                  // displays frames converted to UIImage
        CALayer *_customLayer;                    // displays frames pushed in as CGImages
        AVCaptureVideoPreviewLayer *_prevLayer;   // Apple's built-in live preview
    }
     
    @property (nonatomic, retain) AVCaptureSession *captureSession;
    @property (nonatomic, retain) UIImageView *imageView;
    @property (nonatomic, retain) CALayer *customLayer;
    @property (nonatomic, retain) AVCaptureVideoPreviewLayer *prevLayer;
    - (void)initCapture;
     
    @end
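 
    The implementation below renders the same camera feed three ways at once: a plain CALayer whose contents are set by hand to CGImages built from the raw pixel buffers, a UIImageView fed UIImages, and Apple's ready-made AVCaptureVideoPreviewLayer for comparison.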
     
    MyAVController.m:
     
    #import "MyAVController.h"
     
    @implementation MyAVController
     
    @synthesize captureSession = _captureSession;
    @synthesize imageView = _imageView;
    @synthesize customLayer = _customLayer;
    @synthesize prevLayer = _prevLayer;
     
    #pragma mark -
    #pragma mark Initialization
    - (id)init {
        self = [super init];
        if (self) {
            self.imageView = nil;
            self.prevLayer = nil;
            self.customLayer = nil;
        }
        return self;
    }
     
    - (void)viewDidLoad {
        [super viewDidLoad];
        [self initCapture];
    }
     
    - (void)initCapture {
        // Grab the default video device (the camera) and wrap it in a capture input.
        AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
            deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
                            error:nil];
        AVCaptureVideoDataOutput *captureOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
        // Drop frames we are too slow to process rather than queueing them up.
        captureOutput.alwaysDiscardsLateVideoFrames = YES;
        //captureOutput.minFrameDuration = CMTimeMake(1, 10);
     
        // Deliver sample buffers on a dedicated serial queue.
        dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
        [captureOutput setSampleBufferDelegate:self queue:queue];
        dispatch_release(queue);
     
        // Ask for BGRA pixel buffers so the bytes can go straight into Core Graphics.
        NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
        NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
        [captureOutput setVideoSettings:videoSettings];
     
        self.captureSession = [[[AVCaptureSession alloc] init] autorelease];
        [self.captureSession addInput:captureInput];
        [self.captureSession addOutput:captureOutput];
        [self.captureSession startRunning];
     
        // A plain CALayer we push CGImages into by hand. Frames arrive rotated
        // 90 degrees, so rotate the layer to compensate.
        self.customLayer = [CALayer layer];
        self.customLayer.frame = self.view.bounds;
        self.customLayer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
        self.customLayer.contentsGravity = kCAGravityResizeAspectFill;
        [self.view.layer addSublayer:self.customLayer];
     
        // A UIImageView we push UIImages into.
        self.imageView = [[[UIImageView alloc] init] autorelease];
        self.imageView.frame = CGRectMake(0, 0, 100, 100);
        [self.view addSubview:self.imageView];
     
        // Apple's ready-made live preview, for comparison.
        self.prevLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        self.prevLayer.frame = CGRectMake(100, 0, 100, 100);
        self.prevLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
        [self.view.layer addSublayer:self.prevLayer];
    }
     
    #pragma mark -
    #pragma mark AVCaptureSession delegate
    - (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection
    {
        // This callback runs on the camera queue, not the main thread,
        // so it needs its own autorelease pool.
        NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
     
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
     
        // Wrap the raw BGRA bytes in a bitmap context and snapshot it as a CGImage.
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8,
            bytesPerRow, colorSpace,
            kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
     
        // UIKit and Core Animation must only be touched on the main thread.
        [self.customLayer performSelectorOnMainThread:@selector(setContents:)
                                           withObject:(id)newImage
                                        waitUntilDone:YES];
     
        UIImage *image = [UIImage imageWithCGImage:newImage
                                             scale:1.0
                                       orientation:UIImageOrientationRight];
        CGImageRelease(newImage);
     
        [self.imageView performSelectorOnMainThread:@selector(setImage:)
                                         withObject:image
                                      waitUntilDone:YES];
     
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        [pool drain];
    }
     
    #pragma mark -
    #pragma mark Memory management
     
    - (void)viewDidUnload {
        [super viewDidUnload];
        self.imageView = nil;
        self.customLayer = nil;
        self.prevLayer = nil;
    }
     
    - (void)dealloc {
        [_captureSession release];
        [super dealloc];
    }
     
    @end
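 
    Not part of the original article: to actually see the controller on screen you need a host app. Below is a minimal sketch of an MRC-era app delegate that installs MyAVController as the root view controller; the MyAppDelegate name and the window setup are assumptions for illustration.
 
    #import <UIKit/UIKit.h>
    #import "MyAVController.h"
     
    // Hypothetical host app delegate -- not part of the original article.
    @interface MyAppDelegate : NSObject <UIApplicationDelegate> {
        UIWindow *_window;
    }
    @end
     
    @implementation MyAppDelegate
     
    - (BOOL)application:(UIApplication *)application
    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
        _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
        // Installing the controller triggers viewDidLoad, which starts the capture session.
        MyAVController *controller = [[[MyAVController alloc] init] autorelease];
        _window.rootViewController = controller;
        [_window makeKeyAndVisible];
        return YES;
    }
     
    - (void)dealloc {
        [_window release];
        [super dealloc];
    }
     
    @end
 
    On current iOS versions the app must also declare an NSCameraUsageDescription entry in Info.plist, or the capture session will not deliver frames.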

    Original article: http://www.benjaminloulier.com/articles/ios4-and-direct-access-to-the-camera

  • Reposted from: https://www.cnblogs.com/mfryf/p/2388437.html