Reposted from http://www.it165.net/pro/html/201408/19449.html
-
This utility class shows a live camera preview, handles rotation automatically, and grabs frames actively, so the rate at which you read frames is not limited by the callback.
The system API mostly delivers frames asynchronously through delegate callbacks. Someone as obsessive as me can't stand that, so I had to turn the passive delivery into an active grab: the delegate callback keeps overwriting an internal buffer with the newest frame, and the caller copies it out whenever it wants one.
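Before the class itself, here is a rough sketch of how the "active grab" looks from the caller's side. Only the TKVideoCapture API comes from the code below; the view controller, the CADisplayLink driver, and the onFrame: method name are my own assumptions for illustration.

#import <UIKit/UIKit.h>
#import "TKVideoCapture.h"

@interface CameraViewController : UIViewController
@end

@implementation CameraViewController
{
    TKVideoCapture* _capture;      // the utility class from this article
    CADisplayLink*  _displayLink;  // assumption: pull one frame per screen refresh
}

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Create the capture object on top of a preview view and start the session.
    _capture = [[TKVideoCapture alloc] init];
    [_capture create:self.view frame:tkVideoFrame480x360];
    [_capture start];

    // Instead of waiting for delegate callbacks, poll for the latest frame.
    _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(onFrame:)];
    [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
}

- (void)onFrame:(CADisplayLink*)link
{
    uint32_t length = 0;
    // Returns a BGRA buffer of width * height * 4 bytes (480 * 360 * 4 here).
    // Note: until the camera has delivered its first frame, the contents are undefined.
    uint8_t* bgra = [_capture get_image_rgb32:&length];
    if (bgra == NULL || length == 0)
        return;

    // ... hand the buffer to an encoder / detector / renderer ...
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];

    [_displayLink invalidate];
    _displayLink = nil;

    [_capture stop];
    [_capture destory];
    [_capture release];
    _capture = nil;
}

@end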
Header file:
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

// All of these sizes are 4:3.
typedef enum TKVideoFrameSize
{
    tkVideoFrame480x360 = 480 << 16 | 360,
    tkVideoFrame720x540 = 720 << 16 | 540,   // using this resolution is much faster
} TKVideoFrameSize;

@interface TKVideoCapture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>

- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type;
- (bool) destory;

- (bool) start;
- (bool) stop;

// Returns image data in BGRA byte order.
- (uint8_t*) get_image_rgb32:(uint32_t*)length;

@end

Implementation file (the Lock used inside comes from the previous article):

#import "TKVideoCapture.h"
#import <UIKit/UIKit.h>
#import <CoreGraphics/CoreGraphics.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>
#import "TKLock.h"

@interface TKVideoCapture ()
{
    TKVideoFrameSize _frametype;
    UIView* _preview;
    AVCaptureSession* _captureSession;
    AVCaptureVideoPreviewLayer* _capturePreview;
    AVCaptureVideoDataOutput* _captureOutput;
    AVCaptureDevice* _captureDevice;
    AVCaptureDeviceInput* _captureInput;

    uint8_t* _buffer_temp;   // every captured frame is written into this buffer
    uint8_t* _buffer_obox;   // copied over from _buffer_temp when the caller asks for a frame
    CGRect _subImageRect;    // rectangle of the cropped sub-image

    TKLock* _buffer_lock;
}

@end

@implementation TKVideoCapture

- (void) do_create
{
    self->_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    self->_captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self->_captureDevice error:nil];
    self->_captureOutput = [[AVCaptureVideoDataOutput alloc] init];

    if (self->_captureOutput)
        [self->_captureOutput setAlwaysDiscardsLateVideoFrames:true];

    dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
    [self->_captureOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);

    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];

    [self->_captureOutput setVideoSettings:videoSettings];
    self->_captureSession = [[AVCaptureSession alloc] init];

    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16);
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF);

    _buffer_temp = (uint8_t*)malloc(width * height * 4);
    _buffer_obox = (uint8_t*)malloc(width * height * 4);

    // 0.75 is the fixed 4:3 aspect ratio of the cropped sub-image
    switch (_frametype) {
        case tkVideoFrame480x360:
        {
            _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
            _subImageRect = CGRectMake((640 - 360) / 2, 0, 360, 480);
            break;
        }
        case tkVideoFrame720x540:
        {
            _captureSession.sessionPreset = AVCaptureSessionPresetiFrame1280x720;
            _subImageRect = CGRectMake((1280 - 540) / 2, 0, 540, 720);
            break;
        }
        default:
            break;
    }

    if (self->_captureInput != nil)
        [self->_captureSession addInput:self->_captureInput];

    [self->_captureSession addOutput:self->_captureOutput];

    self->_capturePreview = [AVCaptureVideoPreviewLayer layerWithSession:self->_captureSession];
    self->_capturePreview.frame = self->_preview.bounds; // CGRectMake(100, 0, 100, 100);
    self->_capturePreview.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self->_capturePreview.connection.videoOrientation = [self getOrientation];

    [self->_preview.layer addSublayer:self->_capturePreview];

    _buffer_lock = [[TKLock alloc] init];
    [_buffer_lock open];
}

- (bool) create:(UIView*)preview frame:(TKVideoFrameSize)type
{
    self->_frametype = type;
    self->_preview = preview;

    [self performSelectorOnMainThread:@selector(do_create) withObject:self waitUntilDone:true];

    return true;
}

- (AVCaptureVideoOrientation) getOrientation
{
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    switch (orientation)
    {
        case UIInterfaceOrientationPortrait:           return AVCaptureVideoOrientationPortrait;
        case UIInterfaceOrientationPortraitUpsideDown: return AVCaptureVideoOrientationPortraitUpsideDown;
        case UIInterfaceOrientationLandscapeLeft:      return AVCaptureVideoOrientationLandscapeLeft;
        case UIInterfaceOrientationLandscapeRight:     return AVCaptureVideoOrientationLandscapeRight;
        default:                                       break;
    }
    return AVCaptureVideoOrientationLandscapeLeft;
}

- (void) do_destory
{
    [_buffer_lock close];
    [_buffer_lock release];
    _buffer_lock = nil;

    free(_buffer_temp);
    free(_buffer_obox);
    _buffer_temp = NULL;
    _buffer_obox = NULL;

    [self->_captureSession stopRunning];
    [self->_capturePreview removeFromSuperlayer];
    [self->_captureOutput release];
    [self->_captureSession release];
    self->_captureSession = nil;
    self->_capturePreview = nil;
    self->_captureOutput = nil;
    self->_captureDevice = nil;
    self->_captureInput = nil;
    self->_preview = nil;
}

- (bool) destory
{
    [self performSelectorOnMainThread:@selector(do_destory) withObject:self waitUntilDone:true];
    return true;
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t* baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);

    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);

    // wrap the BGRA pixel buffer in a CGImage without copying it
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, baseAddress, bytesPerRow * height, NULL);

    CGImageRef imageRef = CGImageCreate(width, height, 8, 32, bytesPerRow, colorSpace,
                                        kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst,
                                        provider, NULL, false, kCGRenderingIntentDefault);

    // crop the centered sub-rectangle out of the full frame
    CGImageRef subImageRef = CGImageCreateWithImageInRect(imageRef, _subImageRect);

    // the sub-image gets rotated by 90°, so width and height are swapped in the target context
    size_t subWidth  = _subImageRect.size.height;
    size_t subHeight = _subImageRect.size.width;

    CGContextRef context = CGBitmapContextCreate(NULL, subWidth, subHeight,
                                                 CGImageGetBitsPerComponent(subImageRef), 0,
                                                 CGImageGetColorSpace(subImageRef),
                                                 CGImageGetBitmapInfo(subImageRef));

    // rotate the cropped image by -90° while drawing it into the context
    CGContextTranslateCTM(context, 0, subHeight);
    CGContextRotateCTM(context, -M_PI / 2);
    CGContextDrawImage(context, CGRectMake(0, 0, subHeight, subWidth), subImageRef);

    uint8_t* data = (uint8_t*)CGBitmapContextGetData(context);

    [_buffer_lock lock];
    memcpy(_buffer_temp, data, subWidth * subHeight * 4);
    [_buffer_lock unlock];

    CGContextRelease(context);
    CGImageRelease(imageRef);
    CGImageRelease(subImageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

- (void) do_start
{
    [self->_captureSession startRunning];
}

- (void) do_stop
{
    [self->_captureSession stopRunning];
}

- (bool) start
{
    [self performSelectorOnMainThread:@selector(do_start) withObject:self waitUntilDone:true];
    return true;
}

- (bool) stop
{
    [self performSelectorOnMainThread:@selector(do_stop) withObject:self waitUntilDone:true];
    return true;
}

- (uint8_t*) get_image_rgb32:(uint32_t*)length
{
    uint16_t width  = (uint16_t)(((uint32_t)_frametype) >> 16);
    uint16_t height = (uint16_t)(((uint32_t)_frametype) & 0xFFFF);

    // copy the most recent frame out of the capture buffer
    [_buffer_lock lock];
    memcpy(_buffer_obox, _buffer_temp, width * height * 4);
    [_buffer_lock unlock];

    if (length)
        *length = width * height * 4;

    return _buffer_obox;
}

@end
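The TKLock class used above comes from the previous article and is not reproduced here. If you only want this file to compile, a minimal stand-in that covers the open / close / lock / unlock calls used above could look like the sketch below; this is my own NSLock-based guess, not the original TKLock, and the real class may well differ.

#import <Foundation/Foundation.h>

// Minimal stand-in for TKLock (assumption: a thin wrapper around NSLock).
@interface TKLock : NSObject
{
    NSLock* _lock;   // backing lock
}
- (void) open;
- (void) close;
- (void) lock;
- (void) unlock;
@end

@implementation TKLock

- (void) open   { _lock = [[NSLock alloc] init]; }
- (void) close  { [_lock release]; _lock = nil; }
- (void) lock   { [_lock lock]; }
- (void) unlock { [_lock unlock]; }

- (void) dealloc
{
    [_lock release];   // no-op if close was already called
    [super dealloc];
}

@end

Any other mutual-exclusion primitive (a dispatch semaphore, @synchronized) would serve the same purpose here: it only has to protect the memcpy into _buffer_temp against the memcpy out of it in get_image_rgb32:.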