  • AVFoundation

    1: Basics

    /*
     1: Get an available input device: AVCaptureDevice
     2: Create the device input: [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil]
     3: Create the output: AVCaptureStillImageOutput, or AVCapturePhotoOutput on iOS 10 and later
     4: Set up the session: initialize AVCaptureSession, set its preset, and add the input and output
     5: Preview layer: AVCaptureVideoPreviewLayer, + (instancetype)layerWithSessionWithNoConnection:(AVCaptureSession *)session
     6: Get the photo: AVCapturePhotoOutput delivers the image through a delegate, AVCaptureStillImageOutput through a block
     
     https://blog.csdn.net/vkooy/article/details/60867268
     1) Switching between the front and back cameras
     
       The camera refused to switch to the front position, and after a lot of trial and error I could not find the reason. It turned out I had set the session preset to 1080p ([self.session canSetSessionPreset:AVCaptureSessionPreset1920x1080]), and the front camera does not support a resolution that high, so the switch failed. In my tests the front camera supports at most 720p; at 720p or below the cameras switch freely. You can also pick a different preset for each camera at the moment you switch, as in the first sketch after these notes.
     2) Focus point
     
       CGPoint focusPoint = CGPointMake( point.y /size.height ,1-point.x/size.width );
     The point passed to setExposurePointOfInterest: (and setFocusPointOfInterest:) ranges from (0,0) at the top-left of the frame to (1,1) at the bottom-right. The documentation says:
       The value of this property is a CGPoint that determines the receiver's focus point of interest, if it has one. A value of (0,0) indicates that the camera should focus on the top left corner of the image, while a value of (1,1) indicates that it should focus on the bottom right. The default value is (0.5,0.5).
       I tried it exactly as documented, but the focus landed in the wrong place; only the conversion above worked for me: the x coordinate is the tap's y divided by the preview layer's height, and the y coordinate is 1 minus the tap's x divided by the preview layer's width. (The point of interest is expressed in the sensor's landscape coordinate space, which is why a portrait preview needs this rotation; the second sketch after these notes shows an alternative that lets the preview layer do the conversion.)
     3) Focus and exposure
     
       I originally set the mode with setFocusMode first and the focus point second, which produced an odd effect: the camera focused on the previously tapped position. So always set the point of interest first, then set the focus mode.
       The same applies to exposure.
     */
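
    A minimal sketch of point 1), choosing a session preset based on the camera position at switch time. The captureSession property matches the code in section 2; the 720p ceiling for the front camera is only what I observed on one device, not a guarantee. The helper is meant to be called from FF_ChangeCaptureDevice, between beginConfiguration and commitConfiguration, after the new input has been added:

    /// Hypothetical helper: pick a preset the newly selected camera can actually run.
    - (void)FF_ApplyPresetForPosition:(AVCaptureDevicePosition)position {
        NSString *preferred = (position == AVCaptureDevicePositionBack)
            ? AVCaptureSessionPreset1920x1080
            : AVCaptureSessionPreset1280x720; // front camera assumed to top out at 720p
        if ([self.captureSession canSetSessionPreset:preferred]) {
            [self.captureSession setSessionPreset:preferred];
        } else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
        }
    }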
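
    As an alternative to the manual conversion in point 2), AVCaptureVideoPreviewLayer can map a layer point into the device's point-of-interest space itself via captureDevicePointOfInterestForPoint:. This is a different technique from the one I used above; a sketch, assuming the previewLayer and captureDevice properties from section 2:

    /// Focus at a tap expressed in the preview layer's own coordinates.
    - (void)FF_FocusAtLayerPoint:(CGPoint)layerPoint {
        // The preview layer accounts for its videoGravity and orientation when converting.
        CGPoint devicePoint = [self.previewLayer captureDevicePointOfInterestForPoint:layerPoint];
        NSError *error = nil;
        if (![self.captureDevice lockForConfiguration:&error]) { return; }
        if (self.captureDevice.isFocusPointOfInterestSupported &&
            [self.captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            self.captureDevice.focusPointOfInterest = devicePoint;        // point first...
            self.captureDevice.focusMode = AVCaptureFocusModeAutoFocus;   // ...then mode (note 3)
        }
        [self.captureDevice unlockForConfiguration];
    }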

    2: Code

    #import "FFCameraHelper.h"
    #define FFCameraHelperScreenWidth  ([UIScreen mainScreen].bounds.size.width)
    #define FFCameraHelperScreenHeight ([UIScreen mainScreen].bounds.size.height)
    #define FFCameraHelperFocusLayerWidth (80)
    
    @interface FFCameraHelper ()<AVCapturePhotoCaptureDelegate>
    /// Available devices
    @property (nonatomic, strong) NSArray<AVCaptureDevice *> *captureDeviceList;
    /// Device currently being used for capture
    @property (nonatomic, strong) AVCaptureDevice            *captureDevice;
    /// Device input
    @property (nonatomic, strong) AVCaptureDeviceInput       *captureInput;
    /// Photo output (iOS 10.0 and later)
    @property (nonatomic, strong) AVCapturePhotoOutput       *photoOutput;
    /// Metadata output
    @property (nonatomic, strong) AVCaptureMetadataOutput    *metaDataOutput;
    
    /// Still image output (below iOS 10.0)
    @property (nonatomic, strong) AVCaptureStillImageOutput  *imageOutput;
    
    /// Session: ties the input and output together and drives the device
    @property (nonatomic, strong) AVCaptureSession           *captureSession;
    
    /// Focus indicator layer
    @property (nonatomic, strong) CAShapeLayer               *focusLayer;
    
    @end
    
    @implementation FFCameraHelper
    - (instancetype)init
    {
        self = [super init];
        if (self) {
            [self FF_ReqeuestAuthortion];
            /// Only set up immediately if camera access is already authorized
            AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
            if (status == AVAuthorizationStatusAuthorized) {
                [self FF_initSetUp];
            }
        }
        return self;
    }
    
    
    
    /// Request camera authorization
    - (void)FF_ReqeuestAuthortion {
        AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
        if (status == AVAuthorizationStatusNotDetermined) {
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                // The handler may run on an arbitrary queue; finish setup on the main queue once
                // access is granted, otherwise init leaves the helper unconfigured on first launch.
                if (granted) {
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [self FF_initSetUp];
                    });
                }
            }];
        }
    }
    
    - (void)FF_initSetUp {
        // 1: Pick the back camera
        [self FF_AcquireNeedDevice:AVCaptureDevicePositionBack];
        // 2: Create the device input
        self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil];
        // 3: Create the photo output
        if (@available(iOS 10.0, *)) {
            self.photoOutput = [AVCapturePhotoOutput new];
        }else {
            self.imageOutput = [AVCaptureStillImageOutput new];
        }
        // 4: Configure the session
        self.captureSession = [[AVCaptureSession alloc] init];
        if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            [self.captureSession setSessionPreset:AVCaptureSessionPresetHigh];
        }else if ([self.captureSession canSetSessionPreset:AVCaptureSessionPresetMedium]) {
            [self.captureSession setSessionPreset:AVCaptureSessionPresetMedium];
        }
        if ([self.captureSession canAddInput:self.captureInput]) {
            [self.captureSession addInput:self.captureInput];
        }
        if (@available(iOS 10.0, *)) {
            if ([self.captureSession canAddOutput:self.photoOutput]) {
                [self.captureSession addOutput:self.photoOutput];
            }
        }else if ([self.captureSession canAddOutput:self.imageOutput]) {
            [self.captureSession addOutput:self.imageOutput];
        }
        // 5: Preview layer
        self.previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
        self.previewLayer.frame = CGRectMake(0, 0, FFCameraHelperScreenWidth, FFCameraHelperScreenHeight);
        self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    }
    
    /**
     Switch between the front and back cameras
     */
    - (void)FF_ChangeCaptureDevice {
        [self FF_PreviewLayerTransform:self.captureDevice.position == AVCaptureDevicePositionBack];
        // Toggle the position; if there is no current device, leave it unchanged.
        if (self.captureDevice.position == AVCaptureDevicePositionBack) {
            [self FF_AcquireNeedDevice:AVCaptureDevicePositionFront];
        }else if (self.captureDevice != nil) {
            [self FF_AcquireNeedDevice:AVCaptureDevicePositionBack];
        }
        [self.captureSession beginConfiguration];
        [self.captureSession removeInput:self.captureInput];
        self.captureInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:nil];
        if ([self.captureSession canAddInput:self.captureInput]) {
            [self.captureSession addInput:self.captureInput];
        }
        [self.captureSession commitConfiguration];
    }
    
    /// Flip animation for the front/back camera switch
    - (void)FF_PreviewLayerTransform:(BOOL)isBack {
    //    Transition types (type) provided by the system:
    //    kCATransitionFade      fade
    //    kCATransitionMoveIn    new content moves in over the old
    //    kCATransitionPush      new content pushes the old out
    //    kCATransitionReveal    old content slides away to reveal the new
    //    The private types below have no constants and must be set by string, i.e. [transition setType:@"..."]:
    //    pageCurl               page curls up
    //    pageUnCurl             page uncurls down
    //    rippleEffect           water-ripple effect
    //    suckEffect             content is sucked away, like cloth being pulled off
    //    cube                   cube effect
    //    oglFlip                flip effect
        [self.previewLayer removeAllAnimations];
        CATransition *transition = [CATransition animation];
        [transition setType:@"oglFlip"];
        transition.subtype = isBack ? kCATransitionFromRight : kCATransitionFromLeft;
        transition.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
        transition.duration = 0.27;
        [self.previewLayer addAnimation:transition forKey:@"flip"];
        
    }
    
    /**
     Start the capture session
     */
    - (void)FF_StartCapture {
        [self.captureSession startRunning];
    }
    
    
    - (void)FF_AcquireNeedDevice:(AVCaptureDevicePosition)position {
        self.captureDevice = nil;
        if (@available(iOS 10.0, *)) {
            self.captureDeviceList = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position].devices;
            for (AVCaptureDevice *device in self.captureDeviceList) {
                if (device.position == position) {
                    self.captureDevice = device;
                    return;
                }
            }
        }else {
            self.captureDeviceList = [AVCaptureDevice devices];
            for (AVCaptureDevice *device in self.captureDeviceList) {
                if (device.position == position) {
                    self.captureDevice = device;
                    return;
                }
            }
        }
    }
    
    /// Capture a photo
    - (void)FF_AcquireNeedImage {
        AVCaptureConnection *connection = nil;
        // The photo connection carries video, not audio.
        if (@available(iOS 10.0, *)) {
            connection = [self.photoOutput connectionWithMediaType:AVMediaTypeVideo];
        }else {
            connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
        }
        if (!connection) {
            NSLog(@"Capture failed: no video connection");
            return;
        }
        if (@available(iOS 10.0, *)) {
            [self.photoOutput capturePhotoWithSettings:[AVCapturePhotoSettings photoSettings] delegate:self];
        }else {
            [self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef  _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
                if (imageDataSampleBuffer == nil) {
                    return;
                }
                NSData *data = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
                UIImage *img = [UIImage imageWithData:data];
                // Deliver the image from inside the completion handler; the capture is asynchronous,
                // so calling the delegate outside the block would pass nil.
                if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
                    [self.delegate FF_CaptureImage:img];
                }
            }];
        }
    }
    
    /**
     Photo capture callback on iOS 11.0 and later
    */
    - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhoto:(AVCapturePhoto *)photo error:(nullable NSError *)error {
        if (error == nil) {
            NSData *data = [photo fileDataRepresentation];
            UIImage *img = [UIImage imageWithData:data];
            if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
                [self.delegate FF_CaptureImage:img];
            }
        }
    }
    /**
     Photo capture callback on iOS 10 (deprecated since iOS 11)
     */
    - (void)captureOutput:(AVCapturePhotoOutput *)output didFinishProcessingPhotoSampleBuffer:(nullable CMSampleBufferRef)photoSampleBuffer previewPhotoSampleBuffer:(nullable CMSampleBufferRef)previewPhotoSampleBuffer resolvedSettings:(AVCaptureResolvedPhotoSettings *)resolvedSettings bracketSettings:(nullable AVCaptureBracketedStillImageSettings *)bracketSettings error:(nullable NSError *)error {
        if (error) {
            NSLog(@"%@", error.localizedDescription);
        }else {
            NSData *data = [AVCapturePhotoOutput JPEGPhotoDataRepresentationForJPEGSampleBuffer:photoSampleBuffer previewPhotoSampleBuffer:previewPhotoSampleBuffer];
            UIImage *img = [UIImage imageWithData:data];
            if (self.delegate && [self.delegate respondsToSelector:@selector(FF_CaptureImage:)]) {
                [self.delegate FF_CaptureImage:img];
            }
        }
    }
    
    /**
     Set up focus and exposure at a tap point (still has issues; see the notes in section 1 on coordinate conversion and ordering)
    
     @param point The tap point, in the preview layer's coordinates
     */
    - (void)FF_SetupFocusAndWhiteBalance:(CGPoint)point {
        NSError *error = nil;
        // lockForConfiguration: returns NO on failure; check the result rather than the error object.
        if (![self.captureDevice lockForConfiguration:&error]) {
            NSLog(@"%@", error.localizedDescription);
            return;
        }
        /// White balance
        if ([self.captureDevice isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeAutoWhiteBalance]) {
            self.captureDevice.whiteBalanceMode = AVCaptureWhiteBalanceModeAutoWhiteBalance;
        }
        /// Flash
        if (@available(iOS 10.0, *)) {
            // On iOS 10+, AVCaptureDevice.flashMode is deprecated; flash is requested per shot
            // via AVCapturePhotoSettings (see the sketch after this implementation).
        }else {
            if ([self.captureDevice isFlashModeSupported:AVCaptureFlashModeAuto]) {
                self.captureDevice.flashMode = AVCaptureFlashModeAuto;
            }
        }
        /// Focus: set the point of interest before the mode (see note 3 in section 1)
        if ([self.captureDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus]) {
            self.captureDevice.focusPointOfInterest = CGPointMake(point.y / FFCameraHelperScreenHeight, 1 - point.x / FFCameraHelperScreenWidth);
            self.captureDevice.focusMode = AVCaptureFocusModeAutoFocus;
        }
        /// Exposure: same ordering, point of interest first, then the mode
        if ([self.captureDevice isExposureModeSupported:AVCaptureExposureModeAutoExpose]) {
            self.captureDevice.exposurePointOfInterest = CGPointMake(point.y / FFCameraHelperScreenHeight, 1 - point.x / FFCameraHelperScreenWidth);
            self.captureDevice.exposureMode = AVCaptureExposureModeAutoExpose;
        }
        [self.captureDevice unlockForConfiguration];
        /// Focus animation (earlier attempts, left commented out)
    //    self.focusLayer.hidden = NO;
    //    [UIView animateWithDuration:3 animations:^{
    //        UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 60, point.y - 60, 120, 120)];
    //        self.focusLayer.path = path.CGPath;
    //    } completion:^(BOOL finished) {
    //        UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - 40, point.y - 40, 80, 80)];
    //        self.focusLayer.path = path.CGPath;
    //    }];
    
    //    [UIView animateWithDuration:1 animations:^{
    //        self.focusLayer.transform = CATransform3DMakeScale(1.25, 1.25, 1.0);
    //    } completion:^(BOOL finished) {
    //        [UIView animateWithDuration:1 animations:^{
    //            self.focusLayer.transform = CATransform3DIdentity;
    //            self.focusLayer.hidden = YES;
    //            [self.focusLayer removeFromSuperlayer];
    //        }];
    //    }];
    
        
    }
    
    /**
     Focus indicator box drawn at the tap point
    */
    - (void)FF_SetUpFocusLayerWithPoint:(CGPoint)point {
        if (self.focusLayer == nil) {
            self.focusLayer = [CAShapeLayer layer];
            self.focusLayer.strokeColor = [UIColor lightGrayColor].CGColor;
            self.focusLayer.fillColor = [UIColor clearColor].CGColor;
            self.focusLayer.lineDashPhase = 2;
            self.focusLayer.lineWidth = 1;
            self.focusLayer.lineCap = kCALineCapRound;
        }else {
            [self.focusLayer removeFromSuperlayer];
        }
        CGFloat radius = FFCameraHelperFocusLayerWidth / 2;
        // Clamp the box horizontally to the screen edges.
        if (point.x - radius < 0) {
            point.x = radius;
        }else if (point.x + radius > FFCameraHelperScreenWidth) {
            point.x = FFCameraHelperScreenWidth - radius;
        }
        
        // Clamp vertically; 22 and 200 presumably keep the box below the status bar
        // and above the bottom control area of this particular UI.
        if (point.y - radius < 22) {
            point.y = radius + 22;
        }else if (point.y + radius + 200 > FFCameraHelperScreenHeight) {
            point.y = FFCameraHelperScreenHeight - 200 - radius;
        }
        
        UIBezierPath *path = [UIBezierPath bezierPathWithRect:CGRectMake(point.x - radius, point.y - radius, radius * 2, radius * 2)];
        self.focusLayer.path = path.CGPath;
        [self.previewLayer addSublayer:self.focusLayer];
        [self FF_SetupFocusAndWhiteBalance:point];
    }
    
    @end
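
    The iOS 10+ path above always captures with default settings. On that path, flash is requested per shot on the AVCapturePhotoSettings object rather than via the device's flashMode; a sketch of how the capture call in FF_AcquireNeedImage could be adapted (auto flash is just an example choice):

    // Inside FF_AcquireNeedImage, replacing the plain [AVCapturePhotoSettings photoSettings] call:
    if (@available(iOS 10.0, *)) {
        AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
        if ([self.photoOutput.supportedFlashModes containsObject:@(AVCaptureFlashModeAuto)]) {
            settings.flashMode = AVCaptureFlashModeAuto;   // ask for auto flash on this shot
        }
        [self.photoOutput capturePhotoWithSettings:settings delegate:self];
    }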
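
    For context, a sketch of how a view controller might drive FFCameraHelper. The previewLayer and delegate properties and the FF_CaptureImage: callback are taken from the code above; the protocol name FFCameraHelperDelegate and the controller itself are assumed, since the header is not shown:

    #import "FFCameraHelper.h"

    // FFCameraHelperDelegate is an assumed protocol name (the real one lives in FFCameraHelper.h).
    @interface FFCameraViewController : UIViewController <FFCameraHelperDelegate>
    @property (nonatomic, strong) FFCameraHelper *cameraHelper;
    @end

    @implementation FFCameraViewController
    - (void)viewDidLoad {
        [super viewDidLoad];
        self.cameraHelper = [[FFCameraHelper alloc] init];
        self.cameraHelper.delegate = self;
        // Show the full-screen preview and start the session.
        [self.view.layer addSublayer:self.cameraHelper.previewLayer];
        [self.cameraHelper FF_StartCapture];
    }

    /// Wire this to a shutter button.
    - (void)takePhoto {
        [self.cameraHelper FF_AcquireNeedImage];
    }

    /// Delivered by the helper from either the iOS 10+ or the legacy capture path.
    - (void)FF_CaptureImage:(UIImage *)image {
        NSLog(@"captured image of size %@", NSStringFromCGSize(image.size));
    }
    @end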
  • Original article: https://www.cnblogs.com/jisa/p/9506336.html