  • Capturing audio and video input
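
    The snippets below assume a view controller that strongly holds the session objects and adopts both sample-buffer delegate protocols. A minimal sketch of that interface (the class name ViewController and the testView outlet are assumptions made so the later code compiles):

    @import UIKit;
    @import AVFoundation;

    @interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate,
                                  AVCaptureAudioDataOutputSampleBufferDelegate>
    {
        AVCaptureSession *_captureSession;              // capture session; must be strongly referenced
        AVCaptureDeviceInput *_currentVideoDeviceInput; // current camera input
        AVCaptureConnection *_videoConnection;          // video connection, used to tell video from audio
        AVCaptureVideoPreviewLayer *_previewLayer;      // live preview layer
    }
    @property (weak, nonatomic) IBOutlet UIView *testView; // focus cursor view
    @end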

    // Capture audio and video
    - (void)setupCaptureVideo
    {
        // 1. Create the capture session. It must be strongly referenced, or it will be deallocated.
        /*
         The session coordinates the flow of data from inputs to outputs
         and is the app's handle on the capture hardware.
         How it works: the session sits between the app and the hardware. Once the hardware input and output objects are added to the session, it automatically connects them, so the devices can stream audio and video data.
         Real-life analogy: tenant (inputs money) -> agent (session) -> landlord (outputs housing); the agent is what connects tenant and landlord.
         */
        AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
        _captureSession = captureSession;
        
        // 2. Get the AVCaptureDevice objects: the camera for video and the microphone for audio. Note that a device object does not feed data itself; it is only used to configure the hardware.
        // 2.1 Get the camera device (the front camera here; the system default is the back camera)
        AVCaptureDevice *videoDevice = [self getVideoDevice:AVCaptureDevicePositionFront];
        // 2.2 Get the audio device
        AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
        
        // 3. Create the input objects (AVCaptureDeviceInput) from the hardware devices (AVCaptureDevice); these are what actually feed data into the session.
        // 3.1 Create the video device input
        AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:nil];
        _currentVideoDeviceInput = videoDeviceInput;
        // 3.2 Create the audio device input
        AVCaptureDeviceInput *audioDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:nil];
        
        // 4. Add the inputs to the session
        // Note: check canAddInput: first; the session cannot accept a nil input
        // 4.1 Add the video input
        if ([captureSession canAddInput:videoDeviceInput]) {
            [captureSession addInput:videoDeviceInput];
        }
        // 4.2 Add the audio input
        if ([captureSession canAddInput:audioDeviceInput]) {
            [captureSession addInput:audioDeviceInput];
        }
        
        // 5. Create the video data output; setting its sample buffer delegate is how we receive the captured video data
        AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
        // 5.1 Set the delegate that captures video sample buffers
        // Note: the queue must not be NULL; use a serial queue so sample buffers are delivered in order
        dispatch_queue_t videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
        [videoOutput setSampleBufferDelegate:self queue:videoQueue];
        if ([captureSession canAddOutput:videoOutput]) {
            [captureSession addOutput:videoOutput];
        }
        
        // 6. Create the audio data output; setting its sample buffer delegate is how we receive the captured audio data
        AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
        // 6.1 Set the delegate that captures audio sample buffers
        // Note: as above, the queue must not be NULL and should be serial
        dispatch_queue_t audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
        [audioOutput setSampleBufferDelegate:self queue:audioQueue];
        if ([captureSession canAddOutput:audioOutput]) {
            [captureSession addOutput:audioOutput];
        }
        
        /*
         Once the input objects (AVCaptureDeviceInput) and output objects (AVCaptureOutput) have been added to the session (AVCaptureSession), it automatically connects the audio input to the audio output and the video input to the video output.
         */
        
        // 7. Get the connection between the video input and output, used later to tell video data apart from audio data
        _videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
        
        // 8. Add the video preview layer
        AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:captureSession];
        previewLayer.frame = [UIScreen mainScreen].bounds;
        [self.view.layer insertSublayer:previewLayer atIndex:0];
        _previewLayer = previewLayer;
        
        // 9. Start the session; data only flows from inputs to outputs while it is running. (startRunning blocks until the session starts, so Apple recommends calling it off the main thread.)
        [captureSession startRunning];
    }
    
    // Get the camera device for the given position
    - (AVCaptureDevice *)getVideoDevice:(AVCaptureDevicePosition)position
    {
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
        for (AVCaptureDevice *device in devices) {
            if (device.position == position) {
                return device;
            }
        }
        return nil;
    }
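
    Note: devicesWithMediaType: has been deprecated since iOS 10. A possible modern replacement is AVCaptureDeviceDiscoverySession; the sketch below is an illustration (the method name discoverVideoDevice: and the device-type list are our own choices, not from the original post):

    - (AVCaptureDevice *)discoverVideoDevice:(AVCaptureDevicePosition)position
    {
        AVCaptureDeviceDiscoverySession *discovery =
            [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                                   mediaType:AVMediaTypeVideo
                                                                    position:position];
        // Return the first camera matching the requested position, or nil if none is available
        return discovery.devices.firstObject;
    }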
    
    //  1. Switch between the front and back cameras
    - (IBAction)changeVideoPosition:(UIButton *)sender {
        
        CATransition *animation = [CATransition animation];
        animation.duration = .5f;
        animation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionEaseInEaseOut];
        // Note: @"cube" is a private, undocumented transition type
        animation.type = @"cube";
        
        //  1. Get the position of the current video device input
        AVCaptureDevicePosition curPosition = _currentVideoDeviceInput.device.position;
        
        //  2. Work out the target position: if the current camera is the back one, switch to the front, and vice versa
        AVCaptureDevicePosition togglePosition = curPosition == AVCaptureDevicePositionBack ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
        
        //  3. Get the camera device for the target position
        AVCaptureDevice *toggleDevice = [self getVideoDevice:togglePosition];
        
        //  4. Create the input object for the new camera
        AVCaptureDeviceInput *toggleDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:toggleDevice error:nil];
        
        //  5. Remove the old camera input from the session
        [_captureSession removeInput:_currentVideoDeviceInput];
        
        //  6. Add the new camera input
        [_captureSession addInput:toggleDeviceInput];
        
        //  7. Remember the current camera input
        _currentVideoDeviceInput = toggleDeviceInput;
        
        //  Add the transition animation
        [_previewLayer addAnimation:animation forKey:nil];
    }
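
    When swapping inputs on a running session, it is safer to wrap steps 5-6 above in beginConfiguration/commitConfiguration and to check canAddInput: first, so a failed add does not leave the session without a camera. A hedged sketch of that variant:

        [_captureSession beginConfiguration];
        [_captureSession removeInput:_currentVideoDeviceInput];
        if ([_captureSession canAddInput:toggleDeviceInput]) {
            [_captureSession addInput:toggleDeviceInput];
            _currentVideoDeviceInput = toggleDeviceInput;
        } else {
            // Roll back to the previous camera input if the new one cannot be added
            [_captureSession addInput:_currentVideoDeviceInput];
        }
        [_captureSession commitConfiguration];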
    
    //  2. Focus cursor
    //  Tap the screen to show the focus indicator at the tapped point
    -(void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event{
        //  Get the tap location
        UITouch *touch = [touches anyObject];
        CGPoint point = [touch locationInView:self.view];
        NSLog(@"%f---%f",point.x,point.y);
        //  Convert the view coordinate into a point of interest in the camera's coordinate space
        CGPoint cameraPoint = [_previewLayer captureDevicePointOfInterestForPoint:point];
        
        //  Position the focus cursor
        [self setFocusCursorWithPoint:point];
        
        //  Apply focus and exposure at that point
        [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:cameraPoint];
    }
    
    /**
     *  Set focus and exposure
     */
    -(void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point{
        
        AVCaptureDevice *captureDevice = _currentVideoDeviceInput.device;
        // Lock the device for configuration; bail out if the lock fails
        NSError *error = nil;
        if (![captureDevice lockForConfiguration:&error]) {
            NSLog(@"lockForConfiguration failed: %@", error);
            return;
        }
        
        // Set focus: point of interest first, then the mode, so the new point takes effect
        if ([captureDevice isFocusPointOfInterestSupported]) {
            [captureDevice setFocusPointOfInterest:point];
        }
        if ([captureDevice isFocusModeSupported:focusMode]) {
            [captureDevice setFocusMode:focusMode];
        }
        
        // Set exposure the same way: point of interest first, then the mode
        if ([captureDevice isExposurePointOfInterestSupported]) {
            [captureDevice setExposurePointOfInterest:point];
        }
        if ([captureDevice isExposureModeSupported:exposureMode]) {
            [captureDevice setExposureMode:exposureMode];
        }
        
        // Unlock the configuration
        [captureDevice unlockForConfiguration];
    }
    
    
    /**
     *  Position the focus cursor
     *
     *  @param point cursor position
     */
    -(void)setFocusCursorWithPoint:(CGPoint)point{
        self.testView.center=point;
        self.testView.transform=CGAffineTransformMakeScale(1.5, 1.5);
        self.testView.alpha=1.0;
        [UIView animateWithDuration:1.0 animations:^{
            self.testView.transform=CGAffineTransformIdentity;
        } completion:^(BOOL finished) {
            self.testView.alpha=0;
        }];
    }
    
    
    
    #pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate
    // Called for every captured sample buffer; it may contain audio or video data
    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        if (_videoConnection == connection) {
            NSLog(@"Captured video data");
        } else {
            NSLog(@"Captured audio data");
        }
    }
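
    To actually consume the captured video, you would typically pull the CVPixelBuffer out of each video sample buffer inside this callback. A minimal sketch of that step (the logging is a placeholder for real processing such as encoding):

        if (_videoConnection == connection) {
            // Extract the pixel buffer that backs this video frame
            CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            if (pixelBuffer != NULL) {
                size_t width  = CVPixelBufferGetWidth(pixelBuffer);
                size_t height = CVPixelBufferGetHeight(pixelBuffer);
                NSLog(@"video frame: %zu x %zu", width, height);
                // ... hand the frame to an encoder or renderer here ...
            }
        }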
    

      
