  • Qt for iOS: implementing QR code scanning

    Problem:

    Our project has always been built with Qt because it needs to be cross-platform, and a new requirement called for a QR code scanning feature. On Android this is straightforward: QVideoProbe grabs video frames from the camera and QZxing decodes them. On iOS, however, QVideoProbe is not supported, so another way to capture frames was needed. Using OpenCV to grab the frames was considered, but the iOS documentation shows that iOS 7 and later support QR code scanning natively, so the OpenCV capture + ZXing decode approach was dropped in favour of the decoding that AVFoundation provides.
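
    For reference, the Android-side pipeline described above might look roughly like the sketch below. It is not from the original project: it assumes the third-party QZXing library and its decodeImage() call, the helper class name QrProbe is made up, and error handling and exotic pixel formats are skipped.

    #include <QCamera>
    #include <QVideoProbe>
    #include <QVideoFrame>
    #include <QImage>
    #include <QZXing.h>

    // Taps the camera's video frames with QVideoProbe and tries to decode
    // each frame as a QR code with QZXing.
    class QrProbe : public QObject
    {
        Q_OBJECT
    public:
        explicit QrProbe(QCamera *camera, QObject *parent = nullptr)
            : QObject(parent)
        {
            m_probe.setSource(camera);   // returns false if probing is unsupported
            connect(&m_probe, &QVideoProbe::videoFrameProbed,
                    this, &QrProbe::handleFrame);
        }

    signals:
        void qrCodeDecoded(const QString &text);

    private slots:
        void handleFrame(const QVideoFrame &frame)
        {
            QVideoFrame copy(frame);
            if (!copy.map(QAbstractVideoBuffer::ReadOnly))
                return;

            const QImage::Format fmt =
                QVideoFrame::imageFormatFromPixelFormat(copy.pixelFormat());
            if (fmt == QImage::Format_Invalid) {   // e.g. NV21 would need manual conversion
                copy.unmap();
                return;
            }

            // Deep-copy before unmapping, because the temporary QImage wraps the mapped buffer.
            const QImage image = QImage(copy.bits(), copy.width(), copy.height(),
                                        copy.bytesPerLine(), fmt).copy();
            copy.unmap();

            const QString text = m_decoder.decodeImage(image);  // empty string if nothing was decoded
            if (!text.isEmpty())
                emit qrCodeDecoded(text);
        }

    private:
        QVideoProbe m_probe;
        QZXing m_decoder;
    };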

    Implementation:

    Our UI has always been written in QML, but the scanning feature needs the AVFoundation APIs as well as a native iOS view that shows the camera preview and a button for returning to the QML UI, so Objective-C and Qt code have to be combined.

    Straight to the code:

    Additions to the .pro file:

    ios {
        # Objective-C++ sources
        OBJECTIVE_SOURCES += IOSView.mm \
                             IOSCamera.mm

        HEADERS += IOSView.h \
                   IOSCamera.h \
                   IOSCameraViewProtocol.h

        # Link against the AVFoundation framework
        QMAKE_LFLAGS += -framework AVFoundation

        # Private GUI headers are needed for QPlatformNativeInterface
        QT += gui-private
    }

    Re-run qmake to regenerate the Xcode project.

    IOSView.h

    #include <QQuickItem>

    class IOSView : public QQuickItem
    {
        Q_OBJECT
        Q_PROPERTY(QString qrcodeText READ qrcodeText WRITE setQrcodeText NOTIFY qrcodeTextChanged)
            
    public:
        explicit IOSView(QQuickItem *parent = 0);
            
        QString qrcodeText() {
           return m_qrcodeText;
        }
        
        void setQrcodeText(QString text){
            m_qrcodeText = text;
            emit qrcodeTextChanged();
        }
            
       QString m_qrcodeText;
            
            
    public slots:
        void startScan();   // present the native iOS camera scanner view

    private:
        void *m_delegate;   // Objective-C delegate that forwards results back to this object

    signals:
        void qrcodeTextChanged();
        void stopCameraScan();  // emitted when the native scanner is dismissed so the QML UI takes over again
    };
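
    With the header above in place, one way to make the item creatable from QML is to register it with the QML engine at startup. The sketch below is not part of the original post; the module URI "MyApp" and the qrc:/main.qml path are placeholders.

    #include <QGuiApplication>
    #include <QQmlApplicationEngine>
    #include <QtQml>
    #include "IOSView.h"

    int main(int argc, char *argv[])
    {
        QGuiApplication app(argc, argv);

        // Expose IOSView to QML. The QML side can then do
        //   import MyApp 1.0
        //   IOSView { id: scanner; objectName: "scanner" }
        // and call scanner.startScan() from a button handler.
        qmlRegisterType<IOSView>("MyApp", 1, 0, "IOSView");

        QQmlApplicationEngine engine(QUrl(QStringLiteral("qrc:/main.qml")));
        return app.exec();
    }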

    IOSView.mm

    #include <UIKit/UIKit.h>
    #include <QtQuick>
    #include <QtGui>
    #include <QtGui/qpa/qplatformnativeinterface.h>
    #include "IOSView.h"
    #include "IOSCamera.h"
    
    @interface IOSCameraDelegate : NSObject <CameraScanViewProtocol> {
        IOSView *m_iosView;
    }
    @end
    
    @implementation IOSCameraDelegate
    
    - (id) initWithIOSCamera:(IOSView *)iosView
    {
        self = [super init];
        if (self) {
            m_iosView = iosView;
        }
        return self;
    }
    
    - (void)scanCancel {
        emit m_iosView->stopCameraScan();
    }
    
    - (void)scanResult:(NSString *)result {
        m_iosView->setQrcodeText(QString::fromNSString(result));
    }
    
    @end
    
    IOSView::IOSView(QQuickItem *parent) :
        QQuickItem(parent), m_delegate([[IOSCameraDelegate alloc] initWithIOSCamera:this])
    {
    }
        
    void IOSView::startScan()
    {
        // Get the UIView that backs our QQuickWindow:
        UIView *view = static_cast<UIView *>(QGuiApplication::platformNativeInterface()->nativeResourceForWindow("uiview", window()));
        UIViewController *qtController = [[view window] rootViewController];
    
        IOSCamera *iosCamera = [[[IOSCamera alloc] init] autorelease];
        iosCamera.delegate = (id<CameraScanViewProtocol>)m_delegate;
        // Tell the imagecontroller to animate on top:
        [qtController presentViewController:iosCamera animated:YES completion:nil];
        [iosCamera startScan];
    }
    
    

      

    IOSCameraViewProtocol.h

    #import <Foundation/Foundation.h>
    
    @protocol CameraScanViewProtocol <NSObject>
    
    @required
    - (void)scanCancel;
    - (void)scanResult:(NSString *)result;
    
    @end
    

      

    IOSCamera.h

    #import <UIKit/UIKit.h>
    #import <AVFoundation/AVFoundation.h>
    #import "CameraViewProtocol.h"
    
    @interface IOSCamera : UIViewController <AVCaptureMetadataOutputObjectsDelegate>{
        id<CameraScanViewProtocol> delegate;
    }
    @property (retain, nonatomic) IBOutlet UIView *viewPreview;
    - (IBAction)backQtApp:(id)sender;
    
    -(void) startScan;
    
    @property (retain) id<CameraScanViewProtocol> delegate;
    
    @end
    

      

    IOSCamera.mm

    #import "IOSCamera.h"

    
    @interface IOSCamera ()
    @property (nonatomic, strong) AVCaptureSession *captureSession;
    @property (nonatomic, strong) AVCaptureVideoPreviewLayer *videoPreviewLayer;
    @property (nonatomic) BOOL isReading;   // backs the _isReading flag used below
    
    - (BOOL)startReading;
    - (void)stopReading;
    - (void)openQtLayer;
    @end
    
    @implementation IOSCamera
    @synthesize delegate;    // delegate used to hand scan results back to the Qt side
    
    - (void)viewDidLoad {
        [super viewDidLoad];
        // Do any additional setup after loading the view from its nib.
        
        // Initially make the captureSession object nil.
        _captureSession = nil;
        
        // Set the initial value of the flag to NO.
        _isReading = NO;
        
        // (Loading a beep sound effect could go here; that helper is not shown in this post.)
    }
    
    - (void)didReceiveMemoryWarning {
        [super didReceiveMemoryWarning];
        // Dispose of any resources that can be recreated.
    }
    
    - (void)dealloc {
        [_viewPreview release];
        [super dealloc];
    }
    
    - (void)viewDidUnload {
        [self setViewPreview:nil];
        [super viewDidUnload];
    }
    
    - (IBAction)backQtApp:(id)sender {
        [delegate scanCancel];
        [self stopReading];
    }

    -(void) openQtLayer{
        // Bring back Qt's view controller:
        UIViewController *rvc = [[[UIApplication sharedApplication] keyWindow] rootViewController];
        [rvc dismissViewControllerAnimated:YES completion:nil];
    }

    -(void) startScan{
        if (!_isReading) {
            // This is the case where the app should read a QR code when the start button is tapped.
            if ([self startReading]) {
                // If the startReading methods returns YES and the capture session is successfully
                // running, then change the start button title and the status message.
                NSLog(@"Start Reading !!");
            }
        }
    }

    #pragma mark - Private method implementation

    - (BOOL)startReading {
        NSError *error;
        
        // Get an instance of the AVCaptureDevice class to initialize a device object and provide the video
        // as the media type parameter.
        AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
        
        // Get an instance of the AVCaptureDeviceInput class using the previous device object.
        AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
        
        if (!input) {
            // If any error occurs, simply log the description of it and don't continue any more.
            NSLog(@"%@", [error localizedDescription]);
            return NO;
        }
        
        // Initialize the captureSession object.
        _captureSession = [[AVCaptureSession alloc] init];
        // Set the input device on the capture session.
        [_captureSession addInput:input];
        
        
        // Initialize a AVCaptureMetadataOutput object and set it as the output device to the capture session.
        AVCaptureMetadataOutput *captureMetadataOutput = [[AVCaptureMetadataOutput alloc] init];
        [_captureSession addOutput:captureMetadataOutput];
        
        // Create a new serial dispatch queue.
        dispatch_queue_t dispatchQueue;
        dispatchQueue = dispatch_queue_create("myQueue", NULL);
        [captureMetadataOutput setMetadataObjectsDelegate:self queue:dispatchQueue];
        [captureMetadataOutput setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];
        
        // Initialize the video preview layer and add it as a sublayer to the viewPreview view's layer.
        _videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
        [_videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
        [_videoPreviewLayer setFrame:_viewPreview.layer.bounds];
        [_viewPreview.layer addSublayer:_videoPreviewLayer];
        
        
        // Start video capture.
        [_captureSession startRunning];
        _isReading = YES;
        return YES;
    }


    -(void)stopReading{
        // Stop video capture and make the capture session object nil.
        [_captureSession stopRunning];
        _captureSession = nil;
       
       // Remove the video preview layer from the viewPreview view's layer.
       [_videoPreviewLayer removeFromSuperlayer];
       _isReading = NO;
       [self openQtLayer];
    }

    #pragma mark - AVCaptureMetadataOutputObjectsDelegate method implementation

    -(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection{
        
        // Check if the metadataObjects array is not nil and it contains at least one object.
        if (metadataObjects != nil && [metadataObjects count] > 0) {
            // Get the metadata object.
            AVMetadataMachineReadableCodeObject *metadataObj = [metadataObjects objectAtIndex:0];
            if ([[metadataObj type] isEqualToString:AVMetadataObjectTypeQRCode]) {
                // A QR code was found: hand the decoded string back to the Qt
                // side, then stop reading. Note that this delegate callback runs
                // on the serial dispatch queue, so stopReading is pushed to the
                // main thread below.
                [delegate scanResult:[metadataObj stringValue]];   // send the scan result to Qt
                
                [self performSelectorOnMainThread:@selector(stopReading) withObject:nil waitUntilDone:NO];
                
                _isReading = NO;
            }
        }
    }

    @end
     

    OK, that is roughly the whole flow; adding the xib file and so on is not covered here.
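
    On the Qt side, the scan result can be consumed directly in QML (onQrcodeTextChanged / onStopCameraScan handlers) or from C++. Below is one possible C++ wiring, not part of the original post; it assumes the QML root item contains an IOSView with objectName "scanner".

    #include <QQmlApplicationEngine>
    #include <QDebug>
    #include "IOSView.h"

    // Look up the scanner item created in QML and react to its signals.
    void hookUpScanner(QQmlApplicationEngine &engine)
    {
        if (engine.rootObjects().isEmpty())
            return;
        IOSView *scanner = engine.rootObjects().first()->findChild<IOSView *>("scanner");
        if (!scanner)
            return;

        // The iOS delegate calls back from a background dispatch queue, so these
        // connections are delivered as queued connections onto the GUI thread.
        QObject::connect(scanner, &IOSView::qrcodeTextChanged, scanner, [scanner]() {
            qDebug() << "Decoded QR code:" << scanner->qrcodeText();
        });
        QObject::connect(scanner, &IOSView::stopCameraScan, scanner, []() {
            qDebug() << "Scan cancelled, back to the QML UI";
        });
    }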

  • Original post: https://www.cnblogs.com/fuyanwen/p/4428599.html