  • Recognizing License Plate Characters with a Neural Network

    Ronny wrote about using a neural network to recognize license plate characters in his article "OpenCV进阶之路:神经网络识别车牌字符", but unfortunately he only published the sample set and not the detailed code. This post fills that gap.

    Please set up the OpenCV environment yourself. Because the MFC class library is used to enumerate files in the sample subfolders, build the project with "Use MFC in a Shared DLL".

    The code follows:
    #include <afx.h>              // MFC: CFileFind for directory traversal
    #include <opencv2/opencv.hpp> // core, imgproc, highgui, ml (CvANN_MLP)
    #include <iostream>
    #include <string>
    #include <vector>
    using namespace cv;
    using namespace std;

    const char *mlpmodel = "ann.xml";
    // Characters used on Chinese license plates (34 classes)
    const char strCharacters[] = {'0','1','2','3','4','5',
        '6','7','8','9','A','B','C','D','E','F','G','H', /* no 'I' */
        'J','K','L','M','N', /* no 'O' */ 'P','Q','R','S','T',
        'U','V','W','X','Y','Z'};
    float sumMatValue(const Mat &image);   // sum of all pixel values in an image

    // Gradient feature: resize to 8x16, filter in x and y, then record the share of
    // the total gradient response falling into each of 8 cells (4 rows x 2 columns)
    void calcGradientFeat(const Mat &imgSrc, vector<float> &feat)
    {
        Mat image;
        cvtColor(imgSrc, image, CV_BGR2GRAY);
        resize(image, image, Size(8, 16));

        // Sobel-like masks for the x and y directions
        float mask[3][3] = { { 1, 2, 1 }, { 0, 0, 0 }, { -1, -2, -1 } };
        Mat y_mask = Mat(3, 3, CV_32F, mask) / 8;
        Mat x_mask = y_mask.t();   // transpose
        Mat sobelX, sobelY;

        filter2D(image, sobelX, CV_32F, x_mask);
        filter2D(image, sobelY, CV_32F, y_mask);

        sobelX = abs(sobelX);
        sobelY = abs(sobelY);

        float totalValueX = sumMatValue(sobelX);
        float totalValueY = sumMatValue(sobelY);

        // Split the 8x16 image into 4x2 = 8 cells of 4x4 pixels and store the
        // fraction of the total gradient response in each cell
        for (int i = 0; i < image.rows; i += 4)
        {
            for (int j = 0; j < image.cols; j += 4)
            {
                Mat subImageX = sobelX(Rect(j, i, 4, 4));
                feat.push_back(sumMatValue(subImageX) / totalValueX);
                Mat subImageY = sobelY(Rect(j, i, 4, 4));
                feat.push_back(sumMatValue(subImageY) / totalValueY);
            }
        }
    }
     
    // Sum of all pixel values; the Sobel responses above are CV_32F, so read floats
    float sumMatValue(const Mat &image)
    {
        float sumValue = 0;
        int r = image.rows;
        int c = image.cols;
        if (image.isContinuous())
        {
            c = r * c;
            r = 1;
        }
        for (int i = 0; i < r; i++)
        {
            const float *linePtr = image.ptr<float>(i);
            for (int j = 0; j < c; j++)
            {
                sumValue += linePtr[j];
            }
        }
        return sumValue;
    }
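
    // NOTE: calcGradientFeat is not actually wired into readSample below; the post
    // trains on raw resized pixels instead. As a rough illustration (not from the
    // original article), the gradient feature could be turned into a 1 x 16 sample
    // row like this, assuming the character image is loaded as a 3-channel BGR Mat:
    Mat gradientFeatureRow(const Mat &imgBGR)
    {
        vector<float> feat;
        calcGradientFeat(imgBGR, feat);            // 8 cells x 2 directions = 16 values
        Mat row = Mat(feat, true).reshape(1, 1);   // copy into a 1 x 16 CV_32F row
        return row;
    }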


    // Collect the paths of all *.png files directly inside a directory (MFC CFileFind)
    void getFileFromDir(const char *directorypath, vector<string> &vfiles)
    {
        CFileFind finder;
        string ext2find = directorypath;
        ext2find += "\\*.png";
        BOOL bResult = finder.FindFile(ext2find.c_str());
        while (bResult)
        {
            bResult = finder.FindNextFile();
            if (finder.IsDots() || finder.IsDirectory())
                continue;
            vfiles.push_back(string((LPCTSTR)finder.GetFilePath()));
        }
    }


    // Read every sample image and build the feature matrix (one row per sample)
    // and the label matrix (a one-hot row of length 34 per sample)
    void readSample(const char *directorypath, Mat &samples, Mat &labels)
    {
        cout << "start reading characters:" << endl;
        for (int i = 0; i < (int)sizeof(strCharacters); i++)
        {
            // each character has its own subfolder, e.g. charSamples\A
            string subdir = directorypath;
            subdir = subdir + "\\" + strCharacters[i];
            vector<string> vfiles;
            getFileFromDir(subdir.c_str(), vfiles);
            for (vector<string>::iterator it = vfiles.begin(); it != vfiles.end(); it++)
            {
                Mat img = imread(*it, CV_LOAD_IMAGE_GRAYSCALE);  // single channel
                if (img.empty())
                    continue;
                Mat m;
                img.convertTo(m, CV_32FC1);
                resize(m, m, Size(4, 8));   // 4x8 pixels -> 32-dimensional feature
                m = m.reshape(1, 1);        // one row per sample
                normalize(m, m);
                samples.push_back(m);
                Mat fl = Mat::zeros(1, 34, CV_32FC1);   // one-hot label, 34 classes
                fl.at<float>(0, i) = 1;
                labels.push_back(fl);
            }
        }
        cout << "good, reading characters finished!" << endl;
    }
    // Train a 3-layer MLP: 32 inputs -> (32 + 34) hidden neurons -> 34 outputs
    void MLPTrain(Mat &train, Mat &trainLabel)
    {
        CvANN_MLP NeuralNetworks;
        std::vector<int> LayerSizes;
        LayerSizes.push_back(train.cols);                    // input layer
        LayerSizes.push_back(train.cols + trainLabel.cols);  // hidden layer
        LayerSizes.push_back(trainLabel.cols);               // output layer
        // activation function
        int ActivateFunc = CvANN_MLP::SIGMOID_SYM;
        double Alpha = 1;
        double Beta = 1;
        // create the network
        NeuralNetworks.create(cv::Mat(LayerSizes), ActivateFunc, Alpha, Beta);
        // training parameters
        CvANN_MLP_TrainParams TrainParams;
        TrainParams.train_method = CvANN_MLP_TrainParams::BACKPROP;
        TrainParams.bp_dw_scale = 0.0001;
        TrainParams.bp_moment_scale = 0;

        // termination criteria: maximum iteration count and epsilon
        CvTermCriteria TermCrlt;
        TermCrlt.type = CV_TERMCRIT_ITER | CV_TERMCRIT_EPS;
        TermCrlt.epsilon = 0.0001f;
        TermCrlt.max_iter = 1000;
        TrainParams.term_crit = TermCrlt;
        // train the network and save the model to disk
        cout << "starting mlp training" << endl;
        NeuralNetworks.train(train, trainLabel, cv::Mat(), cv::Mat(), TrainParams);
        NeuralNetworks.save(mlpmodel);
        cout << "mlp train finished" << endl;
    }


    // Evaluate the saved model on a set of samples and print the accuracy
    void MLPTest(Mat &test, Mat &testLabel)
    {
        CvANN_MLP NeuralNetworks;
        NeuralNetworks.load(mlpmodel);
        int total = 0;
        int right = 0, error = 0;
        while (total < test.rows)
        {
            Mat m = test.row(total);
            Mat nearest(1, 34, CV_32FC1, Scalar(0));   // one response per class
            NeuralNetworks.predict(m, nearest);
            // the predicted class is the output neuron with the largest response
            Point maxLoc;
            minMaxLoc(nearest, NULL, NULL, NULL, &maxLoc);
            int ret = maxLoc.x;
            // recover the true class index from the one-hot label row
            int label = 0;
            for (int i = 0; i < 34; i++)
            {
                if (testLabel.at<float>(total, i) == 1)
                {
                    label = i;
                    break;
                }
            }
            if (ret == label)
                right++;
            else
                error++;
            total++;
        }

        cout << "precision: " << right * 1.0 / total << endl;
    }
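
    // Illustrative only (not in the original post): at inference time a single
    // character would be recognized by mapping the strongest output neuron back
    // into strCharacters. The feature row must be built exactly as in readSample
    // (grayscale, 4x8 resize, reshape to one row, normalize).
    char predictCharacter(CvANN_MLP &net, const Mat &featureRow)
    {
        Mat response(1, 34, CV_32FC1, Scalar(0));
        net.predict(featureRow, response);
        Point maxLoc;
        minMaxLoc(response, NULL, NULL, NULL, &maxLoc);
        return strCharacters[maxLoc.x];   // class index -> license plate character
    }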
    int main()
    {
        Mat Samples, Labels;
        readSample("charSamples", Samples, Labels);
        // MLPTrain(Samples, Labels);   // uncomment on the first run to generate ann.xml
        MLPTest(Samples, Labels);
        return 0;
    }
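    A note on the workflow: MLPTest loads ann.xml from disk, so MLPTrain has to be uncommented on the first run to actually produce the model file; after that it can stay commented out and the saved network is reused. Also keep in mind that main evaluates on the same samples used for training, so the printed precision is a training-set accuracy rather than a true generalization estimate.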

  • Original post: https://www.cnblogs.com/bhlsheji/p/5396918.html