  • Encoding a camera's raw RGB data stream into an H.264 file

    I looked through a lot of references, but nearly all of them encode an existing YUV file into H.264 video; I found almost nothing on saving camera data directly as an H.264 file.

    Below is my implementation, pieced together from several approaches I found online; you need to install ffdshow first. The flow is: the camera callback copies each BGR24 frame into a shared buffer, a recording thread converts the frames to planar YUV420 and appends them to a temporary file, and when recording stops SaveToH264 compresses that raw dump with libavcodec and deletes the temporary file.

    extern "C"
    {
    #include "avcodec.h"
    #include "avformat.h"
    #include "avio.h"
    #include "avutil.h"
    #include "common.h"
    #include "intfloat_readwrite.h"
    #include "inttypes.h"
    #include "log.h"
    #include "mathematics.h"
    #include "mem.h"
    #include "rational.h"
    #include "stdint.h"
    #include "swscale.h"
    }
    
    #pragma comment(lib,"avcodec.lib")
    #pragma comment(lib,"avformat.lib")
    #pragma comment(lib,"avutil.lib")
    // Fixed-point BT.601 conversion to limited-range YUV; note the B, G, R argument order.
    #define RGBTOYUV(B, G, R, Y, U, V) \
    Y = ( ( 66 * R + 129 * G + 25 * B + 128) >> 8) + 16;\
    U = ( ( -38 * R - 74 * G + 112 * B + 128) >> 8) + 128;\
    V = ( ( 112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
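
    // Quick illustrative check of the macro (added for clarity, not called anywhere in the
    // recorder): with the fixed-point coefficients above, a pure-white pixel should map to
    // the BT.601 limited-range values Y = 235, U = 128, V = 128.
    static void TestRgbToYuvMacro()
    {
        int y, u, v;
        RGBTOYUV(255, 255, 255, y, u, v);   // arguments are B, G, R, then the three outputs
        ASSERT(y == 235 && u == 128 && v == 128);
    }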
    
    size_t BufSize = 9437184;               // 2048 * 1536 * 3 bytes: room for one full 24-bit frame
    BYTE *pStream = new BYTE[BufSize];      // most recent frame, filled by the camera callback
    BYTE* pBmp24;
    int CALLBACK SnapThreadCallback(BYTE *pBuffer) // camera preview callback
    {
        pBmp24 = CameraISP(pBuffer);
        if (pBmp24)
        {
            if (bVideo)                     // recording: keep a copy for the encoder thread
            {
                memcpy(pStream, pBmp24, BufSize);
            }
            CameraDisplayRGB24(pBmp24);
        }
        return TRUE;
    }
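
    // Illustrative sketch (not part of the original code): pStream is written by the capture
    // callback and read concurrently by the recording thread with no synchronisation, so a
    // frame can be half-overwritten while it is being converted.  A simple guard is a
    // critical section shared by both sides -- the callback copies under the lock, and the
    // recording thread converts under the same lock.
    CRITICAL_SECTION g_frameLock;           // InitializeCriticalSection(&g_frameLock) at startup

    void CopyFrameLocked(const BYTE* pFrame)   // what the callback's memcpy would become
    {
        EnterCriticalSection(&g_frameLock);
        memcpy(pStream, pFrame, BufSize);
        LeaveCriticalSection(&g_frameLock);
    }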
    
    void CXXDlg::OnBtnVideo() 
    {
        // TODO: Add your control notification handler code here
        //CWinThread *pVedioStart;
        if (m_VideoMode != VIDEOMODE_PLAY)
        {
            GetDlgItem(IDC_BTN_VIDEO)->SetWindowText("停止录像");    
            AfxBeginThread((AFX_THREADPROC)VIDEO, this, THREAD_PRIORITY_ABOVE_NORMAL, 0, NULL);
            m_VideoMode = VIDEOMODE_PLAY;        
        }
        else
        {
            m_VideoMode = VIDEOMODE_STOP;
            GetDlgItem(IDC_BTN_VIDEO)->SetWindowText("开始录像");        
        }
    }
    
    UINT VIDEO(LPVOID pParam)
    {
        CXXDlg * dlg = (CXXDlg *)pParam;
        BYTE* yuv = new BYTE[bufzise * 3 / 2];          // one planar YUV420 frame
        CString svideofile; 
        CTime time = CTime::GetCurrentTime();
        svideofile.Format("%02d%02d%02d%02d%02d", time.GetMonth(), time.GetDay(), 
            time.GetHour(), time.GetMinute(), time.GetSecond());
        CFile file(svideofile, CFile::modeWrite|CFile::modeCreate);   // temporary raw YUV dump
        int m_frame_count = 0;
        Sleep(100);
        bVideo = TRUE;
        while (dlg->m_VideoMode != VIDEOMODE_STOP)
        {
            if (StreamOK)
            {
                dlg->RGB2YUV(&pStream, &yuv, 3, ImgW, ImgH);   // BGR24 -> planar YUV420
                memset(lpCurrentImage, 0, bufzise * 3);
                file.SeekToEnd();
                file.Write(yuv, bufzise * 3 / 2);              // append one raw frame
                m_frame_count++;
            }
        }
        file.Close();
        dlg->SaveToH264(svideofile, ImgW, ImgH, m_frame_count); // compress the raw dump
        delete [] yuv;                                          // release the conversion buffer
        AfxMessageBox("录像已保存!");                           // "Recording saved!"

        return 0;
    }
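
    // Illustrative sketch (an assumption on my part, not something the post's code calls):
    // the loop above writes frames as fast as StreamOK allows, while SaveToH264 later plays
    // them back at a fixed 10 fps (time_base = 1/10).  A small pacing helper keeps the raw
    // dump roughly in step; the recording loop would call it once per iteration, with
    // nextDue initialised to GetTickCount() before entering the loop.
    void WaitForNextFrame(DWORD& nextDue, DWORD framePeriodMs = 100)   // 100 ms ~ 10 fps
    {
        nextDue += framePeriodMs;
        DWORD now = GetTickCount();
        if (nextDue > now)
            Sleep(nextDue - now);           // sleep off the rest of this frame period
    }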
    
    void CXXDlg::RGB2YUV(BYTE** rgb, BYTE** yuv, unsigned rgbIncrement, int nWidth, int nHeight)
    {
        unsigned width  = nWidth;   // image width
        unsigned height = nHeight;  // image height
        const unsigned planeSize = width*height;
        const unsigned halfWidth = width >> 1;
        unsigned count = 0;
    
        BYTE * yplane  = *yuv;
        BYTE * uplane  = *yuv + planeSize;
        BYTE * vplane  = *yuv + planeSize + (planeSize >> 2);
        BYTE * rgbIndex = *rgb;
        
        for (unsigned y = 0; y < height; y++) 
        {
            BYTE * yline  = yplane + (y * width);
            BYTE * uline  = uplane + ((y >> 1) * halfWidth);
            BYTE * vline  = vplane + ((y >> 1) * halfWidth);
                    
            rgbIndex = *rgb + (width*(height-1-y)*rgbIncrement); // bottom-up bitmap: walk the rows in reverse to flip the image
    
            for (unsigned x = 0; x < width; x+=2)   // two pixels at a time: each pair shares one U and one V sample
            {
                RGBTOYUV(rgbIndex[0], rgbIndex[1], rgbIndex[2], *yline, *uline, *vline);            
                rgbIndex += rgbIncrement; count++;
                yline++;
                RGBTOYUV(rgbIndex[0], rgbIndex[1], rgbIndex[2],*yline, *uline, *vline);
                rgbIndex += rgbIncrement; count++;
                yline++;
                uline++;
                vline++;
            }
        }
    }
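
    // Illustrative note (not from the original post): the destination buffer filled above is
    // planar I420 -- a full-resolution Y plane followed by quarter-size U and V planes -- so
    // one frame takes width * height * 3 / 2 bytes, the same "bufzise * 3 / 2" that the
    // recording thread writes and SaveToH264 later reads back per frame.
    size_t I420FrameSize(int width, int height)
    {
        return (size_t)width * height        // Y plane
             + (size_t)width * height / 4    // U plane
             + (size_t)width * height / 4;   // V plane
    }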
    
    BOOL CXXDlg::SaveToH264 (CString YUVfile, int vWidth, int vHeight, int count)
    {
        AVFormatContext* oc;
        AVOutputFormat* fmt;
        AVStream* video_st;
        double video_pts;
        uint8_t* video_outbuf;
        uint8_t* picture_buf;
        AVFrame* picture;
        int size;
        int ret;
        int video_outbuf_size;    
        CString s;
        s = YUVfile + ".avi";
        const char *videofile = s.GetBuffer(0);
        s.ReleaseBuffer();
        FILE *fin = fopen(YUVfile, "rb");   // raw YUV dump produced by the recording thread
    
        av_register_all();
    
        fmt = guess_format(NULL, videofile, NULL);
        oc = av_alloc_format_context();
        oc->oformat = fmt;
        snprintf(oc->filename, sizeof(oc->filename), "%s", videofile);
    
        video_st = NULL;
        if (fmt->video_codec != CODEC_ID_NONE)
        {
            AVCodecContext* c;
            video_st = av_new_stream(oc, 0);
            c = video_st->codec;
            c->codec_id = fmt->video_codec;
            c->codec_type = CODEC_TYPE_VIDEO;
            c->bit_rate = 400000;
            c->width = vWidth;
            c->height = vHeight;
            c->time_base.num = 1;
            c->time_base.den = 10;          // 10 frames per second
            c->gop_size = 12;               // at most 12 frames between intra frames
            c->pix_fmt = PIX_FMT_YUV420P;
            if (c->codec_id == CODEC_ID_MPEG2VIDEO)
            {
                c->max_b_frames = 2;
            }
            if (c->codec_id == CODEC_ID_MPEG1VIDEO)
            {
                c->mb_decision = 2;
            }
            if (!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
            {
                c->flags |= CODEC_FLAG_GLOBAL_HEADER;
            }
        }
    
        if (av_set_parameters(oc, NULL)<0)
        {
            return FALSE;
        }
        
        dump_format(oc, 0, videofile, 1);
        if (video_st)
        {
            AVCodecContext* c;
            AVCodec* codec;
            c = video_st->codec;
            codec = avcodec_find_encoder(c->codec_id);
            if (!codec)
            {
                return FALSE;
            }
            if (avcodec_open(c, codec) < 0)
            {
                return FALSE;
            }
            if (!(oc->oformat->flags & AVFMT_RAWPICTURE))
            {
                video_outbuf_size = 200000;
                video_outbuf = (uint8_t*)av_malloc(video_outbuf_size);
            }
            picture = avcodec_alloc_frame();
            size = avpicture_get_size(c->pix_fmt, c->width, c->height);
            picture_buf = (uint8_t*)av_malloc(size);
            if (!picture_buf)
            {
                av_free(picture);
                return FALSE;
            }
            avpicture_fill((AVPicture*)picture, picture_buf, c->pix_fmt, c->width, c->height);
        }
    
        if (!(fmt->flags & AVFMT_NOFILE))
        {
            if (url_fopen(&oc->pb, videofile, URL_WRONLY) < 0)
            {
                return FALSE;
            }
        }
        av_write_header(oc);
    
        for (int i=0; i<count; i++)
        {
            if (savedlg)
            {
                savedlg->PostMessage(WM_COUNT, NULL, (LPARAM)i);
            }
        
            if (video_st)
            {
                video_pts = (double)(video_st->pts.val * video_st->time_base.num / video_st->time_base.den);
            }
            else
            {
                video_pts = 0.0;
            }
            if (!video_st/* || video_pts >= 5.0*/)
            {
                break;
            }
            AVCodecContext* c;
            c = video_st->codec;
            size = c->width * c->height;
    
            if (fread(picture_buf, 1, size*3/2, fin) != (size_t)(size*3/2))
            {
                break;                      // short read: no more complete frames
            }
            
            picture->data[0] = picture_buf;             // Y (luma) plane
            picture->data[1] = picture_buf + size;      // U (chroma) plane
            picture->data[2] = picture_buf + size*5/4;  // V (chroma) plane
    
            if (oc->oformat->flags & AVFMT_RAWPICTURE)
            {
                AVPacket pkt;
                av_init_packet(&pkt);
                pkt.flags |= PKT_FLAG_KEY;
                pkt.stream_index = video_st->index;
                pkt.data = (uint8_t*)picture;
                pkt.size = sizeof(AVPicture);
                ret = av_write_frame(oc, &pkt);
            }
            else
            {
                int out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
                if (out_size > 0)
                {
                    AVPacket pkt;
                    av_init_packet(&pkt);
                    pkt.pts = av_rescale_q(c->coded_frame->pts, c->time_base, video_st->time_base);
                    if (c->coded_frame->key_frame)
                    {
                        pkt.flags |= PKT_FLAG_KEY;
                    }
                    pkt.stream_index = video_st->index;
                    pkt.data = video_outbuf;
                    pkt.size = out_size;
            
                    ret = av_write_frame(oc, &pkt);
                }
            }
        }
    
        if (video_st)
        {
            avcodec_close(video_st->codec);
    
            av_free(picture);
            av_free(video_outbuf);
    
        }
        av_write_trailer(oc);
        for (int j=0; j<oc->nb_streams; j++)
        {
            av_freep(&oc->streams[j]->codec);
            av_freep(&oc->streams[j]);
        }
        if (!(fmt->flags & AVFMT_NOFILE))
        {
            url_fclose(oc->pb);
        }
        av_free(oc);
        fclose(fin);
        DeleteFile(YUVfile);
        //AfxMessageBox("视频已保存!");   // "Video saved!"
        
        return TRUE;
    
    }
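
    One caveat (this is my reading of the old FFmpeg API, so treat it as an assumption rather than something stated above): because the output name ends in ".avi", guess_format() returns the AVI muxer, and fmt->video_codec becomes that muxer's default codec, which in FFmpeg builds of this generation is MPEG-4 Part 2 rather than H.264. If you want a genuine H.264 stream and your FFmpeg build was compiled with libx264, you can request the codec explicitly before the encoder is opened, roughly like this:

    // Hedged sketch: ask for H.264 explicitly instead of the AVI muxer's default codec.
    // Assumes the linked FFmpeg build includes libx264, so the encoder can be found.
    c->codec_id = CODEC_ID_H264;        // instead of c->codec_id = fmt->video_codec
    // ... the rest of SaveToH264 is unchanged: avcodec_find_encoder(c->codec_id)
    // now looks up the H.264 encoder.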

    This is my first blog post, so the code may not be detailed enough; I hope to learn more together with everyone!

  • Original article: https://www.cnblogs.com/youfal/p/2665900.html