  • Partial code for real-time encoding and decoding with FFmpeg

    The program is split into an encoder end and a decoder end, which communicate over a TCP socket. The encoder sends the encoded data to the decoder as it encodes; the decoder displays each decoded frame as the data arrives.
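
    The socket setup itself is not part of the excerpt. A minimal sketch of how each end might be started (the port, address, and requested size are placeholders; the two MapThread classes belong to the separate encoder and decoder programs):

    #include <QTcpServer>
    #include <QHostAddress>

    // Encoder program (hypothetical startup): accept one decoder connection,
    // then hand the socket to the encoding thread.
    QTcpServer server;
    server.listen(QHostAddress::Any, 9999);   // port is an assumption
    server.waitForNewConnection(-1);          // block until the decoder connects
    MapThread* sender = new MapThread(server.nextPendingConnection());
    sender->start();

    // Decoder program (hypothetical startup): its MapThread connects to the
    // encoder itself and requests an 800x600 image.
    MapThread* receiver = new MapThread("127.0.0.1", 9999, 800, 600);
    receiver->start();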

    The encoder end in this code encodes real-time screen captures.
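
    Interface::grapScreen() is not shown in the excerpt. In Qt 5 an equivalent capture can be done with QScreen::grabWindow; a hypothetical stand-in:

    #include <QGuiApplication>
    #include <QScreen>
    #include <QPixmap>

    // Hypothetical stand-in for Interface::grapScreen(): grab the primary screen.
    QPixmap grabScreen()
    {
        QScreen* screen = QGuiApplication::primaryScreen();
        return screen->grabWindow(0);   // WId 0 means the entire desktop
    }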

    The code uses the Qt SDK.

    #ifndef MAPTHREAD_H
    #define MAPTHREAD_H
    
    #include <QThread>
    #include <QTcpSocket>
    #include <QTimer>
    #include <QColor>
    #include <QImage>
    #include <QPixmap>
    #include <QTime>
    #include <QDateTime>
    
    #include <stdio.h>
    
    struct AVFrame;
    struct AVPacket;
    struct AVCodec;
    struct AVCodecContext;
    
    
    class MapThread : public QThread
    {
        Q_OBJECT
    
    private:
    #ifdef DEBUG
        FILE* log;
        QTime* time;
        QDateTime dt;
    #endif
    
        AVFrame *frame;
        AVPacket* pkt;
        AVCodec *codec;
        AVCodecContext *c;
        int i, ret, x, y, got_output;
    
        int dest_width;           // width requested by the client
        int dest_height;          // height requested by the client
        int send_width;           // width of the image being sent
        int send_height;          // height of the image being sent
        int scaleby;              // scale by the destination width or by its height
    
        uchar* sent_img_buf;   // buffer of the image that has been sent
        uchar* curt_img_buf;   //buffer of the current image
        uchar* send_data_buf;
        uchar cmd_buf[4];
        int   cmd_buf_fill;
    
        bool started;
        bool inited;
    
        int          interval;         // interval between consecutive frames
        QTcpSocket*  mapSocket;
        QTimer* timer;
    
    public:
        MapThread(QTcpSocket* socket, QObject *parent = 0);
        ~MapThread();
    
        const static int SCALE_BY_WIDTH  = 1;
        const static int SCALE_BY_HEIGHT = 2;
    
        void setSendInterval(int i);
        
    signals:
        
    public slots:
        void sendFrame();
        void newData();
        void newCommand();
        void quit();
    
    protected:
        void run();
        
    };
    
    #endif // MAPTHREAD_H
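
    The constructor and run() for the encoder side are not included in the excerpt. A plausible sketch of how the declared members fit together — a QTimer firing sendFrame() at the configured interval, and incoming socket data feeding newCommand(); the 40 ms default and all wiring details are assumptions:

    MapThread::MapThread(QTcpSocket* socket, QObject *parent)
        : QThread(parent), started(false), inited(false),
          interval(40),   // 40 ms, i.e. 25 fps, matching the encoder's 1/25 time base
          mapSocket(socket)
    {
        timer = new QTimer(this);
        connect(timer, SIGNAL(timeout()), this, SLOT(sendFrame()));
        connect(mapSocket, SIGNAL(readyRead()), this, SLOT(newCommand()));
        timer->start(interval);
    }

    void MapThread::setSendInterval(int i)
    {
        interval = i;
        timer->setInterval(i);
    }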
    


    void MapThread::sendFrame()
    {
        if(!started)
            return;
    
        if(!inited)
        {
            avcodec_register_all();
    
            c= NULL;
            pkt = new AVPacket;
            i = 0;
    
    #ifdef DEBUG
            fprintf(log, "Encode video file %s
    ", "test.mpg");
            fflush(log);
    #endif
            /* find the mpeg1 video encoder */
            codec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
    
            if (codec == 0)
            {
    #ifdef DEBUG
                fprintf(log, "Codec not found
    ");
                fflush(log);
    #endif
                exit(1);
            }
    
            c = avcodec_alloc_context3(codec);
            if (!c)
            {
    #ifdef DEBUG
                fprintf(log, "Could not allocate video codec context
    ");
                fflush(log);
    #endif
                exit(1);
            }
            //c->bit_rate = 400000;
            c->width = dest_width;
            c->height = dest_height;
    
            c->time_base = (AVRational){1,25};
            c->gop_size = 100;
            c->max_b_frames = 0;
            c->delay = 0;
            c->pix_fmt = AV_PIX_FMT_YUV420P;
    
            //av_opt_set(c->priv_data, "preset", "slow", 0);
    
        // Note: "preset" and "tune" are libx264 private options; the MPEG-1
        // encoder has no such options, so these calls have no effect here.
        av_opt_set(c->priv_data, "preset", "superfast", 0);
        av_opt_set(c->priv_data, "tune", "zerolatency", 0);

        int re = avcodec_open2(c, codec, NULL);
            if (re < 0) {
    #ifdef DEBUG
                fprintf(log, "Could not open codec:%d
    ", re);
                fflush(log);
    #endif
                exit(1);
            }
    
            frame = av_frame_alloc();
            if (!frame) {
    #ifdef DEBUG
                fprintf(log, "Could not allocate video frame
    ");
                fflush(log);
    #endif
                exit(1);
            }
            frame->format = c->pix_fmt;
            frame->width  = c->width;
            frame->height = c->height;
    
            ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height, c->pix_fmt, 32);
            if (ret < 0) {
    #ifdef DEBUG
                fprintf(log, "Could not allocate raw picture buffer
    ");
                fflush(log);
    #endif
                exit(1);
            }
            inited = true;
        }
    
    
        if(mapSocket == 0)
        {
    #ifdef DEBUG
        qDebug() << "null socket";
    #endif
            return;
        }
        else if(mapSocket->isOpen() == false)
        {
            return;
        }
        else if(mapSocket->isWritable() == false)
        {
            return;
        }
    #ifdef DEBUG
        fprintf(log, "start cap:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
    #endif
        QImage image = Interface::grapScreen().toImage();
        image = image.scaled(QSize(dest_width, dest_height));
    
    #ifdef DEBUG
        fprintf(log, "end cap:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
        //fprintf(log, "cap:%d
    ", time->elapsed());
        fflush(log);
    #endif
    
    
        av_init_packet(pkt);
        pkt->data = NULL;    // packet data will be allocated by the encoder
        pkt->size = 0;       // must be 0 when data is NULL
    
        for (int h = 0; h < c->height; h++)
        {
            for (int w = 0; w < c->width; w++)
            {
                QRgb rgb = image.pixel(w, h);
    
                int r = qRed(rgb);
                int g = qGreen(rgb);
                int b = qBlue(rgb);

                // Integer approximation of BT.601 RGB -> YCbCr
                // (studio swing: Y in [16,235], U/V centered at 128)
                int dy = ((66*r + 129*g + 25*b) >> 8) + 16;
                int du = ((-38*r - 74*g + 112*b) >> 8) + 128;
                int dv = ((112*r - 94*g - 18*b) >> 8) + 128;
    
                uchar yy = (uchar)dy;
                uchar uu = (uchar)du;
                uchar vv = (uchar)dv;
    
                frame->data[0][h * frame->linesize[0] + w] = yy;
    
                if(h % 2 == 0 && w % 2 == 0)
                {
                    // 4:2:0 subsampling: one U and one V sample per 2x2 pixel block
                    frame->data[1][h/2 * (frame->linesize[1]) + w/2] = uu;
                    frame->data[2][h/2 * (frame->linesize[2]) + w/2] = vv;
                }
    
            }
        }
    
        frame->pts = i;
    
        ret = avcodec_encode_video2(c, pkt, frame, &got_output);
    
        if (ret < 0)
        {
    #ifdef DEBUG
            fprintf(log, "Error encoding frame
    ");
            fflush(log);
    #endif
            exit(1);
        }
    
        if (got_output)
        {
    #ifdef DEBUG
            fprintf(log, "start send:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
    #endif
            int ss = pkt->size;
    #ifdef DEBUG
            qDebug()<<"ss:"<<ss;
            fprintf(log, "size:%d
    ", ss);
    #endif
            writeAndBlock(mapSocket, pkt->data, ss);
            mapSocket->flush();
    #ifdef DEBUG
            fprintf(log, "end send:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
    #endif
    
            av_free_packet(pkt);
        }
    
        i++;
    }
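
    writeAndBlock() is called above but its implementation is not part of the excerpt. A minimal sketch of what such a helper might look like, assuming it simply blocks until Qt has accepted the whole buffer:

    // Hypothetical helper: write 'len' bytes, blocking until all are accepted.
    static bool writeAndBlock(QTcpSocket* socket, const uchar* data, int len)
    {
        int written = 0;
        while (written < len)
        {
            qint64 n = socket->write((const char*)data + written, len - written);
            if (n < 0)
                return false;                 // socket error
            written += (int)n;
            socket->waitForBytesWritten(-1);  // block until the buffer drains
        }
        return true;
    }

    Note that the packets are sent with no length framing; the decoder side relies on the MPEG-1 parser (CODEC_FLAG_TRUNCATED) to find frame boundaries in whatever chunks arrive.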


    Decoder side:

    #ifndef MAPTHREAD_H
    #define MAPTHREAD_H
    
    #include <QThread>
    #include <QTcpSocket>
    #include <QDebug>
    #include <QImage>
    #include <QTime>
    #include <QDateTime>
    
    #include "version.h"
    struct AVFrame;
    struct AVPacket;
    struct AVCodec;
    struct AVCodecContext;
    
    #define INBUF_SIZE 4096000
    #define AUDIO_INBUF_SIZE 20480
    #define AUDIO_REFILL_THRESH 4096
    #define QIMAGE_BUFFER_SIZE 12000000
    
    class MapThread : public QThread
    {
        Q_OBJECT
    
    private:
        QTime* time;
        int newF;
    
        uchar* img_buf;
        QTcpSocket* mapSocket;
        QString     address;
        int         port;
        bool socketConnected;
    
        int request_width;
        int request_height;
    
        uchar* recv_buf;
        uchar* frame_buf;
        int    frame_buf_fill;
        int    frame_bytes;
        uchar  cmd_buf[8];
        int    cmd_buf_fill;
    
        bool cmd_got;
        bool frame_size_setted;
    
        bool cmd_parsed;
        int subX;
        int subY;
        int subWidth;
        int subHeight;
        int subSize;
        int subFill;
    
        bool inited;
        AVFrame *frame;
        AVPacket* pkt;
        AVCodec *codec;
        AVCodecContext *c;
        int i, ret, x, y, got_output;
    
        //QImage* image;
    
        uint8_t* inbuf;
    #ifdef DEBUG
        FILE* log;
        QDateTime dt;
        int readlen;
    #endif
    
        int decode_write_frame(AVCodecContext *avctx, AVFrame *frame, AVPacket *pkt);
    
    public:
        int received_frame_width;
        int received_frame_height;
        QImage*  image;
    
        MapThread(QString add, int p, int w, int h, QObject *parent = 0);
        void sendRequestSize(int width, int height);
        void getSubWindow();
        void parseCommand();
        void updateFrame();
        
    signals:
        void frameGot(QImage*);
        void frameSizeChanged(int, int);
        
    public slots:
        void newData();
        void hostConnected();
    
    protected:
        void run();
        
    };
    
    #endif // MAPTHREAD_H
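
    sendRequestSize() is declared but not shown, and the wire format is not given in the excerpt. A sketch under the assumption that a command is width and height packed as two 16-bit big-endian integers, matching the encoder's 4-byte cmd_buf:

    void MapThread::sendRequestSize(int width, int height)
    {
        // Hypothetical wire format: two 16-bit big-endian integers in 4 bytes.
        uchar cmd[4];
        cmd[0] = (width  >> 8) & 0xff;
        cmd[1] =  width        & 0xff;
        cmd[2] = (height >> 8) & 0xff;
        cmd[3] =  height       & 0xff;
        mapSocket->write((const char*)cmd, 4);
        mapSocket->flush();
    }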
    


    int MapThread::decode_write_frame(AVCodecContext *avctx, AVFrame *frame, AVPacket *pkt)
    {
        int len, got_frame;
        len = avcodec_decode_video2(avctx, frame, &got_frame, pkt);
        if (len < 0)
            return len;   // decode error; the caller treats a negative return as fatal

        if (got_frame)
        {
    #ifdef DEBUG
            fprintf(log, "got frame: %d\n", QDateTime::currentDateTime().msecsTo(dt));
            //fprintf(log, "got frame: %d\n", time->elapsed());
    #endif
    
            if(image == 0)
                image = new QImage(img_buf, avctx->width, avctx->height, QImage::Format_RGB888);
    
            received_frame_width = avctx->width;
            received_frame_height = avctx->height;
    
            for(int h = 0; h < avctx->height; h++)
            {
                for(int w = 0; w < avctx->width; w ++)
                {
                    int hh = h >> 1;
                    int ww = w >> 1;
                    int Y = frame->data[0][h * frame->linesize[0] + w];
                    int U = frame->data[1][hh * (frame->linesize[1]) + ww];
                    int V = frame->data[2][hh * (frame->linesize[2]) + ww];
    
                    // BT.601 integer approximation of YCbCr -> RGB
                    int C = Y - 16;
                    int D = U - 128;
                    int E = V - 128;

                    int r = 298 * C           + 409 * E + 128;
                    int g = 298 * C - 100 * D - 208 * E + 128;
                    int b = 298 * C + 516 * D           + 128;

                    r = qBound(0, r >> 8, 255);
                    g = qBound(0, g >> 8, 255);
                    b = qBound(0, b >> 8, 255);
    
                    QRgb rgb = qRgb(r, g, b);
                    image->setPixel(QPoint(w, h), rgb);
                }
            }
    #ifdef DEBUG
            fprintf(tlog, "emit frame:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
            //fprintf(tlog, "emit frame:%d
    ", ttime->elapsed());
    #endif
            emit frameGot(image);
        }
        if (pkt->data) {
            pkt->size -= len;
            pkt->data += len;
        }
    
        return 0;
    }
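
    The per-pixel setPixel loop above is straightforward but slow. An alternative worth noting (not what the original code does) is to let libswscale perform the YUV420P-to-RGB conversion in one call; a sketch, with rgb_buf standing in for img_buf:

    extern "C" {
    #include <libswscale/swscale.h>
    }

    // Sketch: convert a decoded YUV420P frame to packed RGB888 in one call.
    static void yuvFrameToRgb(AVFrame* frame, int width, int height, uint8_t* rgb_buf)
    {
        SwsContext* sws = sws_getContext(width, height, AV_PIX_FMT_YUV420P,
                                         width, height, AV_PIX_FMT_RGB24,
                                         SWS_BILINEAR, NULL, NULL, NULL);
        uint8_t* dst[4] = { rgb_buf, NULL, NULL, NULL };
        int dst_linesize[4] = { 3 * width, 0, 0, 0 };
        sws_scale(sws, frame->data, frame->linesize, 0, height, dst, dst_linesize);
        sws_freeContext(sws);
    }

    In a real decoder the SwsContext would be created once and reused across frames rather than rebuilt per frame.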


    void MapThread::newData()
    {
        if(!inited)
        {
            avcodec_register_all();
    
        pkt = new AVPacket;
        av_init_packet(pkt);

        // inbuf is assumed to be allocated elsewhere (e.g. in the constructor) with
        // INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE bytes; zeroing the padding keeps
        // the decoder from reading uninitialized memory past the end of the input.
        memset(inbuf + INBUF_SIZE, 0, FF_INPUT_BUFFER_PADDING_SIZE);
    
            codec = avcodec_find_decoder(AV_CODEC_ID_MPEG1VIDEO);
            if (!codec)
            {
    #ifdef DEBUG
                fprintf(log, "Codec not found
    ");
                fflush(log);
    #endif
                exit(1);
            }
            c = avcodec_alloc_context3(codec);
    
            if (!c) {
    #ifdef DEBUG
                fprintf(log, "Could not allocate video codec context
    ");
                fflush(log);
    #endif
                exit(1);
            }
    
        // Note: "preset"/"tune" are libx264 private options; the MPEG-1 decoder
        // has no such options, so these calls have no effect here.
        av_opt_set(c->priv_data, "preset", "superfast", 0);
        av_opt_set(c->priv_data, "tune", "zerolatency", 0);

        c->delay = 0;

        // Accept packets that are not aligned on frame boundaries; the decoder
        // buffers partial frames internally.
        if(codec->capabilities & CODEC_CAP_TRUNCATED)
            c->flags |= CODEC_FLAG_TRUNCATED;
            if (avcodec_open2(c, codec, NULL) < 0) {
    #ifdef DEBUG
                fprintf(log, "Could not open codec
    ");
                fflush(log);
    #endif
                exit(1);
            }
    
            frame = av_frame_alloc();
            if (!frame) {
    #ifdef DEBUG
                fprintf(log, "Could not allocate video frame
    ");
                fflush(log);
    #endif
                exit(1);
            }
    
            inited = true;
        }
        while(true)
        {
            int nread = mapSocket->read((char*)inbuf, INBUF_SIZE);
    #ifdef DEBUG
            readlen += nread;
            fprintf(tlog, "recv time:%d
    ", QDateTime::currentDateTime().msecsTo(dt));
            fprintf(tlog, "recv all:%d
    ", readlen);
            fflush(tlog);
    #endif
    
            if(nread <= 0)
                break;
    
            av_init_packet(pkt);
            pkt->size = nread;
            pkt->data = inbuf;
            while (pkt->size > 0)
            {
                if (decode_write_frame(c, frame, pkt) < 0)
                    exit(1);
            }
            av_free_packet(pkt);
        }
    }
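
    run() and hostConnected() are likewise not included. A plausible sketch of the receiving side's wiring, with readyRead() delivering data to newData(); all details here are assumptions:

    void MapThread::run()
    {
        mapSocket = new QTcpSocket();
        connect(mapSocket, SIGNAL(readyRead()), this, SLOT(newData()), Qt::DirectConnection);
        connect(mapSocket, SIGNAL(connected()), this, SLOT(hostConnected()), Qt::DirectConnection);
        mapSocket->connectToHost(address, port);
        exec();   // run this thread's event loop so the socket slots fire here
    }

    void MapThread::hostConnected()
    {
        socketConnected = true;
        sendRequestSize(request_width, request_height);   // ask the encoder for the desired size
    }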



  • Original post: https://www.cnblogs.com/cynchanpin/p/7204463.html