  • Smart pointer handling---bo

    // sdltest1.cpp : Defines the entry point for the console application.
    //
    
    #include "stdafx.h"
    #include <stdio.h>
    #include <stdlib.h>
    extern "C"
    {
        #include <SDL.h>
        #include "libavutil/opt.h"
        #include "libavutil/channel_layout.h"
        #include "libavutil/common.h"
        #include "libavutil/imgutils.h"
        #include "libavutil/mathematics.h"
        #include "libavutil/samplefmt.h"
        #include "libavutil/time.h"
        #include "libavutil/fifo.h"
        #include "libavcodec/avcodec.h"
        #include "libavformat/avformat.h"
        #include "libavformat/avio.h"
        #include "libavfilter/avfiltergraph.h"
        #include "libavfilter/avfilter.h"
        #include "libavfilter/buffersink.h"
        #include "libavfilter/buffersrc.h"
        #include "libswscale/swscale.h"
        #include "libswresample/swresample.h"
    }
    #include <memory>
    #include <vector>
    #include <windows.h>
    #include <thread>
    #include <mutex>
    #include <queue>
    #include <iostream>
    #include "sdlplayer.h"
    
    #pragma comment(lib, "avcodec.lib")
    
    bool saveAsBitmap(AVFrame *pFrameRGB, int width, int height, int iFrame);
    
    
    //int mymain();
    //extern "C" _declspec(dllexport) int mymain();
    int mymain(char* file);
    //int _tmain(int argc, _TCHAR* argv[])
    int main(int argc, char* argv[])
    {
        printf("come this -->
    ");
        if(argc!=2) {
            printf("Args count not right!\n");
            return 0;
        }
    
    
        printf("%s 
    ",(char*)argv[1]);
        mymain((char*)argv[1]);
        //mymain();
        return 0;
    }
    
    
    
    AVInputFormat mFormat;
    AVDictionary* iformat_opts;
    using namespace std;
    #define INBUF_SIZE 4096
    int videoIndex;
    #define DelayTime 5
    void Init()
    {
        av_register_all();
        avfilter_register_all();
        avformat_network_init();
        av_log_set_level(AV_LOG_ERROR);
    }
    AVFormatContext *ic = NULL;
    int64_t lastReadPacktTime ;
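    // readPacketFromSource wraps a freshly read AVPacket in a std::shared_ptr whose
    // custom deleter calls av_packet_free(), so the packet (and its data) is released
    // automatically once every holder of the shared_ptr is done with it. Returns
    // nullptr on EOF or read error.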
    std::shared_ptr <AVPacket> readPacketFromSource()
    {
        std::shared_ptr<AVPacket> packet(static_cast<AVPacket*>(av_malloc(sizeof(AVPacket))), [&](AVPacket *p) { av_packet_free(&p); av_freep(&p);});
        av_init_packet(packet.get());
        lastReadPacktTime = av_gettime();
        int ret = av_read_frame(ic, packet.get());
        if(ret >= 0)
        {
            return packet;
        }
        else
        {
            return nullptr;
        }
    
    }    
    bool videoDecode(AVPacket* packet, AVFrame *frame)
    {
        int gotFrame = 0;
        //videoIndex
        auto hr = avcodec_decode_video2(ic->streams[videoIndex]->codec, frame, &gotFrame, packet);
        if (hr >= 0 && gotFrame != 0)
        {
            return true;
        }
        return false;
    }
    
    
    int initVideoDecodeContext()
    {    
        auto codecId = ic->streams[videoIndex]->codec->codec_id;
        auto codec = avcodec_find_decoder(codecId);
        if (!codec)
        {
            return -1;
        }
    
        int ret = avcodec_open2(ic->streams[videoIndex]->codec, codec, NULL);
        return ret;
    
    }
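    // pgm_save is a small debug helper: it dumps one gray plane (e.g. a frame's Y
    // plane) to disk as a binary PGM (P5) image so decoded output can be inspected.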
    
    
    
    static void pgm_save(unsigned char *buf, int wrap, int xsize, int ysize,
                         char *filename)
    {
        FILE *f;
        int i;
        printf("filename = %s
    ",filename);
        f = fopen(filename,"w");
        fprintf(f, "P5
    %d %d
    %d
    ", xsize, ysize, 255);
        for (i = 0; i < ysize; i++)
            fwrite(buf + i * wrap, 1, xsize, f);
        fclose(f);
    }
    
    int gWidth = 0;
    int gHeight = 0;
    
    int linesize[5] = { 640,320,320,0,0 }; // Y/U/V line sizes, hard-coded for a 640-pixel-wide video
    
    
    
    typedef struct  {
        //BYTE  buf[640*360*3/2+100];
        std::shared_ptr<BYTE> buf;
        BYTE*    data[3];
        int64_t  dts;
    } DecodeFrame;
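    // Decoded pictures travel from the decode loop to the player thread as
    // shared_ptr<DecodeFrame>: the struct and the shared_ptr<BYTE> pixel buffer it
    // owns stay alive until both the queue and the renderer have dropped them.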
    
    //queue<DecodeFrame*> queFrame;
    
    
    queue<std::shared_ptr<DecodeFrame>> queFrame;
    
    int playState = 1;
    std::shared_ptr<CGSDLRender> sdlRender;
    mutex g_lock;
    int64_t lastDts=0;
    unsigned long lastTime = 0;
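    // playFun runs on a detached worker thread: it pops frames from queFrame under
    // g_lock, sleeps for the dts delta (converted to milliseconds via the stream
    // time_base) to pace playback, then hands the Y/U/V pointers to the SDL renderer.
    // Setting playState = 0 from the main thread ends the loop.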
    
    
    
    void playFun(){
        
        for(;;){
            if(playState == 0) break;
            if (queFrame.size()>0){
    
                g_lock.lock();
                //DecodeFrame* dfr= queFrame.front();
                std::shared_ptr<DecodeFrame> dfr= queFrame.front();
    
                queFrame.pop();
                g_lock.unlock();
                auto diff = dfr->dts - lastDts;
                int duration = diff * 1000 /(ic->streams[videoIndex]->time_base.den
                            /ic->streams[videoIndex]->time_base.num);
                if(duration > DelayTime && duration < 1000){
                    Sleep(duration );
                }
    
                std::cout<<"duration1: "<<duration<<endl;
    
                printf("packet->dts time =%d --->
     ", dfr->dts * 1000 / (ic->streams[videoIndex]->time_base.den
                    / ic->streams[videoIndex]->time_base.num));
    
                sdlRender->Display((char**)dfr->data,linesize);
                lastDts = dfr->dts;
                unsigned long nowTime = GetTickCount();
                printf(" %ld cha  %ld
    ",nowTime,nowTime-lastTime); //得到ms
                lastTime = nowTime;
                //delete dfr;
                
            }else{
                Sleep(150);
            }
        }
    }
    
    
    int mymain(char* file)
    {
        int scan_all_pmts_set = 0;
        /* register all codecs, demux and protocols */
        Init();
        ic = avformat_alloc_context();
        int ret;
        if (!ic) {
            av_log(NULL, AV_LOG_FATAL, "Could not allocate context.\n");
            ret = AVERROR(ENOMEM);
            printf("alloc err %d\n",ret);
        }
    
        int err = avformat_open_input(&ic, "F://test.mp4", nullptr, nullptr);
        //int err = avformat_open_input(&ic, "F://3s.mp4", nullptr, nullptr);
        if (err < 0) {
            printf("open err err=%d
    ",err);
        }
        printf("come 2
    ");
    
        err = avformat_find_stream_info(ic, nullptr);
        printf("ic->nb_streams %d
    ",ic->nb_streams);
        if(err<0){
    
        }else{
            for(int i=0;i<ic->nb_streams;i++){
                int type =  ic->streams[i]->codec->codec_type;
                printf("type = %d
    ",type);
                if(type == AVMediaType::AVMEDIA_TYPE_VIDEO){
                    videoIndex  = i;
                    printf("videoIndex =%d 
    ",videoIndex);
                }
            }                                        
        }
    
        gWidth = ic->streams[videoIndex]->codec->width;
        gHeight = ic->streams[videoIndex]->codec->height;
    
        ret = initVideoDecodeContext(); // open the decoder for the selected video stream
        printf("initVideoDecodeContext ret = %d\n", ret);
        if(ret < 0) return ret;
        sdlRender = std::make_shared<CGSDLRender>();
        sdlRender->InitVideo(0);
    
        sdlRender->CreateVideoSurface(gWidth, gHeight);
        AVRational time_base =ic->streams[videoIndex]->time_base;
        printf("num %d,den %d--
    ",time_base.num,time_base.den);
        // to thi
        playState = 1;
        thread t1(playFun);
        t1.detach();
        int64_t lastDts=0;
    
        int w = gWidth;
        int h = gHeight;
        AVFrame * videoFrame = av_frame_alloc();
        for(int i=0;i<10000;i++){
            auto packet = readPacketFromSource();
            if(packet){
                if(packet->stream_index==videoIndex){
                    
                    if(videoDecode(packet.get(),videoFrame))
                    {
    
                        printf("%d---
    ",i);
                        //printf("%lld 
    ",videoFrame->pkt_pos );
    
                        //DecodeFrame* dfr=&arrFrame[decodeFrame];
                        //DecodeFrame* dfr = new DecodeFrame;
                        std::shared_ptr<DecodeFrame> dfr(static_cast<DecodeFrame*>( new DecodeFrame), [&](DecodeFrame *p) { delete p;});
    
                         std::shared_ptr<BYTE> tmpbuf (new BYTE[w * h * 3 / 2 + 100](), std::default_delete<BYTE[]>());
                         dfr->buf = tmpbuf;
    
                        memcpy(dfr->buf.get(),videoFrame->data[0],w*h);
                        memcpy(dfr->buf.get() + w * h, videoFrame->data[1], w * h / 4);
                        memcpy(dfr->buf.get() + w * h * 5 / 4, videoFrame->data[2], w * h / 4);
                        dfr->data[0] = dfr->buf.get();
                        dfr->data[1] = dfr->buf.get() + w * h;
                        dfr->data[2] = dfr->buf.get() + w * h * 5 / 4;
                        dfr->dts = packet->dts;
    
    
                        g_lock.lock();
                        queFrame.push(dfr);
                        g_lock.unlock();
                        printf("packet->dts time =%d <---
     ",dfr->dts*1000/(ic->streams[videoIndex]->time_base.den
                        /ic->streams[videoIndex]->time_base.num));
                        if(queFrame.size()>30)
                                Sleep(100);
    
                    }
                }
            }
            else{
                break;
            }
        }
    
        av_frame_free(&videoFrame);
        //playFun();
        Sleep(8000);
        playState = 0;
        Sleep(600);
        system("PAUSE");
        
        return 0;
    }
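
    The pattern used throughout the listing is to attach FFmpeg's own free functions to C++ smart pointers, so packets, frames and pixel buffers are released on every exit path without explicit cleanup code. Below is a minimal sketch of the same idea, assuming a libavcodec recent enough to provide av_packet_alloc() (the listing itself targets the older av_malloc + av_init_packet API); the makeFrame/makePacket/readOnePacket helpers are illustrative names, not part of the original program:

    #include <memory>
    extern "C"
    {
        #include "libavcodec/avcodec.h"
        #include "libavformat/avformat.h"
    }

    // Each FFmpeg object is wrapped in a shared_ptr whose deleter is the matching
    // FFmpeg free function, mirroring readPacketFromSource() above.
    std::shared_ptr<AVFrame> makeFrame()
    {
        return std::shared_ptr<AVFrame>(av_frame_alloc(),
                                        [](AVFrame *f) { av_frame_free(&f); });
    }

    std::shared_ptr<AVPacket> makePacket()
    {
        return std::shared_ptr<AVPacket>(av_packet_alloc(),
                                         [](AVPacket *p) { av_packet_free(&p); });
    }

    // Usage sketch: the packet is unreferenced and freed automatically,
    // even if av_read_frame() fails or the caller returns early.
    int readOnePacket(AVFormatContext *fmt)
    {
        auto packet = makePacket();
        if (!packet) return AVERROR(ENOMEM);
        return av_read_frame(fmt, packet.get());
    }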
  • Original article: https://www.cnblogs.com/cnchengv/p/15356918.html