Decoding H264 video with ffmpeg (an easy-to-use C++ wrapper library)

    H264 is one of the most widely used video compression formats today; ffmpeg is an open-source library that, among other things, can decode H264 video.

    To reduce the complexity of using ffmpeg and hide as many implementation details as possible, I wrote a wrapper library. It can also be used conveniently from C#. The decoded data can be returned either as YUV or as RGB, and the RGB output can additionally be scaled to a requested size.

    The class H264Decode implements all of the decoding functionality. It is then wrapped in plain C functions so that it can be called easily from C#.
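
    For orientation, here is a rough sketch of what the class declaration might look like. It is reconstructed from the snippets below; members that never appear in the article (for example the scaling bookkeeping fields) are assumptions, not the exact header.

    class H264Decode
    {
    public:
        BOOL  Init();                                                                 //open the H264 decoder
        INT32 H264_PutVideoStream(char* buffer, INT32 bufferLen);                     //feed one encoded frame
        INT32 H264_GetNextVideoFrame(char* buffer, INT32 bufferLen, INT32 yuFormate); //read one decoded frame as YUV
        INT32 H264_GetNextVideoFrame_Rgb(char* buffer, INT32 bufferLen, INT32 width, INT32 height); //read one decoded frame as RGB, scaled

    private:
        void ResetRgbScale(INT32 width, INT32 height); //(re)creates the swscale context, see section 4

        BOOL             _init = FALSE;
        static BOOL      _av_register;            //assumed static: av_register_all() should run once per process
        AVCodecContext*  _pCodecContext = NULL;
        AVCodec*         _pH264VideoDecoder = NULL;
        AVFrame*         _pFrameYuv = NULL;

        SwsContext*      _img_convert_ctx = NULL; //assumed: created by ResetRgbScale
        int              _out_rgb_buffer_len = 0; //assumed: expected RGB output size in bytes
        uint8_t*         _dst_dataTmp[4] = {};    //assumed: destination plane pointers for sws_scale
        int              _dst_linesize[4] = {};   //assumed: destination strides for sws_scale
    };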

    Source code download: https://download.csdn.net/download/qq_29939347/10674597

    The implementation details of the class are described below:

    1 BOOL H264Decode::Init()

     Initializes the decoding library and opens the H264 decoder.

    BOOL H264Decode::Init()
    {
        if (_init)
            return TRUE;
    
        _init = TRUE;
        if (!_av_register)
        {
            _av_register = TRUE;
            av_register_all();
        }
    
        _pCodecContext = avcodec_alloc_context3(NULL);
        _pH264VideoDecoder = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (_pH264VideoDecoder == NULL)
        {
            return FALSE;
        }    
    
        //Initialize the parameters; the values below should really come from the actual use case (e.g. AV_PIX_FMT_YUV420P)
        _pCodecContext->time_base.num = 1;
        _pCodecContext->frame_number = 1; //one video frame per packet (frame_number is normally maintained by the decoder itself)
        _pCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
        _pCodecContext->bit_rate = 0;
        _pCodecContext->time_base.den = 25;//frame rate
        _pCodecContext->width = 0;//video width
        _pCodecContext->height = 0;//video height
        _pCodecContext->pix_fmt = AV_PIX_FMT_YUVJ420P;
        _pCodecContext->color_range = AVCOL_RANGE_MPEG;
    
        if (avcodec_open2(_pCodecContext, _pH264VideoDecoder, NULL) >= 0)
            _pFrameYuv = av_frame_alloc();
        else
            return FALSE;
    
        return TRUE;
    }
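
    Note that av_register_all() has been deprecated since FFmpeg 4.0 and is no longer required. On newer FFmpeg versions the initialization could be reduced to something like the following sketch (not the code shipped in the download, just an illustration of the newer API):

    BOOL H264Decode::Init()
    {
        //With FFmpeg 4.x+ no global registration is needed, and the codec can be
        //passed straight to avcodec_alloc_context3().
        _pH264VideoDecoder = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (_pH264VideoDecoder == NULL)
            return FALSE;

        _pCodecContext = avcodec_alloc_context3(_pH264VideoDecoder);
        if (_pCodecContext == NULL)
            return FALSE;

        //Width, height and pixel format are read from the bitstream (SPS) by the
        //decoder itself, so they do not need to be filled in here.
        if (avcodec_open2(_pCodecContext, _pH264VideoDecoder, NULL) < 0)
            return FALSE;

        _pFrameYuv = av_frame_alloc();
        return _pFrameYuv != NULL;
    }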

    2 Feeding data into the decoder

    INT32 H264Decode::H264_PutVideoStream(char* buffer, INT32 bufferLen)
    {
        AVPacket packet = { 0 };
        packet.data = (uint8_t*)buffer;    //pointer to one complete H264 frame
        packet.size = bufferLen;        //size of that H264 frame in bytes
    
        int ret = avcodec_send_packet(_pCodecContext, &packet);
        return ret;
    }
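
    A small detail about the return value: avcodec_send_packet() returns 0 on success, AVERROR(EAGAIN) when the decoder's output queue must be drained before it will accept more input, and other negative values on real errors. The wrapper simply forwards that value, so a caller could interpret it roughly like this (a sketch only, with frameData/frameLen standing in for the caller's buffer):

    int ret = decoder.H264_PutVideoStream(frameData, frameLen);
    if (ret == AVERROR(EAGAIN))
    {
        //output queue full: call H264_GetNextVideoFrame() first, then resend this packet
    }
    else if (ret < 0)
    {
        //genuine error (corrupt data, decoder not opened, ...)
    }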

    3 Reading the decoded data (YUV format)

    INT32 H264Decode::H264_GetNextVideoFrame(char* buffer, INT32 bufferLen, INT32 yuFormate)
    {
        if (avcodec_receive_frame(_pCodecContext, _pFrameYuv) == 0)
        {
            int height = _pCodecContext->height;
            int width = _pCodecContext->width;

            //guard against writing past the caller's buffer: a YUV420 frame needs width * height * 3 / 2 bytes
            if (bufferLen < width * height * 3 / 2)
                return -1;

            //yuFormate == 1: write the planes in Y,U,V order; otherwise Y,V,U (chroma planes swapped)
            uint8_t* uPlane  = (yuFormate == 1) ? _pFrameYuv->data[1]     : _pFrameYuv->data[2];
            int      uStride = (yuFormate == 1) ? _pFrameYuv->linesize[1] : _pFrameYuv->linesize[2];
            uint8_t* vPlane  = (yuFormate == 1) ? _pFrameYuv->data[2]     : _pFrameYuv->data[1];
            int      vStride = (yuFormate == 1) ? _pFrameYuv->linesize[2] : _pFrameYuv->linesize[1];

            //copy the plane data row by row: linesize can be larger than the visible width because of alignment padding
            char* dst = buffer;
            for (int row = 0; row < height; row++, dst += width)
                memcpy(dst, _pFrameYuv->data[0] + row * _pFrameYuv->linesize[0], width);
            for (int row = 0; row < height / 2; row++, dst += width / 2)
                memcpy(dst, uPlane + row * uStride, width / 2);
            for (int row = 0; row < height / 2; row++, dst += width / 2)
                memcpy(dst, vPlane + row * vStride, width / 2);
            return 0;
        }
        return -1;
    }
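
    Putting sections 2 and 3 together, a caller typically drives the decoder in a feed/drain loop; one decoded YUV420 frame occupies width * height * 3 / 2 bytes. A minimal sketch follows. ReadNextH264Frame() is an assumed helper that returns one complete H264 frame (it is not part of the library), and the 1080p buffer size is just an example.

    #include <vector>

    H264Decode decoder;
    decoder.Init();

    std::vector<char> yuv(1920 * 1080 * 3 / 2);   //room for one YUV420 frame at the expected resolution
    char frameBuf[1 << 20];
    int frameLen;
    while ((frameLen = ReadNextH264Frame(frameBuf, sizeof(frameBuf))) > 0)
    {
        if (decoder.H264_PutVideoStream(frameBuf, frameLen) < 0)
            continue;

        //drain every frame the decoder has ready (usually one per packet here)
        while (decoder.H264_GetNextVideoFrame(yuv.data(), (INT32)yuv.size(), 1) == 0)
        {
            //process one Y/U/V frame ...
        }
    }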

    4 Getting the video as RGB; the frame can be scaled to the requested size at the same time.

    INT32 H264Decode::H264_GetNextVideoFrame_Rgb(char* buffer, INT32 bufferLen, INT32 width, INT32 height)
    {
        if (avcodec_receive_frame(_pCodecContext, _pFrameYuv) == 0)
        {
            ResetRgbScale(width, height);

            //guard against writing past the caller's buffer
            if (bufferLen < _out_rgb_buffer_len)
                return -1;

            uint8_t* data[3];
            data[0] = _pFrameYuv->data[0];
            data[1] = _pFrameYuv->data[2]; //swap the U and V planes
            data[2] = _pFrameYuv->data[1];

            _dst_dataTmp[0] = (uint8_t*)buffer; //scale straight into the caller's buffer to save one copy
            sws_scale(_img_convert_ctx, (const uint8_t* const*)data, _pFrameYuv->linesize, 0, _pCodecContext->height,
                _dst_dataTmp, _dst_linesize);
            return 0;
        }
        return -1;
    }
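
    ResetRgbScale() is not listed in the article. It presumably (re)creates the libswscale context and the output stride bookkeeping whenever the requested output size changes. Below is a hypothetical sketch, assuming packed RGB24 output, two extra members _dst_width/_dst_height for caching the last requested size, and SWS_BICUBIC standing in for whatever algorithm was configured via H264_SetAlgorithm:

    void H264Decode::ResetRgbScale(INT32 width, INT32 height)
    {
        if (_img_convert_ctx != NULL && width == _dst_width && height == _dst_height)
            return; //requested size unchanged, keep the existing context

        if (_img_convert_ctx != NULL)
            sws_freeContext(_img_convert_ctx);

        _dst_width = width;
        _dst_height = height;
        _img_convert_ctx = sws_getContext(
            _pCodecContext->width, _pCodecContext->height, _pCodecContext->pix_fmt, //source size/format
            width, height, AV_PIX_FMT_RGB24,                                        //destination size/format
            SWS_BICUBIC, NULL, NULL, NULL);                                         //scaling algorithm (assumed)

        //packed RGB24: a single plane with 3 bytes per pixel
        _out_rgb_buffer_len = width * height * 3;
        _dst_linesize[0] = width * 3;
        _dst_linesize[1] = _dst_linesize[2] = 0;
        _dst_dataTmp[1] = _dst_dataTmp[2] = NULL;
    }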

    The C wrapper interface

    extern "C"
    {
        LibFfmpegWrapper_API INT64 H264_CreateHandle();
        LibFfmpegWrapper_API INT32 H264_CloseHandle(INT64 handle);
    
        LibFfmpegWrapper_API INT32 H264_SetDefaultAlgorithm(INT32 flag);
        LibFfmpegWrapper_API INT32 H264_SetAlgorithm(INT64 handle, INT32 flag);
    
        LibFfmpegWrapper_API INT32 H264_PutVideoStream(INT64 handle, char* buffer, INT32 bufferLen);
        LibFfmpegWrapper_API INT32 H264_GetVideoParam(INT64 handle, INT32& width, INT32& height);
    
        LibFfmpegWrapper_API INT32 H264_GetVideoFrameSize(INT64 handle);
        LibFfmpegWrapper_API INT32 H264_GetVideoFrameFormate(INT64 handle);
    
        LibFfmpegWrapper_API INT32 H264_GetNextVideoFrame(INT64 handle, char* buffer, INT32 bufferLen, INT32 yuFormate);
    
        LibFfmpegWrapper_API INT32 H264_GetVideoFrameSize_Rgb(INT64 handle);
        LibFfmpegWrapper_API INT32 H264_GetNextVideoFrame_Rgb(INT64 handle, char* buffer, INT32 bufferLen);
    
        LibFfmpegWrapper_API INT32 H264_GetVideoFrameSize_Rgb2(INT64 handle, INT32 width, INT32 height);
        LibFfmpegWrapper_API INT32 H264_GetNextVideoFrame_Rgb2(INT64 handle, char* buffer, INT32 bufferLen, INT32 width, INT32 height);
    }
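
    A rough example of driving the exported C interface (from C++ here; the same calls map one-to-one onto the C# wrapper below). ReadNextH264Frame() is again an assumed helper returning one complete H264 frame, and error handling is trimmed:

    #include <vector>

    INT64 handle = H264_CreateHandle();

    char frame[1 << 20];
    INT32 frameLen;
    while ((frameLen = ReadNextH264Frame(frame, sizeof(frame))) > 0)
    {
        if (H264_PutVideoStream(handle, frame, frameLen) < 0)
            continue;

        //ask how many bytes a 640x360 RGB frame needs, then fetch and consume it
        INT32 rgbLen = H264_GetVideoFrameSize_Rgb2(handle, 640, 360);
        std::vector<char> rgb(rgbLen > 0 ? rgbLen : 0);
        while (rgbLen > 0 && H264_GetNextVideoFrame_Rgb2(handle, rgb.data(), rgbLen, 640, 360) == 0)
        {
            //one scaled 640x360 RGB frame is now in rgb ...
        }
    }
    H264_CloseHandle(handle);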

    The C# wrapper

    class H264Decode
        {
            public const int Decode_Result_OK = 0;
    
            long _handleDecode = 0;
    
            public static void SetDefaultAlgorithm(EN_H264Algorithm flag)
            {
                H264DecodeWrapper.SetDefaultAlgorithm((int)flag);
            }
    
            ~H264Decode()
            {
                Close();
            }
    
            public void Init()
            {
                _handleDecode = H264DecodeWrapper.CreateHandle();
            }
    
            public Int32 SetAlgorithm(EN_H264Algorithm flag)
            {
                return H264DecodeWrapper.SetAlgorithm(_handleDecode, (int)flag);
            }
    
            public void Close()
            {
                if (_handleDecode == 0)
                    return;
                H264DecodeWrapper.CloseHandle(_handleDecode);
                _handleDecode = 0;
            }
    
            public int PutVideoStream(byte[] buffer)
            {
                return PutVideoStream(buffer, buffer.Length);
            }
    
            public int PutVideoStream(byte[] buffer, Int32 bufferLen)
            {
                return H264DecodeWrapper.PutVideoStream(_handleDecode, buffer, bufferLen);
            }
    
            public int GetVideoOrgSize(out int width, out int height)
            {
                int result = H264DecodeWrapper.GetVideoParam(_handleDecode, out width, out height);
                return result;
            }
    
            public int GetVideoFrameSize()
            {
                int result = H264DecodeWrapper.GetVideoFrameSize(_handleDecode);
                return result;
            }
            public AVPixelFormat GetVideoFrameFormate()
            {
                int result = H264DecodeWrapper.GetVideoFrameFormate(_handleDecode);
                return (AVPixelFormat)result;
            }
    
            public int GetNextVideoFrame(byte[] buffer, Int32 bufferLen, EN_H264_YU_Formate formate)
            {
                int result = H264DecodeWrapper.GetNextVideoFrame(_handleDecode, buffer, bufferLen, formate);
                return result;
            }
            public int GetNextVideoFrame(byte[] buffer, EN_H264_YU_Formate formate)
            {
                int result = H264DecodeWrapper.GetNextVideoFrame(_handleDecode, buffer, buffer.Length, formate);
                return result;
            }
    
            public int GetVideoFrameSize_Rgb()
            {
                int result = H264DecodeWrapper.GetVideoFrameSize_Rgb(_handleDecode);
                return result;
            }
    
            public int GetNextVideoFrame_Rgb(byte[] buffer, Int32 bufferLen)
            {
                int result = H264DecodeWrapper.GetNextVideoFrame_Rgb(_handleDecode, buffer, bufferLen);
                return result;
            }
    
            public int GetVideoFrameSize_Rgb2(Int32 width, Int32 height)
            {
                int result = H264DecodeWrapper.GetVideoFrameSize_Rgb2(_handleDecode, width, height);
                return result;
            }
    
            public int GetNextVideoFrame_Rgb2(byte[] buffer, Int32 bufferLen, Int32 width, Int32 height)
            {
                int result = H264DecodeWrapper.GetNextVideoFrame_Rgb2(_handleDecode, buffer, bufferLen, width, height);
                return result;
            }
    
        }
    
        public enum EN_H264Algorithm
        {
            SWS_FAST_BILINEAR = 1,
            SWS_BILINEAR = 2,
            SWS_BICUBIC = 4,
            SWS_X = 8,
            SWS_POINT = 0x10,
            SWS_AREA = 0x20,
            SWS_BICUBLIN = 0x40,
            SWS_GAUSS = 0x80,
            SWS_SINC = 0x100,
            SWS_LANCZOS = 0x200,
            SWS_SPLINE = 0x400,
        }
    
        public class H264DecodeWrapper
        {
            private const string DLLName = "LibFfmpegWrapper.dll";
    
            [DllImport(DLLName, EntryPoint = "H264_CreateHandle", CallingConvention = CallingConvention.Cdecl)]
            private static extern long H264_CreateHandle();
    
            [DllImport(DLLName, EntryPoint = "H264_CloseHandle", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_CloseHandle(long handle);
    
            [DllImport(DLLName, EntryPoint = "H264_PutVideoStream", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_PutVideoStream(long handle, IntPtr buffer, Int32 bufferLen);
    
            [DllImport(DLLName, EntryPoint = "H264_GetVideoParam", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetVideoParam(long handle, IntPtr width, IntPtr height);
    
            [DllImport(DLLName, EntryPoint = "H264_GetVideoFrameSize", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetVideoFrameSize(long handle);
    
            [DllImport(DLLName, EntryPoint = "H264_GetVideoFrameFormate", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetVideoFrameFormate(long handle);
    
            [DllImport(DLLName, EntryPoint = "H264_GetNextVideoFrame", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetNextVideoFrame(long handle, IntPtr buffer, Int32 bufferLen, Int32 yuFormate);
    
    
            [DllImport(DLLName, EntryPoint = "H264_GetVideoFrameSize_Rgb", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetVideoFrameSize_Rgb(long handle);
    
            [DllImport(DLLName, EntryPoint = "H264_GetNextVideoFrame_Rgb", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetNextVideoFrame_Rgb(long handle, IntPtr buffer, Int32 bufferLen);
    
    
            [DllImport(DLLName, EntryPoint = "H264_GetVideoFrameSize_Rgb2", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetVideoFrameSize_Rgb2(long handle, Int32 width, Int32 height);
    
            [DllImport(DLLName, EntryPoint = "H264_GetNextVideoFrame_Rgb2", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_GetNextVideoFrame_Rgb2(long handle, IntPtr buffer, Int32 bufferLen, Int32 width, Int32 height);
    
            [DllImport(DLLName, EntryPoint = "H264_SetDefaultAlgorithm", CallingConvention = CallingConvention.Cdecl)]
            private static extern Int32 H264_SetDefaultAlgorithm(Int32 flag);
    
            [DllImport(DLLName, EntryPoint = "H264_SetAlgorithm", CallingConvention = CallingConvention.Cdecl)]
        private static extern Int32 H264_SetAlgorithm(long handle, Int32 flag);
    
    
            public static long CreateHandle()
            {
                return H264_CreateHandle();
            }
    
            public static long CloseHandle(long handle)
            {
                return H264_CloseHandle(handle);
            }
    
            public static int PutVideoStream(long handle, byte[] buffer, Int32 bufferLen)
            {
                GCHandle hin = GCHandle.Alloc(buffer, GCHandleType.Pinned);
                int result = H264_PutVideoStream(handle, hin.AddrOfPinnedObject(), bufferLen);
                hin.Free();
                return result;
            }
    
            public static int GetVideoParam(long handle, out int width, out int height)
            {
                width = 0;
                height = 0;
                byte[] width2 = new byte[4];
                byte[] height2 = new byte[4];
    
                GCHandle hin_width = GCHandle.Alloc(width2, GCHandleType.Pinned);
                GCHandle hin_height = GCHandle.Alloc(height2, GCHandleType.Pinned);
                int result = H264_GetVideoParam(handle, hin_width.AddrOfPinnedObject(), hin_height.AddrOfPinnedObject());
                hin_width.Free();
                hin_height.Free();
    
                if (result != 0)
                {
                    return result;
                }
    
                width = BitConverter.ToInt32(width2, 0);
                height = BitConverter.ToInt32(height2, 0);
                return result;
            }
    
            public static int GetVideoFrameSize(long handle)
            {
                int result = H264_GetVideoFrameSize(handle);
                return result;
            }
            public static int GetVideoFrameFormate(long handle)
            {
                int result = H264_GetVideoFrameFormate(handle);
                return result;
            }
    
            public static int GetNextVideoFrame(long handle, byte[] buffer, Int32 bufferLen, EN_H264_YU_Formate formate)
            {
                GCHandle hin = GCHandle.Alloc(buffer, GCHandleType.Pinned);
                int result = H264_GetNextVideoFrame(handle, hin.AddrOfPinnedObject(), bufferLen, (int)formate);
                hin.Free();
                return result;
            }
    
            public static int GetVideoFrameSize_Rgb(long handle)
            {
                int result = H264_GetVideoFrameSize_Rgb(handle);
                return result;
            }
    
            public static int GetNextVideoFrame_Rgb(long handle, byte[] buffer, Int32 bufferLen)
            {
                GCHandle hin = GCHandle.Alloc(buffer, GCHandleType.Pinned);
                int result = H264_GetNextVideoFrame_Rgb(handle, hin.AddrOfPinnedObject(), bufferLen);
                hin.Free();
                return result;
            }
    
            public static int GetVideoFrameSize_Rgb2(long handle, Int32 width, Int32 height)
            {
                int result = H264_GetVideoFrameSize_Rgb2(handle, width, height);
                return result;
            }
    
        public static Int32 SetDefaultAlgorithm(Int32 flag)
            {
                return H264_SetDefaultAlgorithm(flag);
            }
    
            public static Int32 SetAlgorithm(long handle, Int32 flag)
            {
            return H264_SetAlgorithm(handle, flag);
            }
    
            public static int GetNextVideoFrame_Rgb2(long handle, byte[] buffer, Int32 bufferLen, Int32 width, Int32 height)
            {
                GCHandle hin = GCHandle.Alloc(buffer, GCHandleType.Pinned);
                int result = H264_GetNextVideoFrame_Rgb2(handle, hin.AddrOfPinnedObject(), bufferLen, width, height);
                hin.Free();
                return result;
            }
    
            public enum EN_H264_YU_Formate
            {
                Y_U_V = 1,
                Y_V_U = 2,
            }
        }
Original post: https://www.cnblogs.com/yuanchenhui/p/ffmpeg-h264_decode.html