  Notes on my recent RTP and H.264 learning - 04.20

      I still haven't gotten it working, but it feels like I'm getting close, haha (where does that confidence come from?).

      1. Receiving data over RTP

    #region 1 - RTP variable declarations
            RTPSession session;
            RTPReceiver receiver;
            RTPParticipant participant;
            private Dictionary<uint, List<RTPPacket>> Clients;
    #endregion  
    
    
    #region Initialize RTP and start receiving; once this runs, packets are delivered to the callback
                session = new RTPSession();
                receiver = new RTPReceiver();
                IPEndPoint rtpEp = new IPEndPoint(IPAddress.Parse("192.168.1.109"), 5000);
                participant = new RTPParticipant(rtpEp);
                receiver.AddParticipant(participant);
                session.NewRTPPacket = new RTPSession.NewRTPPacket_Callback(NewRTPPacket);
                session.AddReceiver(receiver);
                Clients = new Dictionary<uint, List<RTPPacket>>();
    #endregion

      Here NewRTPPacket is a delegate of the type

    public delegate bool NewRTPPacket_Callback(
        RTPPacket packet
    )

    packet is the RTP packet that was received. We process these packets to assemble the frames we want, then decode each frame to get the image (at least, that is how I understand it).
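
      For reference, the handler registered above would be a method with this signature; a minimal sketch (the real body, including frame reassembly, is shown in section 3) might look like:

    // Minimal sketch of a method matching NewRTPPacket_Callback; the actual
    // per-packet logic (buffering per SSRC, reassembly on the marker bit) is in section 3.
    private bool NewRTPPacket(RTPPacket packet)
    {
        // TODO: buffer the packet and reassemble a frame when packet.Marker is set
        return true; // the code in section 3 also returns true after handling a packet
    }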

      2. Decoding H.264

      I found a HiSilicon DLL online that can decode H.264.

      

    #region Decoder variable declarations
            /// <summary>
            /// Pointer to the (pinned) frame data
            /// </summary>
            IntPtr pData;
            /// <summary>
            /// Decoder attribute information
            /// </summary>
            public H264Dec.hiH264_DEC_ATTR_S decAttr;
            /// <summary>
            /// Decoder output frame information
            /// </summary>
            public H264Dec.hiH264_DEC_FRAME_S _decodeFrame = new H264Dec.hiH264_DEC_FRAME_S();
            /// <summary>
            /// Decoder handle
            /// </summary>
            public IntPtr _decHandle;
    #endregion
    
    
     #region Decoder initialization, typically done in the window's Load event
                decAttr = new H264Dec.hiH264_DEC_ATTR_S();
                decAttr.uPictureFormat = 0;
                decAttr.uStreamInType = 0;
                decAttr.uPicWidthInMB = 480 >> 4;   // width in macroblocks (pixels / 16)
                decAttr.uPicHeightInMB = 640 >> 4;  // height in macroblocks (pixels / 16)
                decAttr.uBufNum = 8;
                decAttr.uWorkMode = 16;
                // Create and initialize the decoder handle
                _decHandle = H264Dec.Hi264DecCreate(ref decAttr);
                //_decodeFrame = new H264Dec.hiH264_DEC_FRAME_S();
    #endregion
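
      The decoder class below also exposes Hi264DecDestroy, so presumably the handle should be released when the window closes; a minimal sketch:

    // Sketch: release the decoder handle on shutdown (e.g. in the FormClosing handler).
    if (_decHandle != IntPtr.Zero)
    {
        H264Dec.Hi264DecDestroy(_decHandle);
        _decHandle = IntPtr.Zero;
    }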
    
    
    // This is the H.264 decoding code. Functions and variables not declared here are given below.
    // It mainly converts the decoded YUV to RGB and then saves the result as a bitmap file.
    if (H264Dec.Hi264DecAU(_decHandle, pData, (uint)newData.Length, 0, ref _decodeFrame, 0) == 0)
                    {
                        if (_decodeFrame.bError == 0)
                        {
                            // Compute the lengths of the Y, U and V planes
                            var yLength = _decodeFrame.uHeight * _decodeFrame.uYStride;
                            var uLength = _decodeFrame.uHeight * _decodeFrame.uUVStride / 2;
                            var vLength = uLength;
                            var yBytes = new byte[yLength];
                            var uBytes = new byte[uLength];
                            var vBytes = new byte[vLength];
                            var decodedBytes = new byte[yLength + uLength + vLength];
    
                            // _decodeFrame is the decoded frame: it holds the YUV plane pointers, width, height, etc.
    
                            Marshal.Copy(_decodeFrame.pY, yBytes, 0, (int)yLength);
                            Marshal.Copy(_decodeFrame.pU, uBytes, 0, (int)uLength);
                            Marshal.Copy(_decodeFrame.pV, vBytes, 0, (int)vLength);
    
                            // Pack the YUV planes copied from _decodeFrame into decodedBytes
                            Array.Copy(yBytes, decodedBytes, yLength);
                            Array.Copy(uBytes, 0, decodedBytes, yLength, uLength);
                            Array.Copy(vBytes, 0, decodedBytes, yLength + uLength, vLength);
    
                            ConvertYUV2RGB(decodedBytes, rgb, width, height);
                            // Write the BMP file (verbatim string so the backslashes are not escape sequences)
                            WriteBMP(rgb, width, height, string.Format(@"E:\test\yuv2bmp_{0}.bmp", index++));
                        }
                    }
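
      The decoder class also defines return-code constants (HI_H264DEC_OK, HI_H264DEC_NEED_MORE_BITS, HI_H264DEC_NO_PICTURE, HI_H264DEC_ERR_HANDLE). If my reading of those names is right, checking the return value explicitly makes it easier to see why no picture comes out; a sketch:

    // Sketch: distinguish the documented return codes instead of only testing for 0.
    int ret = H264Dec.Hi264DecAU(_decHandle, pData, (uint)newData.Length, 0, ref _decodeFrame, 0);
    switch (ret)
    {
        case H264Dec.HI_H264DEC_OK:             break; // a frame was produced; convert and save it
        case H264Dec.HI_H264DEC_NEED_MORE_BITS: break; // the decoder needs more input before it can output a frame
        case H264Dec.HI_H264DEC_NO_PICTURE:     break; // no more pictures are available
        case H264Dec.HI_H264DEC_ERR_HANDLE:     break; // the decoder handle is invalid
    }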

      Here pData is the frame of data the decoder needs. Since pData is an IntPtr while a frame is a byte[], I looked up how to convert between the two; the code is below (newData is the byte[], pData is the IntPtr).

     GCHandle hObject = GCHandle.Alloc(newData, GCHandleType.Pinned);
     pData = hObject.AddrOfPinnedObject();
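
      One thing worth noting about this pattern: the pinned handle should be freed after the decode call, otherwise the buffer stays pinned in memory. The usual shape is roughly:

    // Sketch: pin the managed buffer only for the duration of the decode call.
    GCHandle hObject = GCHandle.Alloc(newData, GCHandleType.Pinned);
    try
    {
        IntPtr pData = hObject.AddrOfPinnedObject();
        // ... call H264Dec.Hi264DecAU(_decHandle, pData, (uint)newData.Length, 0, ref _decodeFrame, 0) here ...
    }
    finally
    {
        hObject.Free(); // unpin the buffer so the GC can move/collect it again
    }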

      The H.264 decoder class:

    public class H264Dec
        {
            public const int HI_SUCCESS = 0;
    
            public const int HI_FAILURE = -1;
    
            public const int HI_LITTLE_ENDIAN = 1234;
    
            public const int HI_BIG_ENDIAN = 4321;
    
            public const int HI_DECODER_SLEEP_TIME = 60000;
    
            public const int HI_H264DEC_OK = 0;
    
            public const int HI_H264DEC_NEED_MORE_BITS = -1;
    
            public const int HI_H264DEC_NO_PICTURE = -2;
    
            public const int HI_H264DEC_ERR_HANDLE = -3;
    
    
    
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecImageEnhance", CallingConvention = CallingConvention.Cdecl)]
            public static extern int Hi264DecImageEnhance(IntPtr hDec, ref hiH264_DEC_FRAME_S pDecFrame, uint uEnhanceCoeff);
    
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecCreate", CallingConvention = CallingConvention.Cdecl)]
            public static extern IntPtr Hi264DecCreate(ref hiH264_DEC_ATTR_S pDecAttr);
    
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecDestroy", CallingConvention = CallingConvention.Cdecl)]
            public static extern void Hi264DecDestroy(IntPtr hDec);
    
    
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecGetInfo", CallingConvention = CallingConvention.Cdecl)]
            public static extern int Hi264DecGetInfo(ref hiH264_LIBINFO_S pLibInfo);
    
            /// <summary>
            /// Decodes a segment of the input bitstream and outputs images frame by frame
            /// </summary>
            /// <param name="hDec">Decoder handle</param>
            /// <param name="pStream">Start address of the bitstream</param>
            /// <param name="iStreamLen">Length of the bitstream</param>
            /// <param name="ullPTS">Timestamp information</param>
            /// <param name="pDecFrame">Frame information</param>
            /// <param name="uFlags">Decode mode. 0: normal decoding; 1: decoding finished, ask the decoder to output any remaining images</param>
            /// <returns></returns>
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecFrame", CallingConvention = CallingConvention.Cdecl)]
            public static extern int Hi264DecFrame(IntPtr hDec, IntPtr pStream, uint iStreamLen, ulong ullPTS, ref hiH264_DEC_FRAME_S pDecFrame, uint uFlags);
    
            [DllImport("hi_h264dec_w.dll", EntryPoint = "Hi264DecAU", CallingConvention = CallingConvention.Cdecl)]
            public static extern int Hi264DecAU(IntPtr hDec, IntPtr pStream, uint iStreamLen, ulong ullPTS, ref hiH264_DEC_FRAME_S pDecFrame, uint uFlags);
            /// <summary>
            /// Decoder attribute information.
            /// </summary>
            [StructLayout(LayoutKind.Sequential)]
            public struct hiH264_DEC_ATTR_S
            {
                /// <summary>
                /// Output picture format; currently the library only supports YUV420
                /// </summary>
                public uint uPictureFormat;
                /// <summary>
                /// Input stream format. 0x00: currently the library only supports byte-stream H.264 with "00 00 01" as the NALU separator
                /// </summary>
                public uint uStreamInType;
                /// <summary>
                /// Picture width (in macroblocks)
                /// </summary>
                public uint uPicWidthInMB;
                /// <summary>
                /// Picture height (in macroblocks)
                /// </summary>
                public uint uPicHeightInMB;
                /// <summary>
                /// Number of reference frames
                /// </summary>
                public uint uBufNum;
                /// <summary>
                /// Decoder work mode
                /// </summary>
                public uint uWorkMode;
                /// <summary>
                /// User private data
                /// </summary>
                public IntPtr pUserData;
                /// <summary>
                /// Reserved
                /// </summary>
                public uint uReserved;
    
            }
    
            /// <summary>
            /// Decoder output frame information
            /// </summary>
            [StructLayout(LayoutKind.Sequential)]
            public struct hiH264_DEC_FRAME_S
            {
                /// <summary>
                /// Address of the Y plane
                /// </summary>
                public IntPtr pY;
                /// <summary>
                /// Address of the U plane
                /// </summary>
                public IntPtr pU;
                /// <summary>
                /// Address of the V plane
                /// </summary>
                public IntPtr pV;
                /// <summary>
                /// Picture width (in pixels)
                /// </summary>
                public uint uWidth;
                /// <summary>
                /// Picture height (in pixels)
                /// </summary>
                public uint uHeight;
                /// <summary>
                /// Stride of the output Y plane (in pixels)
                /// </summary>
                public uint uYStride;
                /// <summary>
                /// Stride of the output U/V planes (in pixels)
                /// </summary>
                public uint uUVStride;
                /// <summary>
                /// Cropping information: pixels cropped from the left edge
                /// </summary>
                public uint uCroppingLeftOffset;
                /// <summary>
                /// Cropping information: pixels cropped from the right edge
                /// </summary>
                public uint uCroppingRightOffset;
                /// <summary>
                /// Cropping information: pixels cropped from the top edge
                /// </summary>
                public uint uCroppingTopOffset;
                /// <summary>
                /// Cropping information: pixels cropped from the bottom edge
                /// </summary>
                public uint uCroppingBottomOffset;
                /// <summary>
                /// Index of the output picture in the DPB
                /// </summary>
                public uint uDpbIdx;
                /// <summary>
                /// Picture type: 0: frame; 1: top field; 2: bottom field
                /// </summary>
                public uint uPicFlag;
                /// <summary>
                /// Whether the decoded picture contains errors: 0: no error; 1: error
                /// </summary>
                public uint bError;
                /// <summary>
                /// Whether the picture is an IDR frame: 0: not an IDR frame; 1: IDR frame
                /// </summary>
                public uint bIntra;
                /// <summary>
                /// Timestamp
                /// </summary>
                public ulong ullPTS;
                /// <summary>
                /// Picture ID
                /// </summary>
                public uint uPictureID;
                /// <summary>
                /// Reserved
                /// </summary>
                public uint uReserved;
                /// <summary>
                /// Pointer to user private data
                /// </summary>
                public IntPtr pUserData;
    
            }
    
    
            /// <summary>
            /// Decoder library version, copyright and capability information.
            /// </summary>
            [StructLayout(LayoutKind.Sequential)]
            public struct hiH264_LIBINFO_S
            {
                /// <summary>
                /// Major version number
                /// </summary>
                public uint uMajor;
                /// <summary>
                /// Minor version number
                /// </summary>
                public uint uMinor;
                /// <summary>
                /// Release number
                /// </summary>
                public uint uRelease;
                /// <summary>
                /// Build number
                /// </summary>
                public uint uBuild;
                /// <summary>
                /// Version string
                /// </summary>
                [MarshalAs(UnmanagedType.LPStr)]
                public string sVersion;
                /// <summary>
                /// Copyright string
                /// </summary>
                [MarshalAs(UnmanagedType.LPStr)]
                public string sCopyRight;
                /// <summary>
                /// Decoder library capability set
                /// </summary>
                public uint uFunctionSet;
                /// <summary>
                /// Supported output picture formats
                /// </summary>
                public uint uPictureFormat;
                /// <summary>
                /// Input stream format
                /// </summary>
                public uint uStreamInType;
                /// <summary>
                /// Maximum picture width (in pixels)
                /// </summary>
                public uint uPicWidth;
                /// <summary>
                /// Maximum picture height (in pixels)
                /// </summary>
                public uint uPicHeight;
                /// <summary>
                /// Maximum number of reference frames
                /// </summary>
                public uint uBufNum;
                /// <summary>
                /// Reserved
                /// </summary>
                public uint uReserved;
    
            }
    
            /// <summary>
            /// User private data information.
            /// </summary>
            [StructLayout(LayoutKind.Sequential)]
            public struct hiH264_USERDATA_S
            {
                /// <summary>
                /// User data type
                /// </summary>
                public uint uUserDataType;
                /// <summary>
                /// User data length
                /// </summary>
                public uint uUserDataSize;
                /// <summary>
                /// User data buffer
                /// </summary>
                public IntPtr pData;
                /// <summary>
                /// Pointer to the next segment of user data
                /// </summary>
                public IntPtr pNext;
            }
        }

      Below is the YUV-to-RGB conversion and the BMP writing code.

            /// <summary>
            /// Writes the converted RGB image data to a file in BMP format.
            /// </summary>
            /// <param name="rgbFrame">RGB image data.</param>
            /// <param name="width">Image width (in pixels).</param>
            /// <param name="height">Image height (in pixels).</param>
            /// <param name="bmpFile">BMP file name.</param>
            static void WriteBMP(byte[] rgbFrame, int width, int height, string bmpFile)
            {
                // Write the BMP image file; each row is padded to a multiple of 4 bytes.
                int yu = width * 3 % 4;
                int bytePerLine = 0;
                yu = yu != 0 ? 4 - yu : yu;
                bytePerLine = width * 3 + yu;
    
                using (FileStream fs = File.Open(bmpFile, FileMode.Create))
                {
                    using (BinaryWriter bw = new BinaryWriter(fs))
                    {
                        bw.Write('B');
                        bw.Write('M');
                        bw.Write(bytePerLine * height + 54);
                        bw.Write(0);
                        bw.Write(54);
                        bw.Write(40);
                        bw.Write(width);
                        bw.Write(height);
                        bw.Write((ushort)1);
                        bw.Write((ushort)24);
                        bw.Write(0);
                        bw.Write(bytePerLine * height);
                        bw.Write(0);
                        bw.Write(0);
                        bw.Write(0);
                        bw.Write(0);
    
                        byte[] data = new byte[bytePerLine * height];
                        int gIndex = width * height;
                        int bIndex = gIndex * 2;
    
                        for (int y = height - 1, j = 0; y >= 0; y--, j++)
                        {
                            for (int x = 0, i = 0; x < width; x++)
                            {
                                data[y * bytePerLine + i++] = rgbFrame[bIndex + j * width + x];    // B
                                data[y * bytePerLine + i++] = rgbFrame[gIndex + j * width + x];    // G
                                data[y * bytePerLine + i++] = rgbFrame[j * width + x];  // R
                            }
                        }
    
                        bw.Write(data, 0, data.Length);
                        bw.Flush();
                    }
                }
            }
    
            /// <summary>
            /// Converts one frame of YUV image data into one frame of RGB image data.
            /// </summary>
            /// <param name="yuvFrame">YUV image data.</param>
            /// <param name="rgbFrame">RGB image data.</param>
            /// <param name="width">Image width (in pixels).</param>
            /// <param name="height">Image height (in pixels).</param>
            static void ConvertYUV2RGB(byte[] yuvFrame, byte[] rgbFrame, int width, int height)
            {
                int uIndex = width * height;
                int vIndex = uIndex + ((width * height) >> 2);
                int gIndex = width * height;
                int bIndex = gIndex * 2;
    
                int temp = 0;
    
                for (int y = 0; y < height; y++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        // R component
                        temp = (int)(yuvFrame[y * width + x] + (yuvFrame[vIndex + (y / 2) * (width / 2) + x / 2] - 128) * YUV2RGB_CONVERT_MATRIX[0, 2]);
                        rgbFrame[y * width + x] = (byte)(temp < 0 ? 0 : (temp > 255 ? 255 : temp));
    
                        // G component
                        temp = (int)(yuvFrame[y * width + x] + (yuvFrame[uIndex + (y / 2) * (width / 2) + x / 2] - 128) * YUV2RGB_CONVERT_MATRIX[1, 1] + (yuvFrame[vIndex + (y / 2) * (width / 2) + x / 2] - 128) * YUV2RGB_CONVERT_MATRIX[1, 2]);
                        rgbFrame[gIndex + y * width + x] = (byte)(temp < 0 ? 0 : (temp > 255 ? 255 : temp));
    
                        // B component
                        temp = (int)(yuvFrame[y * width + x] + (yuvFrame[uIndex + (y / 2) * (width / 2) + x / 2] - 128) * YUV2RGB_CONVERT_MATRIX[2, 1]);
                        rgbFrame[bIndex + y * width + x] = (byte)(temp < 0 ? 0 : (temp > 255 ? 255 : temp));
                    }
                }
            }
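
      ConvertYUV2RGB refers to a YUV2RGB_CONVERT_MATRIX field that is not included in the snippet above. It is presumably the usual BT.601 coefficient table; a definition along the following lines would make the snippet compile (treat the exact values as my assumption, not part of the original code):

    // Assumed BT.601 YUV->RGB coefficients; only [0,2], [1,1], [1,2] and [2,1] are used above.
    static readonly double[,] YUV2RGB_CONVERT_MATRIX = new double[3, 3]
    {
        { 1.0,  0.0,     1.4075 },   // R = Y + 1.4075 * (V - 128)
        { 1.0, -0.3455, -0.7169 },   // G = Y - 0.3455 * (U - 128) - 0.7169 * (V - 128)
        { 1.0,  1.779,   0.0    }    // B = Y + 1.779  * (U - 128)
    };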

      

    3. This is probably where my problem is: how do I turn the packet data in the RTP packets into one frame of image data? The material I found is:

      

    #region Process the received packets
                if (!Clients.ContainsKey(packet.SSRC)) // first packet received from this source (SSRC): add it to the dictionary
                {
                    if (Clients.Count < 4) // track at most 4 senders; packets from additional sources are dropped
                    {
                        Clients.Add(packet.SSRC, new List<RTPPacket> { packet });
                        //ImagesBoxMapping[ImagesBoxMapping.First(pair => pair.Value == null).Key] = packet.SSRC;
                    }
                }
                else
                {
                    Clients[packet.SSRC].Add(packet);
                }
    
                if (packet.Marker) // marker bit set: the last packet of this frame has arrived
                {
                    // Packet-loss detection
                    var orderPackets = Clients[packet.SSRC].OrderBy(rtpPacket => rtpPacket.SequenceNumber);
                    if (Clients[packet.SSRC].Count != (orderPackets.Last().SequenceNumber - orderPackets.First().SequenceNumber + 1))
                    {
                        Clients[packet.SSRC].Clear(); // clear the buffer
                        return true;
                    }
    
                    // 1. Packet reassembly
                    var count = Clients[packet.SSRC].Sum(rtpPacket => rtpPacket.DataSize); // total payload size
    
                    var newData = new byte[count];
    
                    long offSet = 0;
                    foreach (var rtpPacket in Clients[packet.SSRC])
                    {
                        Array.Copy(rtpPacket.DataPointer, 0, newData, offSet, rtpPacket.DataSize);
                        offSet += rtpPacket.DataSize;
                    }
                    Clients[packet.SSRC].Clear(); // clear the buffer
                    // newData is then pinned and handed to the H.264 decoder (see section 2)
                }
    #endregion
      My understanding was that newData now holds one frame of data, but when I tested it that turned out not to be the case (ugh).
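
      One likely reason (this depends on how the sender packetizes, so take it as a guess): with the standard H.264-over-RTP payload format (RFC 6184), the payload does not carry Annex-B start codes, and large NAL units are split into FU-A fragments whose original NAL header has to be rebuilt from the FU indicator and FU header bytes. Simply concatenating the raw payloads therefore does not give the decoder the byte stream it expects. A rough per-packet sketch of that depacketization:

    // Rough RFC 6184 depacketization sketch. 'payload' is the raw RTP payload of one packet;
    // the rebuilt data (start code + NAL unit) is appended to a MemoryStream kept per SSRC.
    // 3-byte start code ("00 00 01"), which is what the HiSilicon library's docs say it expects.
    static readonly byte[] StartCode = { 0x00, 0x00, 0x01 };

    static void AppendNal(System.IO.MemoryStream annexB, byte[] payload)
    {
        int nalType = payload[0] & 0x1F;
        if (nalType >= 1 && nalType <= 23)              // single NAL unit packet
        {
            annexB.Write(StartCode, 0, StartCode.Length);
            annexB.Write(payload, 0, payload.Length);
        }
        else if (nalType == 28)                         // FU-A fragment
        {
            bool start = (payload[1] & 0x80) != 0;
            if (start)
            {
                // Rebuild the original NAL header: F/NRI bits from the FU indicator, type from the FU header.
                byte nalHeader = (byte)((payload[0] & 0xE0) | (payload[1] & 0x1F));
                annexB.Write(StartCode, 0, StartCode.Length);
                annexB.WriteByte(nalHeader);
            }
            annexB.Write(payload, 2, payload.Length - 2); // skip FU indicator + FU header
        }
        // Other packetization types (e.g. STAP-A = 24) are omitted in this sketch.
    }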

      4. Summary

      These past few days I have been eager to get this working quickly, so I haven't been able to dig deep into the video side of things. For example: how do the received packets become a frame, how do I extract the data I need from a frame, and what exactly are PPS, SPS and IDR? I know the terms, but I can't explain them clearly.

      My decoding approach is: receive packets over RTP (this step works), convert the packet data into frames (this is probably where it goes wrong, or the incoming frame data doesn't meet the decoder's requirements), pass one frame of data to the H.264 decoder class, which outputs YUV, then YUV -> RGB -> image for display. That is my plan, but it hasn't worked. If you know this area, I would appreciate your guidance. Thanks.

      Write a little every day, and you improve a little every day.

      Evening update:

      The H.264 start code is sometimes 0x00000001 and sometimes 0x000001. There are two start codes: the 3-byte 0x000001 and the 4-byte 0x00000001. The 3-byte 0x000001 is used in only one situation: when a complete frame is encoded as multiple slices, the NALUs carrying those slices use the 3-byte start code; everywhere else the 4-byte code is used. The HiSilicon decoding library states clearly that it can only decode NALUs with the 0x000001 start code, while everything I tested used the 4-byte 0x00000001, so this is probably part of the problem. Sigh, weak fundamentals really do lead to mistakes. The open-source VLC.NET can handle the H.264 stream sent over RTP; I'll write that up tomorrow.
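
      As a quick way to confirm which start codes the incoming data actually uses, a small scan like the sketch below can be run over the reassembled buffer (purely a debugging aid):

    // Debugging sketch: report which Annex-B start code prefixes appear in a buffer.
    static void DumpStartCodes(byte[] data)
    {
        for (int i = 0; i + 3 < data.Length; i++)
        {
            if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1)
            {
                Console.WriteLine("4-byte start code (00 00 00 01) at offset {0}", i);
                i += 3;
            }
            else if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1)
            {
                Console.WriteLine("3-byte start code (00 00 01) at offset {0}", i);
                i += 2;
            }
        }
    }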

      

Original post: https://www.cnblogs.com/kui0621/p/4442873.html