  • Streaming live H.264 video over RTSP with live555 on Windows

    As the title suggests, a recent project of mine had to forward processed H.264 video data to an upstream client over RTSP, on Windows with VS2013, so I went off searching Baidu and Google for code. I soon concluded that live555 is the easiest way to do this, but also discovered, rather painfully, that almost all the code people post online is for Linux. After two failed attempts at adapting such samples to Windows, I sank back into endless searching. Anyway, it was eventually solved, so I am posting the code here in the hope that it gives a head start to anyone with similar needs, and that more experienced readers will point out whatever problems it still has.

    RTSP playback with live555 is usually implemented by modifying the demo it ships for playing a local file. That demo is wrapped rather deeply, though, so directly turning its fread() call into a memory copy to get live streaming feels awkward. Drawing on code found online, this article defines a class derived from H264VideoFileServerMediaSubsession to handle the session, plus a class derived from FramedSource that performs the memory copy; the latter is precisely where serving a live stream differs from reading a local file.

    That was a lot said in one breath, so on to the code. If you need it, or would rather not set up the live555 environment yourself, you can also download the whole project (built with VS2013) from the link at the end of this article; if your VS version matches, it should run directly.

    Main file (program entry point)

    #include "H264LiveVideoServerMediaSubssion.hh"
    #include "H264FramedLiveSource.hh"
    #include "liveMedia.hh"
    #include "BasicUsageEnvironment.hh"
    
    #define BUFSIZE 1024*200
    
    static void announceStream(RTSPServer* rtspServer, ServerMediaSession* sms, char const* streamName)// print the RTSP URL for this stream
    {
    	char* url = rtspServer->rtspURL(sms);
    	UsageEnvironment& env = rtspServer->envir();
    	env << streamName << "\n";
    	env << "Play this stream using the URL \"" << url << "\"\n";
    	delete[] url;
    }
    
    int main(int argc, char** argv) 
    {
    	// set up the usage environment
    	UsageEnvironment* env;
    	Boolean reuseFirstSource = False;// if True, every client that connects sees the same stream as the first one; otherwise each new client restarts the stream from the beginning
    	TaskScheduler* scheduler = BasicTaskScheduler::createNew();
    	env = BasicUsageEnvironment::createNew(*scheduler);
    
    	// create the RTSP server
    	UserAuthenticationDatabase* authDB = NULL;
    	RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554, authDB);
    	if (rtspServer == NULL) {
    		*env << "Failed to create RTSP server: " << env->getResultMsg() << "
    ";
    		exit(1);
    	}
    	char const* descriptionString = "Session streamed by \"testOnDemandRTSPServer\"";
    
    	// Variables for the simulated live stream
    	int datasize;// size of the valid data in the buffer
    	unsigned char* databuf;// pointer to the data buffer
    	databuf = (unsigned char*)malloc(1024 * 1024);
    	bool dosent;// send flag: true means the buffer is ready to be (re)sent from the start; the source sets it back to true once the buffer has been fully delivered

    	// Copy BUFSIZE bytes from a file into a 1 MB buffer to simulate data arriving over the network.
    	// A real live feed would be a two-thread design, so remember to guard this buffer with a lock (see the sketch after main()).
    	// The actual copying into live555 happens in H264FramedLiveSource; here we only pass the pointer down to it.
    	FILE *pf = NULL;
    	fopen_s(&pf, "test.264", "rb");
    	if (pf == NULL) {
    		*env << "Failed to open test.264\n";
    		exit(1);
    	}
    	fread(databuf, 1, BUFSIZE, pf);
    	datasize = BUFSIZE;
    	dosent = true;
    	fclose(pf);
    
    	// Apart from the simulated input above, everything so far matches the live555 demo.
    	// Below, the subsession passed to addSubsession() is replaced with our own class, which is what turns file playback into live streaming.
    	char const* streamName = "h264ESVideoTest";
    	ServerMediaSession* sms = ServerMediaSession::createNew(*env, streamName, streamName, descriptionString);
    	sms->addSubsession(H264LiveVideoServerMediaSubssion::createNew(*env, reuseFirstSource, &datasize, databuf, &dosent));// our own H264LiveVideoServerMediaSubssion
    	rtspServer->addServerMediaSession(sms);

    	announceStream(rtspServer, sms, streamName);// print the RTSP URL for clients
    	env->taskScheduler().doEventLoop(); // event loop; waits for connections and does not return

    	free(databuf);// free the buffer (not reached, since doEventLoop() was given no watch variable)
    	return 0;
    }
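
    The comment in main() points out that a real live feed would use a second capture thread and a lock around the shared buffer; the original project stops at the file-based simulation. The fragment below is only a minimal sketch of what such a feeder could look like. The mutex g_bufMutex and the hook getFrameFromEncoder() are hypothetical names introduced here, not part of the project, and doGetNextFrame() would have to take the same mutex before touching the buffer.

    #include <mutex>
    #include <thread>
    #include <cstring>

    // Hypothetical capture hook -- replace with your real capture/encode call.
    // This stub returns 0 ("no data") so the sketch compiles on its own.
    static int getFrameFromEncoder(unsigned char* /*dst*/, int /*maxLen*/) { return 0; }

    static std::mutex g_bufMutex; // guards databuf, datasize and dosent

    static void captureLoop(unsigned char* databuf, int* datasize, bool* dosent)
    {
    	unsigned char local[1024 * 200];
    	for (;;) {
    		int n = getFrameFromEncoder(local, (int)sizeof(local)); // block until encoded data is ready
    		if (n <= 0) continue;

    		std::lock_guard<std::mutex> lock(g_bufMutex);
    		if (*dosent) {             // previous buffer fully delivered, safe to overwrite
    			memcpy(databuf, local, n);
    			*datasize = n;         // the next doGetNextFrame() restarts from the new data
    		}
    	}
    }

    // In main(), before doEventLoop():
    //   std::thread feeder(captureLoop, databuf, &datasize, &dosent);
    //   feeder.detach();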
    

    The custom subclass of H264VideoFileServerMediaSubsession

    H264LiveVideoServerMediaSubssion.hh

    #ifndef _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
    #define _H264_LIVE_VIDEO_SERVER_MEDIA_SUBSESSION_HH
    #include "H264VideoFileServerMediaSubsession.hh"
    
    class H264LiveVideoServerMediaSubssion : public H264VideoFileServerMediaSubsession {
    
    public:
    	static H264LiveVideoServerMediaSubssion* createNew(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char*  databuf, bool *dosent);
    
    protected: // we're a virtual base class
    	H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char*  databuf, bool *dosent);
    	~H264LiveVideoServerMediaSubssion();
    
    protected: // redefined virtual functions
    	FramedSource* createNewStreamSource(unsigned clientSessionId,unsigned& estBitrate);
    public:
    	char fFileName[100];
    
    	int *Server_datasize;// pointer to the size of the data buffer
    	unsigned char* Server_databuf;// pointer to the data buffer
    	bool *Server_dosent;// send flag
    };
    #endif
    H264LiveVideoServerMediaSubssion.cpp

    #include "H264LiveVideoServerMediaSubssion.hh"
    #include "H264FramedLiveSource.hh"
    #include "H264VideoStreamFramer.hh"
    
    H264LiveVideoServerMediaSubssion* H264LiveVideoServerMediaSubssion::createNew(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char*  databuf, bool *dosent)
    {
    	return new H264LiveVideoServerMediaSubssion(env, reuseFirstSource, datasize, databuf, dosent);
    }
    
    H264LiveVideoServerMediaSubssion::H264LiveVideoServerMediaSubssion(UsageEnvironment& env, Boolean reuseFirstSource, int *datasize, unsigned char*  databuf, bool *dosent)
    : H264VideoFileServerMediaSubsession(env, fFileName, reuseFirstSource)// H264VideoFileServerMediaSubsession itself is not modified,
    																	  // but its constructor requires a file name,
    																	  // so we simply hand it the (unused) fFileName member
    {
    	Server_datasize = datasize;// pointer to the size of the data buffer
    	Server_databuf = databuf;// pointer to the data buffer
    	Server_dosent = dosent;// send flag
    }
    
    H264LiveVideoServerMediaSubssion::~H264LiveVideoServerMediaSubssion()
    {
    }
    
    FramedSource* H264LiveVideoServerMediaSubssion::createNewStreamSource(unsigned clientSessionId, unsigned& estBitrate)
    {
    	/* Remain to do : assign estBitrate */
    	estBitrate = 1000; // kbps, estimate
    
    	// create the live video source
    	H264FramedLiveSource* liveSource = H264FramedLiveSource::createNew(envir(), Server_datasize, Server_databuf, Server_dosent);
    	if (liveSource == NULL)
    	{
    		return NULL;
    	}
    
    	// Create a framer for the Video Elementary Stream:
    	return H264VideoStreamFramer::createNew(envir(), liveSource);
    }
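
    Note that createNewStreamSource() is the only override needed here: the createNewRTPSink() inherited from H264VideoFileServerMediaSubsession already creates an H.264 RTP sink and works unchanged for the live source. For reference only, if you wanted to supply it yourself (and added a matching declaration to the header), the override would look roughly like this sketch, mirroring what the base class does:

    RTPSink* H264LiveVideoServerMediaSubssion::createNewRTPSink(Groupsock* rtpGroupsock,
    	unsigned char rtpPayloadTypeIfDynamic,
    	FramedSource* /*inputSource*/)
    {
    	// An RTP sink that packetizes H.264 NAL units, same as the base class.
    	return H264VideoRTPSink::createNew(envir(), rtpGroupsock, rtpPayloadTypeIfDynamic);
    }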

    The custom H264FramedLiveSource class

    H264FramedLiveSource.hh

    #ifndef _H264FRAMEDLIVESOURCE_HH
    #define _H264FRAMEDLIVESOURCE_HH
    
    #include <FramedSource.hh>
    
    
    class H264FramedLiveSource : public FramedSource
    {
    public:
    	static H264FramedLiveSource* createNew(UsageEnvironment& env, int *datasize, unsigned char*  databuf, bool *dosent, unsigned preferredFrameSize = 0, unsigned playTimePerFrame = 0);
    
    protected:
    	H264FramedLiveSource(UsageEnvironment& env, int *datasize, unsigned char*  databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame);
    	~H264FramedLiveSource();
    
    private:
    	virtual void doGetNextFrame();
    	int TransportData(unsigned char* to, unsigned maxSize);// declared but not used in this demo
    
    protected:
    	int *Framed_datasize;// pointer to the size of the data buffer
    	unsigned char *Framed_databuf;// pointer to the data buffer
    	bool *Framed_dosent;// send flag

    	int readbufsize;// number of bytes already delivered from the buffer
    	int bufsizel;// total number of bytes in the buffer
    };
    
    #endif
    H264FramedLiveSource.cpp

    #include "H264FramedLiveSource.hh"
    
    H264FramedLiveSource::H264FramedLiveSource(UsageEnvironment& env, int *datasize, unsigned char*  databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
    : FramedSource(env)
    {
    	Framed_datasize = datasize;// pointer to the size of the data buffer
    	Framed_databuf = databuf;// pointer to the data buffer
    	Framed_dosent = dosent;// send flag
    }
    
    H264FramedLiveSource* H264FramedLiveSource::createNew(UsageEnvironment& env, int *datasize, unsigned char*  databuf, bool *dosent, unsigned preferredFrameSize, unsigned playTimePerFrame)
    {
    	H264FramedLiveSource* newSource = new H264FramedLiveSource(env, datasize, databuf, dosent, preferredFrameSize, playTimePerFrame);
    	return newSource;
    }
    
    H264FramedLiveSource::~H264FramedLiveSource()
    {
    }
    
    void H264FramedLiveSource::doGetNextFrame()
    {
    	if (*Framed_dosent == true)
    	{
    		*Framed_dosent = false;
    		bufsizel = *Framed_datasize;
    		readbufsize = 0;
    
    		fFrameSize = ((unsigned)bufsizel < fMaxSize) ? (unsigned)bufsizel : fMaxSize;// never copy more than the buffer actually holds
    		memcpy(fTo, Framed_databuf + readbufsize, fFrameSize);
    		readbufsize += fFrameSize;
    	}
    	else
    	{
    		if (bufsizel - readbufsize > (int)fMaxSize)
    		{
    			fFrameSize = fMaxSize;
    			memcpy(fTo, Framed_databuf + readbufsize, fFrameSize);
    			readbufsize += fFrameSize;
    		}
    		else
    		{
    			fFrameSize = bufsizel - readbufsize;// fFrameSize tells live555 how many bytes are being delivered, so it must be set correctly
    			memcpy(fTo, Framed_databuf + readbufsize, fFrameSize);
    			*Framed_dosent = true;
    		}
    	}
    
    	nextTask() = envir().taskScheduler().scheduleDelayedTask(0, (TaskFunc*)FramedSource::afterGetting, this);// schedule afterGetting() to run after a 0-microsecond delay, i.e. on the next pass of the event loop
    	return;
    }
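
    One thing the source above never touches is fPresentationTime / fDurationInMicroseconds; in this demo the downstream H264VideoStreamFramer generates its own presentation times. If you adapt the class to a real capture pipeline and want to stamp each delivery with its capture time yourself, a minimal sketch (assuming live555's portable gettimeofday() from GroupsockHelper.hh) could look like this, called from doGetNextFrame() just before scheduling afterGetting():

    #include "GroupsockHelper.hh" // live555's gettimeofday() replacement on Windows

    // Sketch: stamp a delivery with the current wall-clock time.
    // Usage inside doGetNextFrame(): stampLiveFrame(fPresentationTime, fDurationInMicroseconds);
    static void stampLiveFrame(struct timeval& presentationTime, unsigned& durationInMicroseconds)
    {
    	gettimeofday(&presentationTime, NULL);
    	durationInMicroseconds = 0; // a live source may leave the duration at 0
    }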
    


    Project download: see the link in the original post.



  • Original post: https://www.cnblogs.com/weixinhum/p/3916676.html