  • FFmpeg + cocos2d video test

    // Core: per-frame update loop


    void AVdio::update(float delta)
    {
    	auto video = (Sprite*)(this->getChildByName("video"));
    	if (video != nullptr)
    	{
    		// Presentation time of the last decoded frame, in milliseconds (pts * time_base).
    		double      tims = fl->_pts * fl->_timeBase * 1000;
    		// Wall-clock time since playback started.
    		double      elsped = _timestamp.getElapsedTimeInMilliSec();
    		double      sleeps = (tims - elsped);
    
    		s = sleeps + elsped;	// collapses to s = tims
    
    		// Decode the next frame only once the wall clock has passed the frame's presentation time.
    		if (elsped - s > 0.0001)
    		{
    			is = ffReader.readFrame(*fl);
    			if (is)
    			{
    				// Upload the freshly decoded RGB24 pixels as the sprite's texture.
    				texture->initWithData((const unsigned char*)fl->_data, fl->_dataSize, Texture2D::PixelFormat::RGB888,
    					fl->_width, fl->_height,
    					Size(fl->_width, fl->_height));
    
    				video->initWithTexture(texture);
    				video->setContentSize(Size(displayex.width - 10, displayex.height - 50));
    			}
    			else
    			{
    				// End of stream (or read error): drop the video sprite.
    				video->removeFromParent();
    			}
    		}
    	}
    
    }
    

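    The check works because "sleeps + elsped" collapses to "tims", so update() simply asks whether
    the wall clock has passed the last frame's presentation time (pts * time_base, converted to
    milliseconds). A minimal sketch of the same pacing test written directly, assuming the same
    FrameInfor fields and Timestamp helper declared in the header below:

    	// Sketch only: equivalent pacing check (names reuse the globals from AVdio.cpp).
    	double presentationMs = fl->_pts * fl->_timeBase * 1000.0;	// pts * time_base gives seconds
    	double elapsedMs = _timestamp.getElapsedTimeInMilliSec();
    	if (elapsedMs >= presentationMs)
    	{
    		// the current frame is overdue: decode and upload the next one
    	}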

    AVdio.h


    #ifndef __AVDIO_H__
    #define __AVDIO_H__
    
    #include "cocos2d.h"
    using namespace cocos2d;
    #include <string>
    #include <iostream>
    #include "cocos/ui/CocosGUI.h"
    #include "socket/Private/ThreadPool.h"
    #include "Tools/Tools.h"
    #include "xml/XML.h"
    #include "iconv/UTF8.h"
    #ifdef _WIN32
    #include "ATBAudioEngine/ATBAudioEngine.h"
    #endif
    using namespace std;
    using namespace ui;
    
    
    class AVdio : public LayerColor, public EditBoxDelegate
    {
    	Tools m_Tools;
    public:
    
    	static cocos2d::LayerColor* createAVdio();
    
    	~AVdio();
    
    	virtual bool init();
    
    	CREATE_FUNC(AVdio);
    
    
    
    	void OnCallback(cocos2d::Ref* pSender);
    	virtual bool onTouchBegan(Touch *touch, Event *unused_event) override;
    	virtual void update(float delta);
    
    	virtual void editBoxEditingDidBegin(EditBox* editBox)override;
    	CC_DEPRECATED_ATTRIBUTE virtual void editBoxEditingDidEnd(EditBox* editBox)override;
    	virtual void editBoxTextChanged(EditBox* editBox, const std::string& text)override;
    	virtual void editBoxReturn(EditBox* editBox)override;
    	virtual void editBoxEditingDidEndWithAction(EditBox* editBox, EditBoxDelegate::EditBoxEndAction action)override;
    private:
    	void initVideoStream(string filename);
    	void initInput();
    };
    
    
    
    
    
    
    
    
    
    extern "C"
    {
    #include <libavutil/imgutils.h>
    #include <libavutil/parseutils.h>
    #include <libswscale/swscale.h>
    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libavformat/avio.h>
    #include <libavutil/file.h>
    #include <libavdevice/avdevice.h>
    }
    
    struct  FrameInfor
    {
    	void*   _data;
    	int     _dataSize;
    	int     _width;
    	int     _height;
    	int64_t _pts;
    	double  _timeBase;
    
    };
    class   FFVideoReader
    {
    public:
    	AVFormatContext*_formatCtx;
    	int             _videoIndex;
    	AVCodecContext* _codecCtx;
    	AVCodec*        _codec;
    	AVFrame*        _frame;
    	AVFrame*        _frameRGB;
    	SwsContext*     _convertCtx;
    public:
    	int             _screenW;
    	int             _screenH;
    
    	int             _imageSize;
    public:
    	FFVideoReader()
    	{
    		_formatCtx = 0;
    		_videoIndex = -1;
    		_codecCtx = 0;
    		_codec = 0;
    		_frame = 0;
    		_frameRGB = 0;
    		_convertCtx = 0;
    		_screenW = 0;
    		_screenH = 0;
    
    	}
    
    	~FFVideoReader()
    	{
    		sws_freeContext(_convertCtx);
    		av_free(_frameRGB);
    		av_free(_frame);
    		avcodec_close(_codecCtx);
    		avformat_close_input(&_formatCtx);
    	}
    
    	void    setup()
    	{
    		av_register_all();
    		_formatCtx = avformat_alloc_context();
    	}
    	int     load(const char* filepath = "11.flv")
    	{
    		int     ret = 0;
    
    		//! Open the input file
    		if (avformat_open_input(&_formatCtx, filepath, NULL, NULL) != 0)
    		{
    			return -1;
    		}
    		//! Read the stream information from the file
    		if (avformat_find_stream_info(_formatCtx, NULL) < 0)
    		{
    			printf("failed to find stream info\n");
    			return -1;
    		}
    		//! Find the index of the video stream
    		_videoIndex = -1;
    		for (int i = 0; i < _formatCtx->nb_streams; i++)
    		{
    			if (_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
    			{
    				_videoIndex = i;
    				break;
    			}
    		}
    		/**
    		*   No video stream found; bail out.
    		*/
    		if (_videoIndex == -1)
    		{
    			return -1;
    		}
    		_codecCtx = _formatCtx->streams[_videoIndex]->codec;
    
    		double dur = _formatCtx->duration / double(AV_TIME_BASE);
    		_codec = avcodec_find_decoder(_codecCtx->codec_id);
    		if (_codec == NULL)
    		{
    			printf("find decoder faild !!
    ");
    			return -1;
    		}
    		/**
    		*   Open the decoder
    		*/
    		if (avcodec_open2(_codecCtx, _codec, NULL) < 0)
    		{
    			return -1;
    		}
    		_frame = av_frame_alloc();
    		_frameRGB = av_frame_alloc();
    
    		_screenW = _codecCtx->width;
    		_screenH = _codecCtx->height;
    
    		_convertCtx = sws_getContext(
    			_codecCtx->width
    			, _codecCtx->height
    			, _codecCtx->pix_fmt
    			, _codecCtx->width
    			, _codecCtx->height
    			, AV_PIX_FMT_RGB24
    			, SWS_BICUBIC
    			, NULL
    			, NULL
    			, NULL
    		);
    
    		int     numBytes = avpicture_get_size(AV_PIX_FMT_RGB24, _codecCtx->width, _codecCtx->height);
    		uint8_t*buffer = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    		avpicture_fill((AVPicture *)_frameRGB, buffer, AV_PIX_FMT_RGB24, _codecCtx->width, _codecCtx->height);
    		_imageSize = numBytes;
    		return  0;
    	}
    
    	bool    readFrame(FrameInfor& infor)
    	{
    		AVPacket packet;
    		av_init_packet(&packet);
    		for (;;)
    		{
    			if (av_read_frame(_formatCtx, &packet))
    			{
    				av_free_packet(&packet);
    				return false;
    			}
    			if (packet.stream_index != _videoIndex)
    			{
    				av_packet_unref(&packet);	// not a video packet: release it and keep reading
    				continue;
    			}
    			int frame_finished = 0;
    
    			int res = avcodec_decode_video2(_codecCtx, _frame, &frame_finished, &packet);
    
    			if (frame_finished)
    			{
    				AVStream*   streams = _formatCtx->streams[_videoIndex];
    				double      tmbase = av_q2d(streams->time_base);
    				int64_t     pts = _frame->pts;
    
    				char        buf[128];
    				sprintf(buf, "pts = %I64d     dts =  %I64d
    ", packet.pts, packet.dts);
    				int res = sws_scale(
    					_convertCtx
    					, (const uint8_t* const*)_frame->data
    					, _frame->linesize
    					, 0
    					, _codecCtx->height
    					, _frameRGB->data
    					, _frameRGB->linesize
    				);
    				av_packet_unref(&packet);
    
    				infor._data = _frameRGB->data[0];
    				infor._dataSize = _imageSize;
    				infor._width = _screenW;
    				infor._height = _screenH;
    				infor._pts = _frame->pts;
    				infor._timeBase = av_q2d(streams->time_base);
    
    				return  true;
    			}
    		}
    		return  false;
    	}
    	void*   readFrame()
    	{
    		AVPacket packet;
    		av_init_packet(&packet);
    		for (;;)
    		{
    			if (av_read_frame(_formatCtx, &packet))
    			{
    				av_free_packet(&packet);
    				return 0;
    			}
    			if (packet.stream_index != _videoIndex)
    			{
    				av_packet_unref(&packet);	// not a video packet: release it and keep reading
    				continue;
    			}
    			int frame_finished = 0;
    
    			int res = avcodec_decode_video2(_codecCtx, _frame, &frame_finished, &packet);
    
    			if (frame_finished)
    			{
    				AVStream*   streams = _formatCtx->streams[_videoIndex];
    				double      tmbase = av_q2d(streams->time_base);
    				int64_t     pts = _frame->pts;
    
    				char        buf[128];
    				sprintf(buf, "pts = %I64d     dts =  %I64d
    ", packet.pts, packet.dts);
    				int res = sws_scale(
    					_convertCtx
    					, (const uint8_t* const*)_frame->data
    					, _frame->linesize
    					, 0
    					, _codecCtx->height
    					, _frameRGB->data
    					, _frameRGB->linesize
    				);
    				av_packet_unref(&packet);
    
    				return  _frameRGB->data[0];
    			}
    		}
    		return  0;
    	}
    };
    
    
    
    #include <windows.h>
    
    class Timestamp
    {
    public:
    	Timestamp()
    	{
    		QueryPerformanceFrequency(&_frequency);
    		QueryPerformanceCounter(&_startCount);
    	}
    	~Timestamp()
    	{}
    
    	void    update()
    	{
    		QueryPerformanceCounter(&_startCount);
    	}
    	/**
    	*   Elapsed time in seconds
    	*/
    	double getElapsedSecond()
    	{
    		return  getElapsedTimeInMicroSec() * 0.000001;
    	}
    	/**
    	*   Elapsed time in milliseconds
    	*/
    	double getElapsedTimeInMilliSec()
    	{
    		return this->getElapsedTimeInMicroSec() * 0.001;
    	}
    	/**
    	*   Elapsed time in microseconds
    	*/
    	double getElapsedTimeInMicroSec()
    	{
    		LARGE_INTEGER endCount;
    		QueryPerformanceCounter(&endCount);
    
    		double  startTimeInMicroSec = _startCount.QuadPart * (1000000.0 / _frequency.QuadPart);
    		double  endTimeInMicroSec = endCount.QuadPart * (1000000.0 / _frequency.QuadPart);
    
    		return  endTimeInMicroSec - startTimeInMicroSec;
    	}
    protected:
    	LARGE_INTEGER   _frequency;
    	LARGE_INTEGER   _startCount;
    };
    
    
    
    
    
    #endif
    

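    FFVideoReader can also be exercised on its own, without the cocos2d layer. A minimal decode
    loop under the same (legacy) FFmpeg API, with "test.flv" as a placeholder path:

    	FFVideoReader reader;
    	reader.setup();
    	if (reader.load("test.flv") == 0)
    	{
    		FrameInfor frame;
    		while (reader.readFrame(frame))
    		{
    			// frame._data now points to RGB24 pixels, frame._width x frame._height in size
    			printf("pts = %lld (%d x %d)\n", (long long)frame._pts, frame._width, frame._height);
    		}
    	}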

    AVdio.cpp


    #include "AVdio.h"
    
    #ifndef DISPLAY
    #define DISPLAY Director::getInstance()->getVisibleSize()
    #endif
    #ifndef displayex
    #define displayex Director::getInstance()->getVisibleSize()
    #endif
    
    
    FFVideoReader   ffReader;
    FrameInfor *fl = new (std::nothrow) FrameInfor;
    auto texture = new (std::nothrow) Texture2D();
    bool is = false;
    Timestamp       _timestamp;
    double s = 0;
    
    
    cocos2d::LayerColor* AVdio::createAVdio()
    {
    	auto LC = AVdio::create();
    	if (LC)
    	{
    		return LC;
    	}
    	else
    	{
    		return nullptr;
    	}
    }
    
    AVdio::~AVdio()
    {
    
    }
    
    void AVdio::update(float delta)
    {
    	auto video = (Sprite*)(this->getChildByName("video"));
    	if (video != nullptr)
    	{
    		// Presentation time of the last decoded frame, in milliseconds (pts * time_base).
    		double      tims = fl->_pts * fl->_timeBase * 1000;
    		// Wall-clock time since playback started.
    		double      elsped = _timestamp.getElapsedTimeInMilliSec();
    		double      sleeps = (tims - elsped);
    
    		s = sleeps + elsped;	// collapses to s = tims
    
    		// Decode the next frame only once the wall clock has passed the frame's presentation time.
    		if (elsped - s > 0.0001)
    		{
    			is = ffReader.readFrame(*fl);
    			if (is)
    			{
    				// Upload the freshly decoded RGB24 pixels as the sprite's texture.
    				texture->initWithData((const unsigned char*)fl->_data, fl->_dataSize, Texture2D::PixelFormat::RGB888,
    					fl->_width, fl->_height,
    					Size(fl->_width, fl->_height));
    
    				video->initWithTexture(texture);
    				video->setContentSize(Size(displayex.width - 10, displayex.height - 50));
    			}
    			else
    			{
    				// End of stream (or read error): drop the video sprite.
    				video->removeFromParent();
    			}
    		}
    	}
    
    }
    
    bool AVdio::init()
    {
    	if (!LayerColor::init())
    	{
    		return false;
    	}
    
    	auto display = Director::getInstance()->getVisibleSize();
    	Vec2 origin = Director::getInstance()->getVisibleOrigin();
    
    	this->setTouchEnabled(true);
    	auto ELTOBO = EventListenerTouchOneByOne::create();
    	ELTOBO->setSwallowTouches(true);
    	ELTOBO->onTouchBegan = std::move(std::bind(&AVdio::onTouchBegan, this, std::placeholders::_1, std::placeholders::_2));
    	this->getEventDispatcher()->addEventListenerWithSceneGraphPriority(ELTOBO, this);
    
    
    
    
    	auto background = cocos2d::LayerColor::create(cocos2d::Color4B(0, 0, 0, 255));
    	this->addChild(background);
    
    	//close button
    	string file("res/Button.png");
    	auto btn = cocos2d::ui::Button::create(file, file, file);
    	btn->setColor(Color3B(24, 48, 64));
    	btn->setPressedActionEnabled(true);
    	btn->setScale9Enabled(true);
    	btn->setContentSize(Size(100, 50));
    	btn->setPosition(Vec2(displayex.width - btn->getContentSize().width / 2,
    		displayex.height - btn->getContentSize().height / 2));
    	btn->setTitleColor(Color3B::RED);
    	btn->setTitleFontSize(50);
    	btn->setName("X");
    	btn->setTitleText("X");
    	btn->addClickEventListener(std::bind(&AVdio::OnCallback, this, std::placeholders::_1));
    	this->addChild(btn);
    
    
    	//this->initVideoStream();
    
    	this->initInput();
    
    	return true;
    }
    
    void AVdio::initInput()
    {
    	auto edit = ui::EditBox::create(Size(800, 50), "res/input.png");
    	edit->setPosition(Vec2(0, displayex.height - edit->getContentSize().height / 2));
    	edit->setAnchorPoint(Vec2(0.0f, 0.5f));
    	edit->setName("videoPath");
    	edit->setFontColor(Color4B::YELLOW);
    	edit->setDelegate(this);
    	this->addChild(edit);
    
    	string file("res/Button.png");
    	auto btn = cocos2d::ui::Button::create(file, file, file);
    	btn->setColor(Color3B(24, 48, 64));
    	btn->setPressedActionEnabled(true);
    	btn->setScale9Enabled(true);
    	btn->setContentSize(Size(150, 50));
    	btn->setPosition(Vec2(displayex.width - (displayex.width - edit->getContentSize().width) + btn->getContentSize().width / 2,
    		edit->getPosition().y));
    	btn->setTitleColor(Color3B::RED);
    	btn->setTitleFontSize(50);
    	btn->setName("PLAY");
    	btn->setTitleText("PLAY");
    	btn->addClickEventListener(std::bind(&AVdio::OnCallback, this, std::placeholders::_1));
    	this->addChild(btn);
    }
    
    void AVdio::initVideoStream(string filename)
    {
    	this->unscheduleUpdate();
    
    	_timestamp.update();
    	
    	
    
    	ATBAE::GetInstance()->PauseAllMusicAndEffects();
    	auto video = (Sprite*)(this->getChildByName("video"));
    	if (video != nullptr)
    	{
    		video->removeFromParent();
    	}
    
    	s = 0;
    
    	auto display = Director::getInstance()->getVisibleSize();
    	Vec2 origin = Director::getInstance()->getVisibleOrigin();
    
    	auto ret = avdevice_version();
    	std::cout << ret << std::endl;
    
    	// Explicitly tear down any previously opened decoder before re-initializing.
    	ffReader.~FFVideoReader();
    	ffReader.setup();
    	ffReader.load(filename.c_str());
    
    	ATBAE::GetInstance()->LoadMusicsAndPlay(filename.c_str());
    
    
    	is = ffReader.readFrame(*fl);
    
    	texture->initWithData((const unsigned char*)fl->_data, fl->_dataSize, Texture2D::PixelFormat::RGB888,
    		fl->_width - 1, fl->_height - 1,
    		Size(500, 500));
    
    	auto videoSprite = Sprite::create("06a03.jpg");
    	videoSprite->initWithTexture(texture);
    	videoSprite->setName("video");
    	videoSprite->setPosition(Vec2(display.width / 2, display.height / 2 - 25));
    	this->addChild(videoSprite);
    
    	this->scheduleUpdate();
    }
    
    
    void AVdio::OnCallback(cocos2d::Ref* pSender)
    {
    	string name = ((Node*)(pSender))->getName();
    	if (name == "X")
    	{
    		auto action = Sequence::create(MoveTo::create(0.2f, Vec3(-(displayex.width), 0, 0)),
    			DelayTime::create(0.1f),
    			CallFunc::create([=]()
    		{
    			this->removeFromParent();
    			ATBAE::GetInstance()->PauseAllMusicAndEffects();
    		}), nullptr);
    		this->runAction(action);
    	}
    	else if (name == "PLAY")
    	{
    		auto edit = (ui::EditBox*)this->getChildByName("videoPath");
    		string path = edit->getText();
    		if (path.length() > 0)
    		{
    			this->initVideoStream(path);
    		}
    		
    	}
    }
    bool AVdio::onTouchBegan(Touch *touch, Event *unused_event)
    {
    	return true;
    }
    
    
    void AVdio::editBoxEditingDidBegin(EditBox* editBox)
    {
    
    }
    void AVdio::editBoxEditingDidEnd(EditBox* editBox)
    {
    
    }
    void AVdio::editBoxTextChanged(EditBox* editBox, const std::string& text)
    {
    
    }
    void AVdio::editBoxReturn(EditBox* editBox)
    {
    
    }
    void AVdio::editBoxEditingDidEndWithAction(EditBox* editBox, EditBoxDelegate::EditBoxEndAction action)
    {
    
    }
    

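    A minimal usage sketch for the layer itself (the running scene and the res/ assets are
    assumptions; the actual project wires this up elsewhere):

    	// Create the AVdio layer and attach it to the current scene.
    	auto avdio = AVdio::createAVdio();
    	if (avdio != nullptr)
    	{
    		Director::getInstance()->getRunningScene()->addChild(avdio);
    		// Type a media path into the "videoPath" EditBox and press PLAY to start playback.
    	}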

  • Original article: https://www.cnblogs.com/YZFHKMS-X/p/13193301.html