Qt + FFmpeg 4.3: Pulling and Playing an RTMP/HTTP-FLV Stream
Cc_Video_thread.h
#ifndef CC_VIDEO_THREAD_H
#define CC_VIDEO_THREAD_H
#include <QThread>
#include <QAtomicInt>
#include <QImage>
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>
#include <libavutil/mem.h>
#include <libavutil/dict.h>
#include <libavutil/imgutils.h>
#ifdef __cplusplus
}
#endif
class Cc_Video_thread : public QThread
{
    Q_OBJECT
public:
    Cc_Video_thread();
    ~Cc_Video_thread();
    void run() override;
    /**
     * @brief Open the stream and prepare the decoder.
     * @param addr stream address (rtmp:// or HTTP-FLV URL)
     * @return 1 on success, a negative value on failure
     */
    int open(const QString &addr);
    /**
     * @brief Ask the worker thread to exit.
     */
    void exit();
    void packer_to_qimage();
    /**
     * @brief Interrupt callback handed to FFmpeg; returning non-zero aborts
     *        blocking I/O. It always returns 0 here, i.e. it never interrupts.
     */
    static int custom_interrupt_callback(void *){
        // LOG(ERROR) << "[ERROR]:OUT TIME..." << std::endl;
        return 0;
    }
signals:
    void sendimage(QImage img);
private:
    QAtomicInt exit_state_ = 1;   // 1: keep running, 0: thread should exit
    QAtomicInt save_state_ = 1;   // reserved
    QAtomicInt open_state_ = -1;  // 1: stream opened successfully
    AVCodec *codec_ = nullptr;
    AVPacket *packet_ = nullptr;
    AVStream *video_st = nullptr;
    AVFrame *yuv_frame_ = nullptr;
    AVFrame *pFrameRGB = nullptr;
    AVCodecContext *codecContext = nullptr;
    AVFormatContext *format_context_ = nullptr;
    AVCodecParameters *codecParam = nullptr;
    SwsContext *y2r_sws_context_ = nullptr;
    int video_stream_index_ = 0;
    int audio_stream_index_ = 0;
    int video_frame_size = 0;
    int audio_frame_size = 0;
    int video_frame_count = 0;
    int audio_frame_count = 0;
    uint8_t *m_OutBuffer = nullptr;
};
#endif // CC_VIDEO_THREAD_H
Cc_Video_thread.cpp
#include "cc_video_thread.h"
#include <string>
#include "glog/logging.h"
Cc_Video_thread::Cc_Video_thread()
{
    start(); // the thread starts right away; decoding begins once open() succeeds
}
Cc_Video_thread::~Cc_Video_thread()
{
    exit_state_ = 0;
    wait(); // let run() return before freeing the decoder state it uses
    av_packet_free(&packet_);
    av_frame_free(&pFrameRGB);
    av_frame_free(&yuv_frame_);
    avcodec_free_context(&codecContext);
    sws_freeContext(y2r_sws_context_);
    av_free(m_OutBuffer);
    avformat_close_input(&format_context_);
}
int Cc_Video_thread::open(const QString &addr)
{
    if(open_state_ == 1){
        return -1;
    }
    if(addr.isEmpty()){
        LOG(ERROR) << "[ERROR] addr is empty... ";
        return -1;
    }
    // Initialize FFmpeg and open the stream
    LOG(INFO) << avcodec_configuration();
    format_context_ = avformat_alloc_context();
    format_context_->interrupt_callback.callback = custom_interrupt_callback;
    format_context_->interrupt_callback.opaque = this;
    packet_ = av_packet_alloc();
    yuv_frame_ = av_frame_alloc();
    pFrameRGB = av_frame_alloc();
    avformat_network_init(); // initialize networking
    AVDictionary *options = nullptr;
    // "rtsp_transport" and "stimeout" only take effect for RTSP inputs;
    // they are ignored for RTMP/HTTP-FLV but kept here for convenience.
    av_dict_set(&options, "rtsp_transport", "tcp", 0);
    av_dict_set(&options, "stimeout", "10000", 0);
    // Receive buffer size, in bytes
    av_dict_set(&options, "buffer_size", "1024000", 0);
    int ret = avformat_open_input(&format_context_, addr.toStdString().c_str(), NULL, &options);
    av_dict_free(&options);
    if(ret < 0)
    {
        LOG(ERROR) << "[ERROR]:avformat_open_input FAIL...";
        return ret;
    }
    // Read a few packets from the input to probe the stream information
    if(avformat_find_stream_info(format_context_, nullptr) < 0)
    {
        LOG(ERROR) << "[ERROR]:avformat_find_stream_info FAIL...";
        return -1;
    }
    // Dump the stream information
    av_dump_format(format_context_, 0, addr.toStdString().c_str(), 0);
    for(unsigned int i = 0; i < format_context_->nb_streams; i++)
    {
        video_st = format_context_->streams[i];
        // Pick out the video and audio streams
        if(video_st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            video_stream_index_ = i;
        }
        if(video_st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO){
            audio_stream_index_ = i;
        }
    }
    codecParam = format_context_->streams[video_stream_index_]->codecpar; // codec parameters of the video stream
    codec_ = const_cast<AVCodec*>(avcodec_find_decoder(codecParam->codec_id)); // find the matching decoder
    if(NULL == codec_){
        LOG(ERROR) << "avcodec_find_decoder fail";
        return -1;
    }
    codecContext = avcodec_alloc_context3(nullptr); // allocate the decoder context
    avcodec_parameters_to_context(codecContext, codecParam);
    if(avcodec_open2(codecContext, codec_, nullptr) != 0){
        avcodec_free_context(&codecContext);
        LOG(ERROR) << "Error: can't open codec";
        return -1;
    }
    // Build a conversion context: decoded pixel format -> RGB32
    y2r_sws_context_ = sws_getContext(codecParam->width, codecParam->height, (AVPixelFormat)codecParam->format,
                                      codecParam->width, codecParam->height, AV_PIX_FMT_RGB32,
                                      SWS_BICUBIC, NULL, NULL, NULL);
    int bytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, codecContext->width, codecContext->height, 4);
    m_OutBuffer = (uint8_t *)av_malloc(bytes * sizeof(uint8_t));
    // avpicture_fill() is deprecated; av_image_fill_arrays() is its replacement
    av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, m_OutBuffer,
                         AV_PIX_FMT_RGB32, codecContext->width, codecContext->height, 4);
    open_state_ = 1;
    return 1;
}
void Cc_Video_thread::run()
{
    while(true){
        if(exit_state_ != 1){
            break;
        }
        if(open_state_ == 1)
        {
            // Read the next packet from the stream
            int ret = av_read_frame(format_context_, packet_);
            if(ret < 0) break;
            char output[1024];
            if(packet_->stream_index == video_stream_index_){
                video_frame_size += packet_->size;
                memset(output, 0, sizeof(output));
                snprintf(output, sizeof(output), "recv %5d video frame %5d-%5d\n",
                         ++video_frame_count, packet_->size, video_frame_size);
                LOG(INFO) << output;
                // Feed the compressed packet (e.g. H.264) to the decoder
                ret = avcodec_send_packet(codecContext, packet_);
                if(ret != 0)
                {
                    LOG(ERROR) << "send packet error code is " << ret;
                    av_packet_unref(packet_);
                    break;
                }
                // Fetch the decoded YUV frame back from the decoder
                ret = avcodec_receive_frame(codecContext, yuv_frame_);
                if(ret == AVERROR(EAGAIN)){
                    // Decoder needs more input; keep reading packets
                    av_packet_unref(packet_);
                    continue;
                } else if(ret == AVERROR_EOF){
                    av_packet_unref(packet_);
                    break;
                } else if(ret < 0){
                    LOG(ERROR) << "Error during decoding";
                    av_packet_unref(packet_);
                    break;
                }
                // Convert YUV -> RGB32 into m_OutBuffer (wrapped by pFrameRGB)
                ret = sws_scale(y2r_sws_context_, yuv_frame_->data, yuv_frame_->linesize, 0,
                                codecParam->height, pFrameRGB->data, pFrameRGB->linesize);
                if(ret <= 0){
                    LOG(ERROR) << "ERROR to rgb....";
                }
                // Wrap the RGB buffer in a QImage and hand a deep copy to the UI
                QImage tmmImage((uchar *)m_OutBuffer, codecContext->width, codecContext->height, QImage::Format_RGB32);
                emit sendimage(tmmImage.copy());
            }
            if(packet_->stream_index == audio_stream_index_){
                audio_frame_size += packet_->size;
                memset(output, 0, sizeof(output));
                snprintf(output, sizeof(output), "recv %5d audio frame %5d-%5d\n",
                         ++audio_frame_count, packet_->size, audio_frame_size);
                LOG(INFO) << output;
            }
            av_packet_unref(packet_); // release the packet every iteration
        }
        if(open_state_ != 1){
            LOG_EVERY_N(INFO, 100) << "thread id is :" << currentThreadId() << " open_state_:" << open_state_ << " run...";
            msleep(100);
        }
    }
}
void Cc_Video_thread::exit()
{
    open_state_ = -1;
    exit_state_ = 0;
}
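To actually display the decoded frames, the sendimage(QImage) signal only needs to be connected to a widget on the GUI side. Below is a minimal usage sketch assuming a plain QLabel and a placeholder stream URL; main.cpp, the label, and the URL are illustrative and not part of the class above, which only exposes open(), exit() and the sendimage signal.
main.cpp (usage sketch)
#include <QApplication>
#include <QLabel>
#include <QPixmap>
#include "Cc_Video_thread.h"
int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    QLabel label;
    label.resize(1280, 720);
    label.show();
    Cc_Video_thread thread;
    // The context object (&label) makes this a queued connection, so the
    // pixmap is updated on the GUI thread even though the signal is emitted
    // from the decoder thread.
    QObject::connect(&thread, &Cc_Video_thread::sendimage, &label,
                     [&label](const QImage &img){
        label.setPixmap(QPixmap::fromImage(img).scaled(label.size(), Qt::KeepAspectRatio));
    });
    // Placeholder address; replace with a real rtmp:// or HTTP-FLV URL.
    thread.open("rtmp://127.0.0.1/live/test");
    int rc = app.exec();
    thread.exit();
    return rc;
}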