A simple video player code example based on FFmpeg 4.x and Qt, which decodes video and renders it into a Qt window.
1) Decoding goes through the FFmpeg library interface and supports both software and hardware decoding.
2) Decoded video frames are displayed via QImage/QPixmap.
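At a glance, the decode class built in this article is driven like this (a minimal usage sketch; "test.mp4" and onFrame are placeholders, not part of the project):

VideoDecode vd;
vd.globalInit();                               // load FFmpeg once per process
vd.setHWDecoder(true);                         // optional: prefer hardware decoding
vd.setStreamDecodeCallback(onFrame, nullptr);  // receives decoded RGB24 frames
vd.open("test.mp4");                           // starts the internal decode thread
// ...frames arrive on the callback until the file ends or close() is called...
vd.close();                                    // stops the thread and frees resources
vd.globalUnInit();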
1. Qt project configuration (.pro file)
QT += core gui
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets

CONFIG += c++11

INCLUDEPATH += $$PWD/ffmpeg-4.2.2-win32/include
LIBS += -L$$PWD/ffmpeg-4.2.2-win32/lib -lavcodec -lavformat -lavutil -lswscale

# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS

# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += \
    main.cpp \
    mainwindow.cpp \
    playimage.cpp \
    videodecode.cpp

HEADERS += \
    mainwindow.h \
    playimage.h \
    videodecode.h

FORMS += \
    mainwindow.ui

# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
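The INCLUDEPATH/LIBS entries above assume an FFmpeg 4.2.2 Windows build unpacked next to the project. One quick way to confirm the headers and import libraries are found is a throwaway check like the following (a sketch; av_version_info() and avcodec_configuration() are standard libavutil/libavcodec calls):

extern "C" {
#include <libavutil/avutil.h>
#include <libavcodec/avcodec.h>
}
#include <QDebug>

// Call this early (e.g. from main) to verify that the FFmpeg
// paths referenced in the .pro file link correctly.
static void checkFFmpegLink()
{
    qDebug() << "FFmpeg version:" << av_version_info();
    qDebug() << "avcodec build :" << avcodec_configuration();
}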
2. Video decode class
File: videodecode.h
#ifndef VIDEODECODE_H
#define VIDEODECODE_H

// Video decode class
#include <QString>
#include <QImage>
#include <thread>
#include <vector>

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

// Stream type
enum StreamType
{
    StreamType_Video = 0,
    StreamType_Audio = 1,
    StreamType_Text  = 2,
};

// Pixel format type
enum FormatType
{
    FormatType_RGB24  = 0,
    FormatType_RGB32  = 1,
    FormatType_YUV420 = 2,
    FormatType_YUV422 = 3,
};

// File status
enum FileStatus
{
    FileStatus_OverFileTail   = 0,   // reached the end of the file
    FileStatus_OverFileHead   = 1,   // reached the start of the file
    FileStatus_TrigeException = 2,   // an exception occurred
};

// Stream decode callback
typedef void (*StreamDecodeCallback)(int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData);
// File status callback
typedef void (*FileStatusCallback)(int FileStatus, int nErrorCode, void * pUserData);

class VideoDecode
{
public:
    VideoDecode();
    ~VideoDecode();

public:
    void globalInit();      // initialize the FFmpeg library (only needs to be done once per process)
    void globalUnInit();    // deinitialize the FFmpeg library (only needs to be done once per process)

public:
    void setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData);
    void setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData);
    void setHWDecoder(bool flag);    // whether to use a hardware decoder
    bool isHWDecoder();
    bool open(const QString& url);   // open a media file, or a stream (rtmp, rtsp, http)
    void close();                    // close
    bool isClose();

public:
    void decodeProccessThread();     // decode thread
    static QImage ConvertRGB24FrameToQImage(unsigned char *data, int width, int height);

protected:
    void initHWDecoder(const AVCodec *codec);
    bool dataCopy();                 // after hardware decoding, the data must be copied from the GPU to the CPU
    void freeDecode();
    qreal rationalToDouble(AVRational* rational);

private:
    // FFmpeg objects
    AVFormatContext *formatCtx = nullptr;
    AVCodecContext  *codecCtx  = nullptr;
    AVFrame *frame = nullptr, *rgbFrame = nullptr;
    AVFrame *frameHW = nullptr;
    SwsContext *swsCtx = nullptr;
    uchar* buffer = nullptr;         // the YUV image must be converted to RGB; this holds the converted image data
    AVPacket* packet = nullptr;
    int videoStreamIndex = -1;       // video stream index
    qint64 totalTime    = 0;         // total video duration
    qint64 totalFrames  = 0;         // total number of frames
    qint64 obtainFrames = 0;         // number of frames obtained so far
    qint64 pts          = 0;         // display timestamp of the current frame
    qreal  frameRate    = 0;         // video frame rate
    int    width  = 0;               // video resolution: width
    int    height = 0;               // video resolution: height
    std::vector<int> vecHWDeviceTypes;       // hardware decoders supported by the current environment
    AVBufferRef* hw_device_ctx = nullptr;    // reference to the hardware device data buffer
    bool hwDecoderFlag = false;              // records whether hardware decoding is in use
    std::thread threadDecode;
    bool stopWorkFlag = true;
    StreamDecodeCallback funCallbackByStreamDecode = nullptr;
    void * userDataByStreamDecode = nullptr;
    FileStatusCallback funCallbackByFileStatus = nullptr;
    void * userDataByFileStatus = nullptr;
};

#endif // VIDEODECODE_H
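Both callbacks are plain C function pointers plus a pUserData cookie, so they can also be wired to free functions instead of the lambdas used later in this article. A minimal sketch (onFrame and onFileStatus are hypothetical names):

// Receives one decoded frame; pStreamData[0] points at packed RGB24 pixels.
static void onFrame(int streamType, int formatType, long long decodeTs, long long playTs,
                    int width, int height, unsigned char** pStreamData, int* linesize, void* pUserData)
{
    QImage img = VideoDecode::ConvertRGB24FrameToQImage(pStreamData[0], width, height);
    // hand `img` to the UI thread here (e.g. via a queued signal)
}

static void onFileStatus(int fileStatus, int errorCode, void* pUserData)
{
    if (fileStatus == FileStatus_OverFileTail) { /* end of file reached */ }
}

// wiring:
// decoder.setStreamDecodeCallback(onFrame, this);
// decoder.setFileStatusCallback(onFileStatus, this);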
File: videodecode.cpp
#include "videodecode.h"
#include <QTime>
#include <QDebug>
#include <QStringList>
#include <chrono>

/*********************************** Callback used by FFmpeg to choose the GPU hardware decode frame format *****************************************/
static enum AVPixelFormat g_pixelFormat;
/**
 * @brief Callback that selects the pixel format of GPU-decoded frames
 * @param s
 * @param fmt
 * @return
 */
AVPixelFormat get_hw_format(AVCodecContext* s, const enum AVPixelFormat* fmt)
{
    Q_UNUSED(s)
    const enum AVPixelFormat* p;
    for (p = fmt; *p != -1; p++)
    {
        if(*p == g_pixelFormat)
        {
            return *p;
        }
    }
    qDebug() << "Unable to get the hardware surface format.";   // when too many streams are open at once and the GPU's capacity is exceeded, the decode frame format may not be found
    return AV_PIX_FMT_NONE;
}
/************************************************ END ******************************************************/

VideoDecode::VideoDecode()
{
}

VideoDecode::~VideoDecode()
{
}

void VideoDecode::globalInit()
{
    // av_register_all();   // removed from the FFmpeg sources
    /**
     * Initialize the network stack, used for opening network streams. This function only exists
     * to work around thread-safety issues in old GnuTLS or OpenSSL libraries. Once support for
     * those old libraries is removed, it will be deprecated and no longer serve any purpose.
     */
    avformat_network_init();
}

void VideoDecode::globalUnInit()
{
    avformat_network_deinit();
}

qreal VideoDecode::rationalToDouble(AVRational* rational)
{
    qreal frameRate = (rational->den == 0) ? 0 : (qreal(rational->num) / rational->den);
    return frameRate;
}

void VideoDecode::setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData)
{
    funCallbackByStreamDecode = funStreamDecodeCallback;
    userDataByStreamDecode = userData;
}

void VideoDecode::setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData)
{
    funCallbackByFileStatus = funFileStatusCallback;
    userDataByFileStatus = userData;
}

// Initialize the hardware decoder
void VideoDecode::initHWDecoder(const AVCodec *codec)
{
    if(!codec) return;

    for(int i = 0; ; i++)
    {
        const AVCodecHWConfig* config = avcodec_get_hw_config(codec, i);   // retrieve the hardware configurations supported by the codec
        if(!config)
        {
            qDebug() << "Failed to open hardware decoder!";
            return;   // no supported hardware configuration found
        }
        if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)   // check whether this is a device-context configuration
        {
            for(auto type : vecHWDeviceTypes)
            {
                if(config->device_type == AVHWDeviceType(type))   // check whether the device type is one of the supported hardware decoders
                {
                    g_pixelFormat = config->pix_fmt;

                    // Open a device of the given type and create an AVHWDeviceContext for it.
                    int ret = av_hwdevice_ctx_create(&hw_device_ctx, config->device_type, nullptr, nullptr, 0);
                    if(ret < 0)
                    {
                        freeDecode();
                        return;
                    }
                    qDebug() << "Opened hardware decoder:" << av_hwdevice_get_type_name(config->device_type);
                    codecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx);   // create a new reference to the AVBuffer
                    codecCtx->get_format = get_hw_format;   // called by some decoders to select the pixel format used for output frames
                    return;
                }
            }
        }
    }
}

// After hardware decoding, the data must be copied from the GPU to the CPU
bool VideoDecode::dataCopy()
{
    if(frame->format != g_pixelFormat)
    {
        av_frame_unref(frame);
        return false;
    }

    // av_hwframe_map is faster than av_hwframe_transfer_data (av_hwframe_map is available since FFmpeg 3.3)
    int ret = av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_DIRECT);   // map the hardware frame
    /*
      av_hwframe_map maps a hardware frame; the third parameter accepts three flags:
        AV_HWFRAME_MAP_READ:   the destination frame is readable.
        AV_HWFRAME_MAP_WRITE:  the destination frame is writable.
        AV_HWFRAME_MAP_DIRECT: avoid a data copy (depends on hardware support).
      Prefer AV_HWFRAME_MAP_DIRECT to reduce memory-copy overhead.
      With AV_HWFRAME_MAP_DIRECT, make sure your application logic does not modify the mapped
      software frame, to avoid unexpected side effects. With AV_HWFRAME_MAP_READ you get data
      consistency but possibly at a performance cost.
    */
    if(ret >= 0)
    {
        // mapping the hardware frame succeeded
        frameHW->width  = frame->width;
        frameHW->height = frame->height;
    }
    else
    {
        // mapping failed: copy the decoded data from the GPU to the CPU (frameHW) instead.
        // This is relatively slow, but hardware decoding is still much faster than software decoding.
        ret = av_hwframe_transfer_data(frameHW, frame, 0);
        if(ret < 0)
        {
            av_frame_unref(frame);
            return false;
        }
        av_frame_copy_props(frameHW, frame);   // copy only the "metadata" fields from src to dst
    }
    return true;
}

void VideoDecode::setHWDecoder(bool flag)
{
    hwDecoderFlag = flag;
}

bool VideoDecode::isHWDecoder()
{
    return hwDecoderFlag;
}

bool VideoDecode::open(const QString& url)
{
    if(url.isNull()) return false;

    AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;   // hardware decoders supported by this FFmpeg build
    QStringList strTypes;
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)   // iterate over the supported device types
    {
        vecHWDeviceTypes.push_back(type);
        const char* ctype = av_hwdevice_get_type_name(type);   // get the string name of the AVHWDeviceType
        if(ctype)
        {
            strTypes.append(QString(ctype));
        }
    }
    qDebug() << "Supported hardware decoders:";
    qDebug() << strTypes;

    AVDictionary* dict = nullptr;
    // Open RTSP streams over TCP. If opening fails with [Error number -135 occurred], try another
    // transport (UDP, tcp, udp_multicast, http); streaming from VLC, for example, requires udp.
    av_dict_set(&dict, "rtsp_transport", "tcp", 0);
    // Maximum muxing/demuxing delay in microseconds. When receiving data over UDP, the demuxer tries
    // to reorder incoming packets (they may arrive out of order, or be lost entirely). This can be
    // disabled by setting the maximum demuxing delay to zero (via the max_delay field of AVFormatContext).
    av_dict_set(&dict, "max_delay", "3", 0);
    // Socket TCP I/O timeout in microseconds; if it is too short, the call may return before the connection completes.
    av_dict_set(&dict, "timeout", "1000000", 0);

    // Open the input stream and return the demuxing context
    int ret = avformat_open_input(&formatCtx,                 // demuxing context (out)
                                  url.toStdString().data(),   // media URL/path to open
                                  nullptr,                    // if non-null, forces a specific input format; null auto-detects the demuxer (file format)
                                  &dict);                     // options
    // Free the options dictionary
    if(dict)
    {
        av_dict_free(&dict);
    }
    // Failed to open the media
    if(ret < 0)
    {
        qDebug() << "Failed to avformat_open_input";
        return false;
    }

    // Read packets of the media file to get stream information.
    ret = avformat_find_stream_info(formatCtx, nullptr);
    if(ret < 0)
    {
        qDebug() << "Failed to avformat_find_stream_info";
        freeDecode();
        return false;
    }
    totalTime = formatCtx->duration / (AV_TIME_BASE / 1000);   // total video duration in milliseconds
    qDebug() << QString("Total duration: %1 ms, [%2]").arg(totalTime).arg(QTime::fromMSecsSinceStartOfDay(int(totalTime)).toString("HH:mm:ss zzz"));

    // Find the video stream index via the AVMediaType enum (it could also be found by iterating); the last parameter is unused
    videoStreamIndex = av_find_best_stream(formatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    if(videoStreamIndex < 0)
    {
        qDebug() << "Failed to av_find_best_stream";
        freeDecode();
        return false;
    }
    AVStream* videoStream = formatCtx->streams[videoStreamIndex];   // get the video stream by the index found above

    // Get the video resolution (AVCodecContext inside AVStream is deprecated in new versions; use AVCodecParameters instead)
    width  = videoStream->codecpar->width;
    height = videoStream->codecpar->height;
    frameRate = rationalToDouble(&videoStream->avg_frame_rate);   // video frame rate

    // Find the video decoder by codec ID (new versions require the return value to be const)
    const AVCodec* codec = avcodec_find_decoder(videoStream->codecpar->codec_id);
    if(!codec)   // no decoder available for this codec ID
    {
        qDebug() << "Failed to avcodec_find_decoder";
        freeDecode();
        return false;
    }
    totalFrames = videoStream->nb_frames;
    qDebug() << QString("Resolution: [w:%1,h:%2] frame rate: %3 total frames: %4 decoder: %5").arg(width).arg(height).arg(frameRate).arg(totalFrames).arg(codec->name);

    // Allocate an AVCodecContext and set its fields to default values.
    codecCtx = avcodec_alloc_context3(codec);
    if(!codecCtx)
    {
        qDebug() << "Failed to avcodec_alloc_context3";
        freeDecode();
        return false;
    }

    // Fill the decoder context from the video stream's codecpar
    ret = avcodec_parameters_to_context(codecCtx, videoStream->codecpar);
    if(ret < 0)
    {
        qDebug() << "Failed to avcodec_parameters_to_context";
        freeDecode();
        return false;
    }
    codecCtx->flags2 |= AV_CODEC_FLAG2_FAST;   // allow non-spec-compliant speedup tricks
    codecCtx->thread_count = 8;                // decode with 8 threads

    if(isHWDecoder())
    {
        initHWDecoder(codec);   // initialize the hardware decoder (must be called before avcodec_open2)
    }

    // Initialize the decoder context; since avcodec_alloc_context3 already received the decoder, NULL is fine here
    ret = avcodec_open2(codecCtx, nullptr, nullptr);
    if(ret < 0)
    {
        qDebug() << "Failed to avcodec_open2";
        freeDecode();
        return false;
    }

    // Allocate an AVPacket and set its fields to default values.
    packet = av_packet_alloc();
    if(!packet)
    {
        qDebug() << "Failed to av_packet_alloc";
        freeDecode();
        return false;
    }

    // Allocate frames and the conversion buffer
    frame    = av_frame_alloc();
    rgbFrame = av_frame_alloc();
    frameHW  = av_frame_alloc();
    int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, codecCtx->width, codecCtx->height, 1);
    buffer = (uint8_t *)av_malloc(size + 1000);
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24,
                         codecCtx->width, codecCtx->height, 1);
    /*
    // Initialize the SWS context (YUV -> RGB conversion); created lazily in the decode thread instead
    swsCtx = sws_getContext(codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
                            codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB24,
                            SWS_BILINEAR, nullptr, nullptr, nullptr);
    */

    stopWorkFlag = false;
    std::thread t(std::bind(&VideoDecode::decodeProccessThread, this));
    threadDecode = std::move(t);
    return true;
}

void VideoDecode::close()
{
    stopWorkFlag = true;

    // avformat_flush does not flush the AVIOContext (s->pb). If necessary, call avio_flush(s->pb) before it.
    if(formatCtx && formatCtx->pb)
    {
        avio_flush(formatCtx->pb);
    }
    if(formatCtx)
    {
        avformat_flush(formatCtx);   // flush the read buffer
    }
    if(threadDecode.joinable())
    {
        threadDecode.join();
    }
    freeDecode();
}

bool VideoDecode::isClose()
{
    return stopWorkFlag;
}

QImage VideoDecode::ConvertRGB24FrameToQImage(unsigned char *data, int width, int height)
{
    // Build a QImage over the packed RGB24 data. QImage(uchar*, ...) does not copy the buffer,
    // and the decode thread reuses that buffer for the next frame, so return a deep copy.
    QImage img(data, width, height, QImage::Format_RGB888);
    return img.copy();
}

void VideoDecode::decodeProccessThread()
{
    std::chrono::high_resolution_clock::time_point tpStart = std::chrono::high_resolution_clock::now();
    int nWaitTimes = 40;
    if(frameRate != 0)
    {
        nWaitTimes = 1000.0 / frameRate;
    }
    long long llDecodeTs = 0;
    long long llPlayTs = 0;
    long long llStartPlayTs = 0;
    bool bStartPlayTsSetValueFlag = false;
    bool bProccessFileTail = false;

    while (true)
    {
        if(stopWorkFlag)
        {
            break;
        }
        // Read the next packet
        int readRet = av_read_frame(formatCtx, packet);
        if(readRet < 0)
        {
            if (readRet == AVERROR_EOF)
            {
                // After reading completes, send an empty AVPacket into the decoder, otherwise the last few frames cannot be drained
                int ret = avcodec_send_packet(codecCtx, packet);
                if(ret < 0)
                {
                    av_packet_unref(packet);
                    bProccessFileTail = true;
                    break;
                }
            }
            else
            {
                break;
            }
        }
        else
        {
            if(stopWorkFlag)
            {
                break;
            }
            if(packet->stream_index == videoStreamIndex)   // decode only video packets
            {
                av_packet_rescale_ts(packet, formatCtx->streams[videoStreamIndex]->time_base, codecCtx->time_base);   // convert to the decoder time base
                // Send the raw packet into the decoder
                int ret = avcodec_send_packet(codecCtx, packet);
                if(ret < 0)
                {
                    qDebug() << "Error sending packet";
                    av_packet_unref(packet);
                    continue;
                }
            }
            else
            {
                // other streams (e.g. audio)
                av_packet_unref(packet);
                continue;
            }
        }

        // Receive the decoded frame (only one frame is decoded per loop here)
        int ret = avcodec_receive_frame(codecCtx, frame);
        if (ret == AVERROR(EAGAIN))
        {
            av_packet_unref(packet);
            continue;
        }
        else if (ret == AVERROR_EOF)
        {
            av_packet_unref(packet);
            // When no AVPacket can be read and the decoder holds no more data, reading is complete
            bProccessFileTail = true;
            break;
        }
        else if (ret < 0)
        {
            qDebug() << "Error during decoding";
            av_packet_unref(packet);
            continue;
        }
        else
        {
            // Written this way to handle both software decoding and the case where opening the hardware decoder failed
            AVFrame* frameTemp = frame;
            if(!frame->data[0])   // entered when the frame was hardware-decoded
            {
                // Copy the decoded data from the GPU to the CPU
                if(!dataCopy())
                {
                    av_frame_unref(frameHW);
                    continue;
                }
                frameTemp = frameHW;
            }

            // Core timestamp handling
            int64_t raw_pts = frameTemp->pts;
            int64_t raw_dts = frameTemp->pkt_dts;

            // Handle undefined timestamps
            if (raw_pts == AV_NOPTS_VALUE)
            {
                // Fall back to the DTS, or estimate the PTS (from the frame rate and similar parameters)
                if(raw_dts != AV_NOPTS_VALUE)
                {
                    raw_pts = raw_dts;
                }
                else
                {
                    raw_pts = 0;
                    raw_dts = 0;
                }
            }

            // Convert to a display time in seconds
            double display_time = raw_pts * av_q2d(codecCtx->time_base);
            // Convert to the global time base (e.g. for audio/video synchronization)
            AVRational timeBaseTemp{1, AV_TIME_BASE};   // AV_TIME_BASE_Q
            llPlayTs   = av_rescale_q(raw_pts, codecCtx->time_base, timeBaseTemp);
            llDecodeTs = av_rescale_q(raw_dts, codecCtx->time_base, timeBaseTemp);
            if(!bStartPlayTsSetValueFlag)
            {
                llStartPlayTs = llPlayTs;
                bStartPlayTsSetValueFlag = true;
            }
            qDebug("Frame:%4d PTS:%lld display_time:%.2f DTS:%lld llPlayTs:%lld llDecodeTs:%lld packet dts:%lld pts:%lld",
                   codecCtx->frame_number, raw_pts, display_time, raw_dts, llPlayTs, llDecodeTs, packet->dts, packet->pts);
            av_packet_unref(packet);   // release the packet (reference count - 1; the space is freed when it reaches 0)

            if(!swsCtx || (frameTemp->width != width || frameTemp->height != height))
            {
                // (re)allocate the conversion buffer and SWS context
                width  = frameTemp->width;
                height = frameTemp->height;
                if(swsCtx)
                {
                    sws_freeContext(swsCtx);
                    swsCtx = nullptr;
                }
                if(buffer)
                {
                    av_free(buffer);
                    buffer = nullptr;
                }
                int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, frameTemp->width, frameTemp->height, 1);
                buffer = (uint8_t *)av_malloc(size + 1000);
                av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24,
                                     frameTemp->width, frameTemp->height, 1);
                swsCtx = sws_getCachedContext(swsCtx,
                                              frameTemp->width,                  // input image width
                                              frameTemp->height,                 // input image height
                                              (AVPixelFormat)frameTemp->format,  // input pixel format
                                              frameTemp->width,                  // output image width
                                              frameTemp->height,                 // output image height
                                              AV_PIX_FMT_RGB24,                  // output pixel format
                                              SWS_BILINEAR,                      // scaling algorithm (only matters when input and output sizes differ); SWS_FAST_BILINEAR is the usual choice
                                              nullptr,                           // input filter, NULL if not needed
                                              nullptr,                           // output filter, NULL if not needed
                                              nullptr);
            }

            // Sleep to pace playback
            long long llPlayTsDiff = llPlayTs - llStartPlayTs;
            auto duration = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::now() - tpStart);
            // Time still to wait, in microseconds
            int64_t delay = llPlayTsDiff - duration.count();
            // Synchronization control
            if (delay > 0)
            {
                std::this_thread::sleep_for(std::chrono::microseconds(delay));   // wait until the target time
            }
            else if (delay < -100000)   // allow a 100 ms error threshold
            {
                // Drop the lagging frame to catch up
                av_frame_unref(frame);
                av_frame_unref(frameHW);
                continue;
            }

            // Convert the color space to RGB24
            sws_scale(swsCtx, frameTemp->data, frameTemp->linesize, 0, frameTemp->height, rgbFrame->data, rgbFrame->linesize);

            // Invoke the stream callback (for rendering)
            if(funCallbackByStreamDecode)
            {
                funCallbackByStreamDecode(StreamType_Video, FormatType_RGB24, llDecodeTs, llPlayTs,
                                          frameTemp->width, frameTemp->height,
                                          rgbFrame->data, rgbFrame->linesize, userDataByStreamDecode);
            }
            av_frame_unref(frame);
            av_frame_unref(frameHW);
        }
    }

    if(bProccessFileTail && !stopWorkFlag)
    {
        if(funCallbackByFileStatus != nullptr)
        {
            funCallbackByFileStatus(FileStatus_OverFileTail, 0, userDataByFileStatus);
        }
    }
}

void VideoDecode::freeDecode()
{
    // Release resources
    if (swsCtx)
    {
        sws_freeContext(swsCtx);
        swsCtx = nullptr;
    }
    if (rgbFrame)
    {
        av_frame_free(&rgbFrame);
        rgbFrame = nullptr;
    }
    if (frame)
    {
        av_frame_free(&frame);
        frame = nullptr;
    }
    if (frameHW)
    {
        av_frame_free(&frameHW);
        frameHW = nullptr;
    }
    if (packet)
    {
        av_packet_free(&packet);   // free the packet allocated in open()
    }
    if (codecCtx)
    {
        avcodec_free_context(&codecCtx);
        codecCtx = nullptr;
    }
    if (formatCtx)
    {
        avformat_close_input(&formatCtx);
        formatCtx = nullptr;
    }
    if (buffer != nullptr)
    {
        av_free(buffer);
        buffer = nullptr;
    }
}
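The pacing logic in the decode thread rescales each frame's PTS from the codec time base into microseconds with av_rescale_q before comparing it against the wall clock. As a standalone illustration with hypothetical values (a 25 fps stream, i.e. a time base of 1/25):

extern "C" {
#include <libavutil/avutil.h>
#include <libavutil/mathematics.h>
}

// av_rescale_q(a, bq, cq) computes a * bq / cq with 64-bit overflow protection.
// Frame number 50 of a 25 fps stream sits at 2 s = 2,000,000 us:
AVRational srcTb{1, 25};
AVRational dstTb{1, AV_TIME_BASE};             // microsecond time base (AV_TIME_BASE_Q)
int64_t us = av_rescale_q(50, srcTb, dstTb);   // -> 2000000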
3. Main window calling code
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QFileDialog>
#include <QDebug>

MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    connect(this, SIGNAL(sgnShowImage(QImage)), this, SLOT(sltShowImage(QImage)));
    vdVideoDecode.globalInit();
    m_playImages = this->findChildren<PlayImage *>();
}

MainWindow::~MainWindow()
{
    delete ui;
    vdVideoDecode.globalUnInit();
}

void MainWindow::sltShowImage(QImage qimage)
{
    if(vdVideoDecode.isClose())
        return;
    for(int i = 0; i < m_playImages.count(); i++)
    {
        m_playImages.at(i)->updateImage(qimage);
    }
}

void MainWindow::on_pushButtonOpenFile_clicked(bool checked)
{
    QString filename = QFileDialog::getOpenFileName(nullptr, "Open Video File");
    if (!filename.isEmpty())
    {
        //vdVideoDecode.setHWDecoder(true);
        vdVideoDecode.setStreamDecodeCallback([](int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData)
        {
            MainWindow *pMainWindow = (MainWindow *)pUserData;
            QImage qimage = VideoDecode::ConvertRGB24FrameToQImage(pStreamData[0], width, height);
            emit pMainWindow->sgnShowImage(qimage);
        }, this);
        vdVideoDecode.setFileStatusCallback([](int FileStatus, int nErrorCode, void * pUserData)
        {
            qDebug() << "file is end";
        }, this);
        vdVideoDecode.open(filename);
        /*
        if(player.openFile(filename))
        {
            player.show();
        }
        */
    }
}

void MainWindow::on_pushButtonCloseFile_clicked()
{
    vdVideoDecode.close();
}
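playimage.h/playimage.cpp are listed in the .pro file but not reproduced in this article (they are in the repository below). For reference, a minimal sketch of what such a widget needs to do, assuming only the updateImage(QImage) interface used above; this is not the repository version:

// playimage.h (minimal sketch)
#include <QWidget>
#include <QImage>
#include <QPainter>

class PlayImage : public QWidget
{
    Q_OBJECT
public:
    using QWidget::QWidget;

    // Store the latest frame and schedule a repaint (called on the UI thread).
    void updateImage(const QImage& image)
    {
        m_image = image;
        update();
    }

protected:
    void paintEvent(QPaintEvent*) override
    {
        QPainter painter(this);
        if(!m_image.isNull())
        {
            // Scale the frame to the widget while keeping the aspect ratio, centered
            QImage img = m_image.scaled(size(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
            int x = (width()  - img.width())  / 2;
            int y = (height() - img.height()) / 2;
            painter.drawImage(x, y, img);
        }
    }

private:
    QImage m_image;
};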
Full source download: https://gitee.com/byxdaz/ffmpeg-qt-player