ffmpeg+QOpenGLWidget顯示視頻

一個基于 FFmpeg 4.x 和 QOpenGLWidget 的簡單視頻播放器代碼示例,實現視頻解碼和渲染到 Qt 窗口的功能。
1)ffmpeg庫界面,視頻解碼支持軟解和硬解方式。
硬解后,硬件解碼完成需要將數據從GPU復制到CPU。優先采用av_hwframe_map函數,失敗后再使用av_hwframe_transfer_data。

    av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_READ);                   // 映射硬件數據幀/*av_hwframe_map 映射硬件數據幀,第3個參數值有三種類型:AV_HWFRAME_MAP_READ:目標幀可讀。AV_HWFRAME_MAP_WRITE:目標幀可寫。AV_HWFRAME_MAP_DIRECT:避免數據拷貝(依賴硬件支持)?。優先使用 AV_HWFRAME_MAP_DIRECT 減少內存拷貝開銷?。使用AV_HWFRAME_MAP_DIRECT時,你應該確保你的應用邏輯不會修改通過映射獲得的軟件幀內容,以避免不期望的副作用。使用AV_HWFRAME_MAP_READ時,你將獲得數據的一致性但可能會有性能上的損失。*/

2)顯示幀數據采用QOpenGLWidget。
class FrameOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions{
}
該類支持YUV420P、NV12、RGB幀數據顯示。在這幾種數據格式之間切換時,調用類中的setFrameDataFormat方法切換。

3)解碼后,根據碼流類型,更新opengl初始化紋理。以便顯示YUV420P、NV12、RGB數據。
碼流類型改變后,發送信號出去,UI槽中調用setFrameDataFormat,更新opengl初始化紋理。(因為opengl初始化紋理是非線程安全的,需要在UI或同FrameOpenGLWidget線程中處理。)

4)解碼播放時間同步控制。

1. Qt 項目配置(.pro 文件)
QT       += core gui opengl
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
INCLUDEPATH += $$PWD/ffmpeg-4.2.2-win32/include
LIBS += -L$$PWD/ffmpeg-4.2.2-win32/lib -lavcodec -lavformat -lavutil -lswscale

# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS

# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

SOURCES += \
    frameopenglwidget.cpp \
    main.cpp \
    mainwindow.cpp \
    videodecode.cpp

HEADERS += \
    YUV420PWidget.h \
    frameopenglwidget.h \
    mainwindow.h \
    videodecode.h

FORMS += \
    mainwindow.ui

# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target

DISTFILES += \

RESOURCES += \
    player.qrc
2. 視頻解碼類

videodecode.h

#ifndef VIDEODECODE_H
#define VIDEODECODE_H

// Video decoding class: demuxes and decodes a media file or network stream
// with FFmpeg (software or hardware decoding) and delivers decoded frames
// and file-status events through C-style callbacks.
#include <QString>
#include <QImage>
#include <thread>
#include <vector>   // fix: std::vector is used below but was previously only available transitively

extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

/*
struct AVFormatContext;
struct AVCodecContext;
struct AVRational;
struct AVPacket;
struct AVFrame;
struct AVCodec;
struct SwsContext;
struct AVBufferRef;
*/

// Stream type delivered through StreamDecodeCallback.
enum StreamType
{
    StreamType_Video        = 0,
    StreamType_Audio        = 1,
    StreamType_Text         = 2,
};

// Pixel format of the frame data delivered through StreamDecodeCallback.
enum FormatType
{
    FormatType_RGB24        =   0,
    FormatType_RGB32        =   1,
    FormatType_YUV420P      =   2,
    FormatType_NV12         =   3,
};

// File status reported through FileStatusCallback.
enum  FileStatus
{
    FileStatus_OverFileTail     =   0,  // reached end of file
    FileStatus_OverFileHead     =   1,  // reached head of file
    FileStatus_TrigeException   =   2,  // an exception occurred
};

// Stream decode callback: invoked from the decode thread with one decoded frame.
// nStreamType/nFormatType take StreamType/FormatType values; llDecodeTs/llPlayTs
// are in microseconds (AV_TIME_BASE units); pStreamData/linesize follow the
// AVFrame plane layout for the given format.
typedef void (*StreamDecodeCallback)(int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData);

// File status callback: invoked from the decode thread (e.g. on end of file).
typedef  void (*FileStatusCallback)(int FileStatus, int nErrorCode, void * pUserData);

class VideoDecode
{
public:
    VideoDecode();
    ~VideoDecode();

public:
    void globalInit();      // initialize the FFmpeg library (once per process)
    void globalUnInit();    // tear down the FFmpeg library (once per process)

public:
    void setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData);
    void setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData);
    void setHWDecoder(bool flag);                 // enable/disable the hardware decoder (call before open())
    bool isHWDecoder();
    bool open(const QString& url);              // open a media file, or a stream (rtmp/rtsp/http)
    void close();                               // stop the decode thread and free resources
    bool isClose();

public:
    void decodeProccessThread();                // decode-thread entry point
    static QImage ConvertRGB24FrameToQImage(unsigned char *data, int width, int height);

protected:
    void initHWDecoder(const AVCodec *codec);
    bool dataCopy();    // after hardware decode, copy the data from GPU to CPU
    void freeDecode();
    qreal rationalToDouble(AVRational* rational);

private:
    // FFmpeg-related objects
    AVFormatContext *formatCtx = nullptr;
    AVCodecContext *codecCtx = nullptr;
    AVFrame *frame = nullptr, *rgbFrame = nullptr;
    AVFrame *frameHW = nullptr;                   // destination for GPU->CPU frame transfer
    SwsContext *swsCtx = nullptr;
    uchar* buffer = nullptr;                      // holds the RGB data produced when converting YUV frames
    AVPacket* packet = nullptr;
    int videoStreamIndex = -1;                  // video stream index
    qint64 totalTime    = 0;                    // total duration of the video (ms)
    qint64 totalFrames  = 0;                    // total number of frames
    qint64 obtainFrames = 0;                    // number of frames obtained so far
    qint64 pts          = 0;                    // presentation timestamp of the current frame
    qreal  frameRate    = 0;                    // video frame rate
    int  width = 0;                             // video width
    int  height = 0;                            // video height
    std::vector<int> vecHWDeviceTypes;            // hardware decoders supported by the current environment
    AVBufferRef* hw_device_ctx = nullptr;         // reference to the hardware device context
    bool   hwDecoderFlag = false;                 // whether hardware decoding is requested
    std::thread threadDecode;                     // decode thread
    bool stopWorkFlag = true;                     // true when closed / asked to stop
    StreamDecodeCallback funCallbackByStreamDecode = nullptr;
    void * userDataByStreamDecode = nullptr;
    FileStatusCallback funCallbackByFileStatus = nullptr;
    void * userDataByFileStatus = nullptr;
};

#endif // VIDEODECODE_H

videodecode.cpp

#include "videodecode.h"
#include <QTime>
#include <QDebug>
#include <QStringList>
#include <chrono>/*********************************** FFmpeg獲取GPU硬件解碼幀格式的回調函數 *****************************************/
// Pixel format chosen in initHWDecoder().
// NOTE(review): file-global, so multiple VideoDecode instances share it — the
// last opened instance wins; confirm single-instance usage or move per-instance.
static enum AVPixelFormat g_pixelFormat;
/**
 * @brief      Callback used by FFmpeg to select the pixel format of GPU
 *             hardware-decoded frames.
 * @param s    codec context (unused)
 * @param fmt  list of supported formats, terminated by AV_PIX_FMT_NONE
 * @return     the requested hardware format, or AV_PIX_FMT_NONE if unavailable
 */
AVPixelFormat get_hw_format(AVCodecContext* s, const enum AVPixelFormat* fmt)
{
    Q_UNUSED(s)
    const enum AVPixelFormat* p;
    // Fix: compare against the named sentinel AV_PIX_FMT_NONE instead of the
    // magic number -1 (same value, explicit intent).
    for (p = fmt; *p != AV_PIX_FMT_NONE; p++)
    {
        if(*p == g_pixelFormat)
        {
            return *p;
        }
    }
    // Can happen when too many streams are decoded at once and the GPU runs
    // out of capacity: the requested hardware surface format is not offered.
    qDebug() << "無法獲取硬件表面格式.";
    return AV_PIX_FMT_NONE;
}
/************************************************ END ******************************************************/VideoDecode::VideoDecode()
{}VideoDecode::~VideoDecode()
{
}void VideoDecode::globalInit()
{//        av_register_all();         // 已經從源碼中刪除/*** 初始化網絡庫,用于打開網絡流媒體,此函數僅用于解決舊GnuTLS或OpenSSL庫的線程安全問題。* 一旦刪除對舊GnuTLS和OpenSSL庫的支持,此函數將被棄用,并且此函數不再有任何用途。*/avformat_network_init();
}void VideoDecode::globalUnInit()
{avformat_network_deinit();
}qreal VideoDecode::rationalToDouble(AVRational* rational)
{qreal frameRate = (rational->den == 0) ? 0 : (qreal(rational->num) / rational->den);return frameRate;
}void VideoDecode::setStreamDecodeCallback(StreamDecodeCallback funStreamDecodeCallback, void * userData)
{funCallbackByStreamDecode = funStreamDecodeCallback;userDataByStreamDecode = userData;
}
void VideoDecode::setFileStatusCallback(FileStatusCallback funFileStatusCallback, void * userData)
{funCallbackByFileStatus = funFileStatusCallback;userDataByFileStatus = userData;
}//初始化硬件解碼器
// Initialize the hardware decoder: walk the codec's hardware configs and open
// the first device type that is also present in vecHWDeviceTypes. On success,
// sets codecCtx->hw_device_ctx and the get_format callback; on failure the
// codec context is left untouched so software decoding proceeds.
void VideoDecode::initHWDecoder(const AVCodec *codec)
{
    if(!codec) return;

    for(int i = 0; ; i++)
    {
        const AVCodecHWConfig* config = avcodec_get_hw_config(codec, i);    // retrieve the hardware configs supported by the codec
        if(!config)
        {
            qDebug() << "打開硬件解碼器失敗!";
            return;          // no supported hardware configuration found
        }

        if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX)       // config must support the device-context method
        {
            // NOTE(review): this inner `i` shadows the outer loop index; it
            // iterates device types, not config indices.
            for(auto i : vecHWDeviceTypes)
            {
                if(config->device_type == AVHWDeviceType(i))                 // device type is one supported by this environment
                {
                    // Remember the hardware frame format for get_hw_format().
                    g_pixelFormat = config->pix_fmt;

                    // Open a device of the given type and create its AVHWDeviceContext.
                    int ret = av_hwdevice_ctx_create(&hw_device_ctx, config->device_type, nullptr, nullptr, 0);
                    if(ret < 0)
                    {
                        // NOTE(review): freeDecode() here tears down the whole
                        // decoder (formatCtx included) rather than just falling
                        // back to software decoding — confirm intended.
                        freeDecode();
                        return ;
                    }
                    qDebug() << "打開硬件解碼器:" << av_hwdevice_get_type_name(config->device_type);
                    codecCtx->hw_device_ctx = av_buffer_ref(hw_device_ctx);  // new reference to the device context buffer
                    codecCtx->get_format = get_hw_format;                    // called by the decoder to pick the output frame format
                    return;
                }
            }
        }
    }
}
// After hardware decoding, move the frame data from GPU to CPU (into frameHW).
// Tries the cheap av_hwframe_map() first and falls back to the slower
// av_hwframe_transfer_data(). Returns false (and unrefs `frame`) on failure.
bool VideoDecode::dataCopy()
{
    if(frame->format != g_pixelFormat)
    {
        av_frame_unref(frame);
        return false;
    }
    // av_hwframe_map is faster than av_hwframe_transfer_data (available since
    // FFmpeg 3.3). Third argument options:
    //   AV_HWFRAME_MAP_READ   — destination frame is readable (consistent data,
    //                           possible performance cost);
    //   AV_HWFRAME_MAP_WRITE  — destination frame is writable;
    //   AV_HWFRAME_MAP_DIRECT — avoid a copy (hardware dependent; the mapped
    //                           software frame must then not be modified).
    int ret = av_hwframe_map(frameHW, frame, AV_HWFRAME_MAP_READ);
    if(ret >= 0)
    {
        // Mapping succeeded: only width/height need to be filled in.
        frameHW->width = frame->width;
        frameHW->height = frame->height;
    }
    else
    {
        // Mapping failed: do a real GPU->CPU copy. Slow, but hardware decode
        // is still much faster than software decode overall.
        ret = av_hwframe_transfer_data(frameHW, frame, 0);
        if(ret < 0)
        {
            av_frame_unref(frame);
            return false;
        }
        av_frame_copy_props(frameHW, frame);   // copy only the "metadata" fields from src to dst
    }
    return true;
}

// Request hardware decoding (takes effect on the next open()).
void VideoDecode::setHWDecoder(bool flag)
{
    hwDecoderFlag = flag;
}

bool VideoDecode::isHWDecoder()
{
    return hwDecoderFlag;
}

// Open a media file or stream, set up the decoder and start the decode thread.
// Returns false (after freeing any partial state) on any setup failure.
bool VideoDecode::open(const QString& url)
{
    if(url.isNull()) return false;

    // Enumerate the hardware decoder device types FFmpeg supports here.
    AVHWDeviceType type = AV_HWDEVICE_TYPE_NONE;
    QStringList strTypes;
    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)
    {
        vecHWDeviceTypes.push_back(type);
        const char* ctype = av_hwdevice_get_type_name(type);  // string name of the AVHWDeviceType
        if(ctype)
        {
            strTypes.append(QString(ctype));
        }
    }
    qDebug() << "支持的硬件解碼器:";
    qDebug() << strTypes;

    AVDictionary* dict = nullptr;
    // Use TCP for rtsp; on "Error number -135 occurred" try UDP/udp_multicast/http instead.
    av_dict_set(&dict, "rtsp_transport", "tcp", 0);
    // Max demux delay in microseconds (relevant for out-of-order UDP packets).
    av_dict_set(&dict, "max_delay", "3", 0);
    // Socket TCP I/O timeout in microseconds.
    av_dict_set(&dict, "timeout", "1000000", 0);

    // Open the input and create the demux context. nullptr = auto-detect format.
    int ret = avformat_open_input(&formatCtx, url.toStdString().data(), nullptr, &dict);
    // Release the options dictionary.
    if(dict)
    {
        av_dict_free(&dict);
    }
    if(ret < 0)
    {
        qDebug() << "Failed to avformat_open_input";
        return false;
    }

    // Read some packets to discover stream information.
    ret = avformat_find_stream_info(formatCtx, nullptr);
    if(ret < 0)
    {
        qDebug() << "Failed to avformat_find_stream_info";
        freeDecode();
        return false;
    }
    totalTime = formatCtx->duration / (AV_TIME_BASE / 1000); // total duration in milliseconds
    qDebug() << QString("視頻總時長:%1 ms,[%2]").arg(totalTime).arg(QTime::fromMSecsSinceStartOfDay(int(totalTime)).toString("HH:mm:ss zzz"));

    // Locate the video stream by media type (last parameter unused).
    videoStreamIndex = av_find_best_stream(formatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, 0);
    if(videoStreamIndex < 0)
    {
        qDebug() << "Failed to av_find_best_stream";
        freeDecode();
        return false;
    }
    AVStream* videoStream = formatCtx->streams[videoStreamIndex];

    // Resolution from AVCodecParameters (AVStream::codec is deprecated).
    width = videoStream->codecpar->width;
    height = videoStream->codecpar->height;
    frameRate = rationalToDouble(&videoStream->avg_frame_rate);  // video frame rate

    // Find the decoder by codec id (new FFmpeg versions require const).
    // NOTE(review): avcodec_find_decoder can return nullptr; codec->name below
    // would then crash — confirm inputs always have a known codec.
    const AVCodec* codec = avcodec_find_decoder(videoStream->codecpar->codec_id);
    totalFrames = videoStream->nb_frames;
    qDebug() << QString("分辨率:[w:%1,h:%2] 幀率:%3  總幀數:%4  解碼器:%5").arg(width).arg(height).arg(frameRate).arg(totalFrames).arg(codec->name);

    // Allocate the codec context with default values.
    codecCtx = avcodec_alloc_context3(codec);
    if(!codecCtx)
    {
        qDebug() << "Failed to avcodec_alloc_context3";
        freeDecode();
        return false;
    }
    // Fill the codec context from the stream's codec parameters.
    ret = avcodec_parameters_to_context(codecCtx, videoStream->codecpar);
    if(ret < 0)
    {
        qDebug() << "Failed to avcodec_parameters_to_context";
        freeDecode();
        return false;
    }
    codecCtx->flags2 |= AV_CODEC_FLAG2_FAST;    // allow non-spec-compliant speedups
    codecCtx->thread_count = 8;                 // decode with 8 threads

    if(isHWDecoder())
    {
        initHWDecoder(codec);     // must run before avcodec_open2
    }

    // Open the decoder (codec already given to avcodec_alloc_context3, so NULL here).
    ret = avcodec_open2(codecCtx, nullptr, nullptr);
    if(ret < 0)
    {
        qDebug() << "Failed to avcodec_open2";
        freeDecode();
        return false;
    }

    // Allocate the packet used by the decode loop.
    packet = av_packet_alloc();
    if(!packet)
    {
        qDebug() << "Failed to av_packet_alloc";
        freeDecode();
        return false;
    }

    // Allocate frames and the RGB staging buffer.
    frame = av_frame_alloc();
    rgbFrame = av_frame_alloc();
    frameHW = av_frame_alloc();
    int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, codecCtx->width, codecCtx->height, 1);
    buffer = (uint8_t *)av_malloc(size + 1000);  // NOTE(review): +1000 slack bytes — presumably guards against row padding; confirm
    av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24, codecCtx->width, codecCtx->height, 1);
    /*
    // Initialize the SWS context (YUV -> RGB conversion); now created lazily
    // in the decode thread instead.
    swsCtx = sws_getContext(codecCtx->width, codecCtx->height, codecCtx->pix_fmt,
                            codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB24,
                            SWS_BILINEAR, nullptr, nullptr, nullptr);
    */

    // Start the decode thread.
    stopWorkFlag = false;
    std::thread t(std::bind(&VideoDecode::decodeProccessThread,this));
    threadDecode = std::move(t);
    return true;
}

// Stop the decode thread, flush the demuxer and free all resources.
void VideoDecode::close()
{
    stopWorkFlag = true;
    // avformat_flush does not flush the AVIOContext (s->pb); flush it first.
    if(formatCtx && formatCtx->pb)
    {
        avio_flush(formatCtx->pb);
    }
    if(formatCtx)
    {
        avformat_flush(formatCtx);   // clear the read buffers
    }
    if(threadDecode.joinable())
    {
        threadDecode.join();
    }
    freeDecode();
}

// Returns true once close() has been requested (or before open()).
bool VideoDecode::isClose()
{
    return stopWorkFlag;
}

// Wrap raw RGB24 data in a QImage (no copy: `data` must outlive the image).
QImage VideoDecode::ConvertRGB24FrameToQImage(unsigned char *data, int width, int height)
{
    QImage img(data, width, height, QImage::Format_RGB888);
    return img;
}

// Decode thread: read packets, decode frames, pace them against a wall clock
// anchored at the first frame's PTS, and hand them to the stream callback
// (YUV420P / NV12 passed through; everything else converted to RGB24).
void VideoDecode::decodeProccessThread()
{
    std::chrono::high_resolution_clock::time_point tpStart = std::chrono::high_resolution_clock::now();
    // Fallback inter-frame wait (ms) when the frame rate is unknown.
    // NOTE(review): nWaitTimes is computed but never used below.
    int nWaitTimes = 40;
    if(frameRate != 0)
    {
        nWaitTimes = 1000.0/frameRate;
    }
    long long llDecodeTs = 0;                 // decode timestamp (us)
    long long llPlayTs = 0;                   // presentation timestamp (us)
    long long llStartPlayTs = 0;              // PTS of the first frame (clock anchor)
    bool bStartPlayTsSetValueFlag = false;
    bool bProccessFileTail = false;           // reached end of file

    while (true)
    {
        if(stopWorkFlag)
        {
            break;
        }
        // Read the next packet.
        int readRet = av_read_frame(formatCtx, packet);
        if(readRet < 0)
        {
            if (readRet == AVERROR_EOF)
            {
                // Send a drain packet so the last buffered frames can be received.
                int ret = avcodec_send_packet(codecCtx, packet);
                if(ret < 0)
                {
                    av_packet_unref(packet);
                    bProccessFileTail = true;
                    break;
                }
            }
            else
            {
                break;
            }
        }
        else
        {
            if(stopWorkFlag)
            {
                break;
            }
            if(packet->stream_index == videoStreamIndex)     // decode only the video stream
            {
                // Rescale packet timestamps to the decoder time base.
                av_packet_rescale_ts(packet, formatCtx->streams[videoStreamIndex]->time_base, codecCtx->time_base);
                // Feed the raw packet to the decoder.
                int ret = avcodec_send_packet(codecCtx, packet);
                if(ret < 0)
                {
                    qDebug() << "Error sending packet";
                    av_packet_unref(packet);
                    continue;
                }
            }
            else
            {
                // Other streams (e.g. audio) are skipped.
                av_packet_unref(packet);
                continue;
            }
        }

        // Receive one decoded frame (one frame per loop iteration).
        int ret = avcodec_receive_frame(codecCtx, frame);
        if (ret == AVERROR(EAGAIN))
        {
            av_packet_unref(packet);
            continue;
        }
        else if (ret == AVERROR_EOF)
        {
            av_packet_unref(packet);
            // No more packets and the decoder is drained: playback finished.
            bProccessFileTail = true;
            break;
        }
        else if (ret < 0)
        {
            qDebug() << "Error during decoding";
            av_packet_unref(packet);
            continue;
        }
        else
        {
            // frameTemp points at the CPU-side frame whichever path produced it
            // (handles software decode and failed hardware-decoder setup alike).
            AVFrame*  frameTemp = frame;
            if(!frame->data[0])               // hardware decode: data lives on the GPU
            {
                // Copy the decoded data from GPU to CPU.
                if(!dataCopy())
                {
                    av_frame_unref(frameHW);
                    continue;
                }
                frameTemp = frameHW;
            }

            // --- Timestamp handling ---
            int64_t raw_pts = frameTemp->pts;
            int64_t raw_dts = frameTemp->pkt_dts;
            // Handle missing timestamps: fall back to DTS, then to 0.
            if (raw_pts == AV_NOPTS_VALUE)
            {
                if(raw_dts != AV_NOPTS_VALUE)
                {
                    raw_pts = raw_dts;
                }
                else
                {
                    raw_pts = 0;
                    raw_dts = 0;
                }
            }
            // Display time in seconds (for logging).
            double display_time = raw_pts * av_q2d(codecCtx->time_base);
            // Rescale to the global time base {1, AV_TIME_BASE} (microseconds),
            // e.g. for audio/video sync.
            AVRational timeBaseTemp{1, AV_TIME_BASE};
            llPlayTs = av_rescale_q(raw_pts, codecCtx->time_base, timeBaseTemp);
            llDecodeTs = av_rescale_q(raw_dts, codecCtx->time_base, timeBaseTemp);
            if(!bStartPlayTsSetValueFlag)
            {
                llStartPlayTs = llPlayTs;
                bStartPlayTsSetValueFlag = true;
            }
            qDebug("Frame:%4d PTS:%lld display_time:%.2f DTS:%lld llPlayTs:%lld llDecodeTs:%lld packet dts:%lld pts:%lld",
                   codecCtx->frame_number, raw_pts, display_time, raw_dts, llPlayTs, llDecodeTs, packet->dts, packet->pts);
            av_packet_unref(packet);  // drop our packet reference (freed when count reaches 0)

            // (Re)create the RGB buffer and scaler on first use or resolution change.
            if(!swsCtx || (frameTemp->width != width || frameTemp->height != height))
            {
                width = frameTemp->width;
                height = frameTemp->height;
                if(swsCtx)
                {
                    sws_freeContext(swsCtx);
                    swsCtx = nullptr;
                }
                if(buffer)
                {
                    av_free(buffer);
                    buffer = nullptr;
                }
                int size = av_image_get_buffer_size(AV_PIX_FMT_RGB24, frameTemp->width, frameTemp->height, 1);
                buffer = (uint8_t *)av_malloc(size + 1000);
                av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, buffer, AV_PIX_FMT_RGB24, frameTemp->width, frameTemp->height, 1);
                swsCtx = sws_getCachedContext(swsCtx,
                                              frameTemp->width,                   // input width
                                              frameTemp->height,                  // input height
                                              (AVPixelFormat)frameTemp->format,   // input pixel format
                                              frameTemp->width,                   // output width
                                              frameTemp->height,                  // output height
                                              AV_PIX_FMT_RGB24,                   // output pixel format
                                              SWS_BILINEAR,                       // scaler (only used when sizes differ)
                                              nullptr,                            // input filter, NULL if unused
                                              nullptr,                            // output filter, NULL if unused
                                              nullptr);
            }

            // --- Pacing: sleep until this frame's play time relative to start ---
            long long llPlayTsDiff = llPlayTs - llStartPlayTs;
            auto duration = std::chrono::duration_cast<std::chrono::microseconds>(std::chrono::high_resolution_clock::now() - tpStart);
            // Time still to wait, in microseconds.
            int64_t delay = llPlayTsDiff - duration.count();
            if (delay > 0)
            {
                std::this_thread::sleep_for(std::chrono::microseconds(delay)); // wait until the target time
            }
            else if (delay < -100000)
            {
                // More than 100 ms late: drop this frame to catch up.
                av_frame_unref(frame);
                av_frame_unref(frameHW);
                continue;
            }

            // --- Deliver the frame ---
            if(/*0 && */frameTemp->format == AV_PIX_FMT_YUV420P)
            {
                // Pass YUV420P planes straight through for rendering.
                if(funCallbackByStreamDecode && !stopWorkFlag)
                {
                    funCallbackByStreamDecode(StreamType_Video,FormatType_YUV420P,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,frameTemp->data, frameTemp->linesize, userDataByStreamDecode);
                }
            }
            else if(frameTemp->format == AV_PIX_FMT_NV12)
            {
                // Pass NV12 planes straight through for rendering.
                if(funCallbackByStreamDecode && !stopWorkFlag)
                {
                    funCallbackByStreamDecode(StreamType_Video,FormatType_NV12,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,frameTemp->data, frameTemp->linesize, userDataByStreamDecode);
                }
            }
            else
            {
                // Any other format: convert to RGB24 first.
                sws_scale(swsCtx, frameTemp->data, frameTemp->linesize, 0, frameTemp->height, rgbFrame->data, rgbFrame->linesize);
                if(funCallbackByStreamDecode && !stopWorkFlag)
                {
                    funCallbackByStreamDecode(StreamType_Video,FormatType_RGB24,llDecodeTs,llPlayTs,frameTemp->width,frameTemp->height,rgbFrame->data, rgbFrame->linesize, userDataByStreamDecode);
                }
            }
            av_frame_unref(frame);
            av_frame_unref(frameHW);
        }
    }

    // Report end-of-file unless we are stopping because of close().
    if(bProccessFileTail && !stopWorkFlag)
    {
        if(funCallbackByFileStatus != nullptr)
        {
            funCallbackByFileStatus(FileStatus_OverFileTail, 0, userDataByFileStatus);
        }
    }
    qDebug()<<"thread is eixt";
}

// Free every FFmpeg object owned by this instance (all frees null-guarded,
// so this is safe to call repeatedly or on a partially opened instance).
void VideoDecode::freeDecode()
{
    if (swsCtx)
    {
        sws_freeContext(swsCtx);
        swsCtx = nullptr;
    }
    if (rgbFrame)
    {
        av_frame_free(&rgbFrame);
        rgbFrame = nullptr;
    }
    if (frame)
    {
        av_frame_free(&frame);
        frame = nullptr;
    }
    if(frameHW)
    {
        av_frame_free(&frameHW);
        frameHW = nullptr;
    }
    if (codecCtx)
    {
        avcodec_free_context(&codecCtx);
        codecCtx = nullptr;
    }
    if (formatCtx)
    {
        avformat_close_input(&formatCtx);
        formatCtx = nullptr;
    }
    if(buffer != nullptr)
    {
        av_free(buffer);
        buffer = nullptr;
    }
}
3. 幀數據顯示窗口類

frameopenglwidget.h

#ifndef FRAMEOPENGLWIDGET_H
#define FRAMEOPENGLWIDGET_H#include <QOpenGLWidget>
#include <QOpenGLShaderProgram>
#include <QOpenGLFunctions>//幀數據格式
// Pixel format of the frame data handed to the widget.
enum FrameDataFormat
{
    FrameDataFormat_None         =  -1,
    FrameDataFormat_RGB24        =   0,
    FrameDataFormat_RGB32        =   1,
    FrameDataFormat_YUV420P      =   2,
    FrameDataFormat_NV12         =   3,
};

// OpenGL widget that renders raw video frames (YUV420P, NV12 or RGB).
// Call setFrameDataFormat() when the incoming format changes, then feed frames
// with the update*Frame() methods (or the generic updateFrame()).
// NOTE(review): no Q_OBJECT macro here, so this class cannot declare its own
// signals/slots — confirm that is intended.
class FrameOpenGLWidget : public QOpenGLWidget, protected QOpenGLFunctions
{
public:
    FrameOpenGLWidget(QWidget *parent = nullptr);
    virtual ~FrameOpenGLWidget();

    // Switch the expected frame format; rebuilds shaders/textures. Must be
    // called on the widget's (GUI) thread because it touches the GL context.
    void  setFrameDataFormat(FrameDataFormat eFrameDataFormat);
    FrameDataFormat getFrameDataFormat();

    // Update with an NV12 frame.
    void updateNV12Frame(const uint8_t* yData, const uint8_t* uvData, int y_linesize, int uv_linesize, int width, int height);
    // Update with a YUV420P frame.
    void updateYUV420PFrame(const uint8_t* yData, const uint8_t* uData, const uint8_t* vData, int y_linesize, int u_linesize,  int v_linesize, int width, int height);
    // Update with an RGB24 frame.
    void updateRGB24Frame(const uint8_t* rgbData, int width, int height, int linesize);
    // Update with an RGB32 frame.
    void updateRGB32Frame(const uint8_t* rgbData, int width, int height, int linesize);
    // Generic update: dispatches on eFrameDataFormat (a FrameDataFormat value).
    void updateFrame(int eFrameDataFormat,  uint8_t **data, int *linesize, int width, int height);

protected:
    void initializeGL() override;               // GL initialization
    void resizeGL(int w, int h) override;       // widget size changed
    void paintGL() override;                    // repaint

protected:
    // Per-format shader setup / texture create / texture delete / draw.
    void initializeGLByNV12Frame();
    void initTextureByNV12Frame();
    void releaseTextureByNV12Frame();
    void paintGLByNV12Frame();

    void initializeGLByYUV420PFrame();
    void initTextureByYUV420PFrame();
    void releaseTextureByYUV420PFrame();
    void paintGLByYUV420PFrame();

    void initializeGLByRGBFrame();
    void initTextureByRGBFrame();
    void releaseTextureByRGBFrame();
    void paintGLByRGBFrame();

    void initializeGLByRGB32Frame();
    void initTextureByRGB32Frame();
    void releaseTextureByRGB32Frame();
    void paintGLByRGB32Frame();

private:
    QOpenGLShaderProgram m_program;     // shader program for the current format
    GLuint m_yTexture = 0;              // Y plane texture (YUV420P / NV12)
    GLuint m_uvTexture = 0;             // interleaved UV texture (NV12)
    GLuint m_uTexture = 0;              // U plane texture (YUV420P)
    GLuint m_vTexture = 0;              // V plane texture (YUV420P)
    GLuint m_rgbTexture = 0;            // RGB24 texture
    GLuint m_rgb32Texture = 0;          // RGB32 texture
    // CPU-side copies of the most recent frame's planes.
    uint8_t* m_yData = nullptr;
    uint8_t* m_uvData = nullptr;
    uint8_t* m_uData = nullptr;
    uint8_t* m_vData = nullptr;
    uint8_t* m_nv12Data = nullptr;
    uint8_t* m_rgbData = nullptr;
    uint8_t* m_rgb32Data = nullptr;
    int m_width = 0;                    // current frame width
    int m_height = 0;                   // current frame height
    QSize  m_size;                      // frame size as a QSize
    QSizeF  m_zoomSize;                 // viewport size keeping the aspect ratio
    QPointF m_pos;                      // viewport origin for letterboxing
    bool m_nFirstUpdateFrame = true;    // true until the first frame arrives
    int  m_nInitFrameDataFormat = FrameDataFormat_RGB24;  // format the GL state was built for
};

#endif // FRAMEOPENGLWIDGET_H

frameopenglwidget.cpp

#include "frameopenglwidget.h"

// Construct with 1x1 placeholder sizes; the GL state defaults to NV12 frames.
FrameOpenGLWidget::FrameOpenGLWidget(QWidget *parent) : QOpenGLWidget(parent)
{
    m_size.setWidth(1);
    m_size.setHeight(1);
    m_zoomSize.setWidth(1);
    m_zoomSize.setHeight(1);
    m_pos.setX(0);
    m_pos.setY(0);
    m_nInitFrameDataFormat = FrameDataFormat_NV12;
}

// Free the CPU-side plane copies and delete the textures of the currently
// active format (textures of other formats were already deleted on switch).
FrameOpenGLWidget::~FrameOpenGLWidget()
{
    if(m_yData != nullptr)
    {
        delete []m_yData;
        m_yData = nullptr;
    }
    if(m_uData != nullptr)
    {
        delete []m_uData;
        m_uData = nullptr;
    }
    if(m_vData != nullptr)
    {
        delete []m_vData;
        m_vData = nullptr;
    }
    if(m_uvData != nullptr)
    {
        delete []m_uvData;
        m_uvData = nullptr;
    }
    if(m_rgbData != nullptr)
    {
        delete []m_rgbData;
        m_rgbData = nullptr;
    }
    if(m_rgb32Data != nullptr)
    {
        delete []m_rgb32Data;
        m_rgb32Data = nullptr;
    }
    // GL resource deletion needs the context current.
    makeCurrent();
    switch (m_nInitFrameDataFormat) {
    case FrameDataFormat_RGB24:
        releaseTextureByRGBFrame();
        break;
    case FrameDataFormat_RGB32:
        releaseTextureByRGB32Frame();
        break;
    case FrameDataFormat_YUV420P:
        releaseTextureByYUV420PFrame();
        break;
    case FrameDataFormat_NV12:
        releaseTextureByNV12Frame();
        break;
    }
    doneCurrent();
}

// Switch the expected frame format: release the old textures, then create
// textures and shaders for the new format. No-op if the format is unchanged.
// Must run on the widget's thread (GL texture setup is not thread-safe).
void  FrameOpenGLWidget::setFrameDataFormat(FrameDataFormat eFrameDataFormat)
{
    if(m_nInitFrameDataFormat != eFrameDataFormat)
    {
        makeCurrent();
        m_nInitFrameDataFormat = eFrameDataFormat;
        switch (m_nInitFrameDataFormat) {
        case FrameDataFormat_RGB24:
            releaseTextureByRGBFrame();
            // initialize textures and shaders for RGB24
            initTextureByRGBFrame();
            initializeGLByRGBFrame();
            break;
        case FrameDataFormat_RGB32:
            releaseTextureByRGB32Frame();
            // initialize textures and shaders for RGB32
            initTextureByRGB32Frame();
            initializeGLByRGB32Frame();
            break;
        case FrameDataFormat_YUV420P:
            releaseTextureByYUV420PFrame();
            // initialize textures and shaders for YUV420P
            initTextureByYUV420PFrame();
            initializeGLByYUV420PFrame();
            break;
        case FrameDataFormat_NV12:
            releaseTextureByNV12Frame();
            // initialize textures and shaders for NV12
            initTextureByNV12Frame();
            initializeGLByNV12Frame();
            break;
        }
        doneCurrent();
    }
}
// Report the frame format the GL pipeline is currently configured for.
FrameDataFormat FrameOpenGLWidget::getFrameDataFormat()
{
    return static_cast<FrameDataFormat>(m_nInitFrameDataFormat);
}
// Update NV12 frame data
// Copy an NV12 frame (Y plane + interleaved UV plane) into internal buffers
// and schedule a repaint. Buffers are (re)allocated on first use or when the
// frame width/height changes.
// NOTE(review): reallocation is keyed on width/height only — if only the
// linesize changes (alignment), the old buffer could be too small; confirm
// callers keep linesize stable for a given resolution.
void FrameOpenGLWidget::updateNV12Frame(const uint8_t* yData, const uint8_t* uvData, int y_linesize, int uv_linesize, int width, int height)
{
    if(m_yData == nullptr || m_uvData == nullptr)
    {
        // First frame: allocate both plane buffers.
        if(m_yData != nullptr)
        {
            delete []m_yData;
        }
        if(m_uvData != nullptr)
        {
            delete []m_uvData;
        }
        m_yData = new uint8_t[y_linesize*height + 1];
        m_uvData = new uint8_t[uv_linesize*height/2 + 1];   // UV plane is half height
    }
    else
    {
        if(width != m_width || height != m_height)
        {
            // Resolution changed: reallocate to the new size.
            if(m_yData != nullptr && m_uvData != nullptr)
            {
                delete []m_yData;
                delete []m_uvData;
                m_yData = new uint8_t[y_linesize*height + 1];
                m_uvData = new uint8_t[uv_linesize*height/2 + 1];
            }
        }
    }
    memcpy(m_yData, yData, y_linesize*height);
    memcpy(m_uvData, uvData, uv_linesize*height/2);
    m_width = width;
    m_height = height;
    m_size.setWidth(width);
    m_size.setHeight(height);
    if(m_nFirstUpdateFrame)
    {
        // First frame: recompute the letterboxed viewport now that the
        // frame aspect ratio is known.
        resizeGL(size().width(), size().height());
        m_nFirstUpdateFrame = false;
    }
    update();
}
//更新YUV420P數據
// Copy a YUV420P frame (separate Y, U, V planes) into internal buffers and
// schedule a repaint. Buffers are (re)allocated on first use or when the
// frame width/height changes.
// NOTE(review): as with NV12, reallocation ignores linesize changes — confirm
// linesize is stable per resolution.
void FrameOpenGLWidget::updateYUV420PFrame(const uint8_t* yData, const uint8_t* uData, const uint8_t* vData, int y_linesize, int u_linesize,  int v_linesize, int width, int height)
{
    if(m_yData == nullptr || m_uData == nullptr || m_vData == nullptr)
    {
        // First frame: allocate all three plane buffers.
        if(m_yData != nullptr)
        {
            delete []m_yData;
        }
        if(m_uData != nullptr)
        {
            delete []m_uData;
        }
        if(m_vData != nullptr)
        {
            delete []m_vData;
        }
        m_yData = new uint8_t[y_linesize*height + 1];
        m_uData = new uint8_t[u_linesize*height/2 + 1];   // chroma planes are half height
        m_vData = new uint8_t[v_linesize*height/2 + 1];
    }
    else
    {
        if(width != m_width || height != m_height)
        {
            // Resolution changed: reallocate to the new size.
            if(m_yData != nullptr && m_uData != nullptr && m_vData != nullptr)
            {
                delete []m_yData;
                delete []m_uData;
                delete []m_vData;
                m_yData = new uint8_t[y_linesize*height + 1];
                m_uData = new uint8_t[u_linesize*height/2 + 1];
                m_vData = new uint8_t[v_linesize*height/2 + 1];
            }
        }
    }
    memcpy(m_yData, yData, y_linesize*height);
    memcpy(m_uData, uData, u_linesize*height/2);
    memcpy(m_vData, vData, v_linesize*height/2);
    m_width = width;
    m_height = height;
    m_size.setWidth(width);
    m_size.setHeight(height);
    if(m_nFirstUpdateFrame)
    {
        // First frame: recompute the letterboxed viewport.
        resizeGL(size().width(), size().height());
        m_nFirstUpdateFrame = false;
    }
    update();
}
//更新RGB24數據
void FrameOpenGLWidget::updateRGB24Frame(const uint8_t* rgbData, int width, int height, int linesize)
{if(m_rgbData == nullptr){m_rgbData = new uint8_t[linesize*height + 1000];}else{if(width != m_width || height != m_height){if(m_rgbData != nullptr){delete []m_rgbData;m_rgbData = new uint8_t[linesize*height + 1000];}}}memcpy(m_rgbData, rgbData, linesize*height);m_width = width;m_height = height;m_size.setWidth(width);m_size.setHeight(height);if(m_nFirstUpdateFrame){resizeGL(size().width(), size().height());m_nFirstUpdateFrame = false;}update();
}
//更新RGB32數據
void FrameOpenGLWidget::updateRGB32Frame(const uint8_t* rgbData, int width, int height, int linesize)
{if(m_rgb32Data == nullptr){m_rgb32Data = new uint8_t[linesize*height + 1000];}else{if(width != m_width || height != m_height){if(m_rgb32Data != nullptr){delete []m_rgb32Data;m_rgb32Data = new uint8_t[linesize*height + 1000];}}}memcpy(m_rgb32Data, rgbData, linesize*height);m_width = width;m_height = height;m_size.setWidth(width);m_size.setHeight(height);if(m_nFirstUpdateFrame){resizeGL(size().width(), size().height());m_nFirstUpdateFrame = false;}update();
}
//更新幀數據
// Generic frame update: dispatch to the per-format update method.
// `data`/`linesize` follow the AVFrame plane layout for the given format.
void FrameOpenGLWidget::updateFrame(int eFrameDataFormat,  uint8_t **data, int *linesize, int width, int height)
{
    switch (eFrameDataFormat) {
    case FrameDataFormat_RGB24:
        updateRGB24Frame(data[0], width, height, linesize[0]);
        break;
    case FrameDataFormat_RGB32:
        updateRGB32Frame(data[0], width, height, linesize[0]);
        break;
    case FrameDataFormat_YUV420P:
        updateYUV420PFrame(data[0],data[1],data[2], linesize[0], linesize[1],linesize[2], width,height);
        break;
    case FrameDataFormat_NV12:
        updateNV12Frame(data[0],data[1],linesize[0], linesize[1],width,height);
        break;
    }
}

// Build the shader program for NV12 rendering: a pass-through vertex shader
// and a fragment shader doing BT.601 limited-range YUV -> RGB conversion
// (Y from tex_y, interleaved CbCr from tex_uv).
void FrameOpenGLWidget::initializeGLByNV12Frame()
{
    m_program.removeAllShaders();
    // Compile the shaders.
    m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
        "#version 330 core\n"
        "layout(location = 0) in vec4 vertexIn;\n"
        "layout(location = 1) in vec2 textureIn;\n"
        "out vec2 textureCoord;\n"
        "void main(void)\n"
        "{\n"
        "    gl_Position = vertexIn;\n"
        "    textureCoord = textureIn;\n"
        "}\n");
    m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
        "#version 330 core\n"
        "in vec2 textureCoord;\n"
        "out vec4 fragColor;\n"
        "uniform sampler2D tex_y;\n"
        "uniform sampler2D tex_uv;\n"
        "void main()\n"
        "{\n"
        "    float y = texture(tex_y, textureCoord).r;\n"
        "    vec2 uv = texture(tex_uv, textureCoord).rg;\n"
        "    y = 1.1643 * (y - 0.0625);\n"
        "    float u = uv.x - 0.5;\n"
        "    float v = uv.y - 0.5;\n"
        "    fragColor = vec4(\n"
        "        y + 1.5958 * v,\n"
        "        y - 0.39173 * u - 0.81290 * v,\n"
        "        y + 2.017 * u,\n"
        "        1.0\n"
        "    );\n"
        "}\n");
    m_program.link();
}
// Create the two texture objects used for NV12 rendering (Y + interleaved UV).
void FrameOpenGLWidget::initTextureByNV12Frame()
{
    glGenTextures(1, &m_yTexture);
    glGenTextures(1, &m_uvTexture);
}
// Delete the NV12 texture objects.
void FrameOpenGLWidget::releaseTextureByNV12Frame()
{
    glDeleteTextures(1, &m_yTexture);
    glDeleteTextures(1, &m_uvTexture);
}
void FrameOpenGLWidget::paintGLByNV12Frame()
{if (!m_yData || !m_uvData) return;glClearColor(0.0f, 0.0f, 0.0f, 1.0f);glClear(GL_COLOR_BUFFER_BIT);#if 1glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height());  // 設置視圖大小實現圖片自適應
#endifm_program.bind();// 更新Y紋理glActiveTexture(GL_TEXTURE0);glBindTexture(GL_TEXTURE_2D, m_yTexture);glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width, m_height, 0, GL_RED, GL_UNSIGNED_BYTE, m_yData);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);// 更新UV紋理glActiveTexture(GL_TEXTURE1);glBindTexture(GL_TEXTURE_2D, m_uvTexture);glTexImage2D(GL_TEXTURE_2D, 0, GL_RG, m_width/2, m_height/2, 0, GL_RG, GL_UNSIGNED_BYTE, m_uvData);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);// 設置紋理單元m_program.setUniformValue("tex_y", 0);m_program.setUniformValue("tex_uv", 1);// 繪制矩形static const GLfloat vertices[] = {-1.0f, -1.0f,1.0f, -1.0f,-1.0f, 1.0f,1.0f, 1.0f,};static const GLfloat texCoords[] = {0.0f, 1.0f,1.0f, 1.0f,0.0f, 0.0f,1.0f, 0.0f,};glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);glEnableVertexAttribArray(0);glEnableVertexAttribArray(1);glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);m_program.release();
}void FrameOpenGLWidget::initializeGLByYUV420PFrame()
{m_program.removeAllShaders();// 編譯著色器m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,"#version 330 core\n""layout(location = 0) in vec4 vertexIn;\n""layout(location = 1) in vec2 textureIn;\n""out vec2 textureCoord;\n""void main() {\n""    gl_Position = vertexIn;\n""    textureCoord = textureIn;\n""}");m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,"#version 330 core\n""in vec2 textureCoord;\n""out vec4 fragColor;\n""uniform sampler2D tex_y;\n""uniform sampler2D tex_u;\n""uniform sampler2D tex_v;\n""const mat3 yuv2rgb = mat3(\n""    1.164383, 1.164383, 1.164383,\n"        // Y系數"    0.0,     -0.391762, 2.017232,\n"         // U系數"    -0.812968, 0.0,      0.0);\n"            // V系數"void main() {\n""    float y = texture(tex_y, textureCoord).r;\n""    float u = texture(tex_u, textureCoord).r;\n""    float v = texture(tex_v, textureCoord).r;\n""    y = 1.1643 * (y - 0.0625);\n""    u = u - 0.5;\n""    v = v - 0.5;\n""    fragColor = vec4(\n""        y + 2.017232 * v,\n""        y - 0.391762 * u - 0.812968 * v,\n""        y + 1.164383 * u,\n""        1.0\n""    );\n""}");m_program.link();
}
// Create the three texture objects used for YUV420P rendering (Y, U, V planes).
void FrameOpenGLWidget::initTextureByYUV420PFrame()
{
    glGenTextures(1, &m_yTexture);
    glGenTextures(1, &m_uTexture);
    glGenTextures(1, &m_vTexture);
}
// Delete the YUV420P texture objects.
void FrameOpenGLWidget::releaseTextureByYUV420PFrame()
{
    glDeleteTextures(1, &m_yTexture);
    glDeleteTextures(1, &m_uTexture);
    glDeleteTextures(1, &m_vTexture);
}
// Draw the most recent YUV420P frame: upload Y, U and V planes as single-channel
// textures and render a full-screen quad through the YUV420P shader program.
// NOTE(review): the planes were copied with their linesize stride but are
// uploaded with width m_width (and GL_UNPACK_ROW_LENGTH unset) — if
// linesize != width the image would skew; confirm callers pass tightly
// packed planes.
void FrameOpenGLWidget::paintGLByYUV420PFrame()
{
    if (!m_yData || !m_uData || !m_vData) return;
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
#if 1
    glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height());  // letterboxed viewport so the image keeps its aspect ratio
#endif
    m_program.bind();

    // Upload the Y plane.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_yTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width, m_height, 0,GL_RED, GL_UNSIGNED_BYTE, m_yData);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Upload the U plane (half size in both dimensions).
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, m_uTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width/2, m_height/2, 0,GL_RED, GL_UNSIGNED_BYTE, m_uData);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Upload the V plane (half size in both dimensions).
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, m_vTexture);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_width/2, m_height/2, 0,GL_RED, GL_UNSIGNED_BYTE, m_vData);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    // Bind the texture units to the sampler uniforms.
    m_program.setUniformValue("tex_y", 0);
    m_program.setUniformValue("tex_u", 1);
    m_program.setUniformValue("tex_v", 2);

    // Draw a full-screen quad (texture V flipped to match image orientation).
    static const GLfloat vertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f, 1.0f,
        1.0f, 1.0f,
    };
    static const GLfloat texCoords[] = {
        0.0f, 1.0f,
        1.0f, 1.0f,
        0.0f, 0.0f,
        1.0f, 0.0f,
    };
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
    glEnableVertexAttribArray(0);
    glEnableVertexAttribArray(1);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    m_program.release();
}

// Build the shader program for RGB24 rendering: pass-through vertex shader
// and a fragment shader that samples the RGB texture directly.
void FrameOpenGLWidget::initializeGLByRGBFrame()
{
    m_program.removeAllShaders();
    // Compile the shaders.
    m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,
        "#version 330 core\n"
        "layout(location = 0) in vec4 vertexIn;\n"
        "layout(location = 1) in vec2 textureIn;\n"
        "out vec2 textureCoord;\n"
        "void main() {\n"
        "    vec4 adjustedPos = vertexIn;\n"
        "    gl_Position = adjustedPos;\n"
        "    textureCoord = textureIn;\n"
        "}");
    m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,
        "#version 330 core\n"
        "in vec2 textureCoord;\n"
        "out vec4 fragColor;\n"
        "uniform sampler2D tex_rgb;\n"
        "void main() {\n"
        "    fragColor = texture(tex_rgb, textureCoord);\n"
        "}");
    m_program.link();
}
void FrameOpenGLWidget::initTextureByRGBFrame()
{// 初始化紋理glGenTextures(1, &m_rgbTexture);
}
// Releases the RGB24 texture. Must be called with the GL context current.
void FrameOpenGLWidget::releaseTextureByRGBFrame()
{
    glDeleteTextures(1, &m_rgbTexture);
}
void FrameOpenGLWidget::paintGLByRGBFrame()
{if (!m_rgbData || m_width <= 0 || m_height <= 0) return;glClearColor(0.0f, 0.0f, 0.0f, 1.0f);glClear(GL_COLOR_BUFFER_BIT);#if 1glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height());  // 設置視圖大小實現圖片自適應
#endifm_program.bind();// 設置像素存儲對齊(針對RGB24格式)glPixelStorei(GL_UNPACK_ALIGNMENT, 1);// 更新RGB紋理glActiveTexture(GL_TEXTURE0);glBindTexture(GL_TEXTURE_2D, m_rgbTexture);glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, m_width, m_height, 0,GL_RGB, GL_UNSIGNED_BYTE, m_rgbData);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);m_program.setUniformValue("tex_rgb", 0);// 頂點和紋理坐標static const GLfloat vertices[] = {-1.0f, -1.0f,1.0f, -1.0f,-1.0f, 1.0f,1.0f, 1.0f,};static const GLfloat texCoords[] = {0.0f, 1.0f,  // 左下1.0f, 1.0f,  // 右下0.0f, 0.0f,  // 左上1.0f, 0.0f   // 右上};glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);glEnableVertexAttribArray(0);glEnableVertexAttribArray(1);glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);m_program.release();
}void FrameOpenGLWidget::initializeGLByRGB32Frame()
{m_program.removeAllShaders();// 編譯著色器m_program.addShaderFromSourceCode(QOpenGLShader::Vertex,"#version 330 core\n""layout(location = 0) in vec4 vertexIn;\n""layout(location = 1) in vec2 textureIn;\n""out vec2 textureCoord;\n""void main() {\n""    vec4 pos = vertexIn;\n""    gl_Position = pos;\n""    textureCoord = textureIn;\n""}");m_program.addShaderFromSourceCode(QOpenGLShader::Fragment,"#version 330 core\n""in vec2 textureCoord;\n""out vec4 fragColor;\n""uniform sampler2D tex_rgb32;\n""void main() {\n""    fragColor = texture(tex, textureCoord);\n""}");m_program.link();
}
void FrameOpenGLWidget::initTextureByRGB32Frame()
{// 初始化紋理glGenTextures(1, &m_rgb32Texture);
}
// Releases the RGB32 texture. Must be called with the GL context current.
void FrameOpenGLWidget::releaseTextureByRGB32Frame()
{
    glDeleteTextures(1, &m_rgb32Texture);
}
void FrameOpenGLWidget::paintGLByRGB32Frame()
{if (!m_rgb32Data || m_width <= 0 || m_height <= 0) return;glClearColor(0.0f, 0.0f, 0.0f, 1.0f);glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);#if 1glViewport(m_pos.x(), m_pos.y(), m_zoomSize.width(), m_zoomSize.height());  // 設置視圖大小實現圖片自適應
#endifm_program.bind();// 設置紋理參數glActiveTexture(GL_TEXTURE0);glBindTexture(GL_TEXTURE_2D, m_rgb32Texture);// 更新紋理數據(根據具體格式選擇)glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,   // 內部格式m_width, m_height, 0,GL_BGRA,          // 數據格式(根據實際數據調整)GL_UNSIGNED_BYTE, m_rgb32Data);// 設置紋理參數glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);m_program.setUniformValue("tex_rgb32", 0);// 頂點和紋理坐標static const GLfloat vertices[] = {-1.0f, -1.0f,  // 左下1.0f, -1.0f,  // 右下-1.0f, 1.0f,   // 左上1.0f, 1.0f    // 右上};static const GLfloat texCoords[] = {0.0f, 1.0f,    // 左下1.0f, 1.0f,    // 右下0.0f, 0.0f,    // 左上1.0f, 0.0f     // 右上};// 設置頂點屬性glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, vertices);glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, texCoords);glEnableVertexAttribArray(0);glEnableVertexAttribArray(1);// 繪制glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);m_program.release();
}void FrameOpenGLWidget::initializeGL()
{initializeOpenGLFunctions();switch (m_nInitFrameDataFormat) {case FrameDataFormat_RGB24:initializeGLByRGBFrame();// 初始化紋理initTextureByRGBFrame();break;case FrameDataFormat_RGB32:initializeGLByRGB32Frame();// 初始化紋理initTextureByRGB32Frame();break;case FrameDataFormat_YUV420P:initializeGLByYUV420PFrame();// 初始化紋理initTextureByYUV420PFrame();break;case FrameDataFormat_NV12:initializeGLByNV12Frame();// 初始化紋理initTextureByNV12Frame();break;}// 指定顏色緩沖區的清除值(背景色)glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
}void FrameOpenGLWidget::resizeGL(int w, int h)
{
#if 1//計算需要顯示圖片的窗口大小,用于實現長寬等比自適應顯示if(m_size.width()  <= 0 || m_size.height() <= 0) return;if((double(w) / h) < (double(m_size.width()) / m_size.height())){m_zoomSize.setWidth(w);m_zoomSize.setHeight(((double(w) / m_size.width()) * m_size.height()));}else{m_zoomSize.setHeight(h);m_zoomSize.setWidth((double(h) / m_size.height()) * m_size.width());}m_pos.setX(double(w - m_zoomSize.width()) / 2);m_pos.setY(double(h - m_zoomSize.height()) / 2);this->update(QRect(0, 0, w, h));
#elseglViewport(0, 0, w, h);
#endif}void FrameOpenGLWidget::paintGL()
{switch (m_nInitFrameDataFormat) {case FrameDataFormat_RGB24:paintGLByRGBFrame();break;case FrameDataFormat_RGB32:paintGLByRGB32Frame();break;case FrameDataFormat_YUV420P:paintGLByYUV420PFrame();break;case FrameDataFormat_NV12:paintGLByNV12Frame();break;}
}
4. 主窗口調用代碼
#include "mainwindow.h"
#include "ui_mainwindow.h"
#include <QFileDialog>
#include <QDebug>MainWindow::MainWindow(QWidget *parent): QMainWindow(parent), ui(new Ui::MainWindow)
{ui->setupUi(this);m_vdVideoDecode.globalInit();m_playImage = this->findChild<FrameOpenGLWidget *>("widget");connect(this, SIGNAL(sgnUpdateFrameFormat(int)), this, SLOT(sltUpdateFrameFormat(int)), Qt::BlockingQueuedConnection);
}MainWindow::~MainWindow()
{delete ui;m_vdVideoDecode.globalUnInit();
}void  MainWindow::sltUpdateFrameFormat(int nFormatType)
{if(m_vdVideoDecode.isClose())return;m_playImage->setFrameDataFormat((FrameDataFormat)nFormatType);
}void MainWindow::on_pushButtonOpenFile_clicked(bool checked)
{bool bIsSupportHardware = ui->checkBoxHW->checkState() == Qt::Checked ? true:false;QString filename = QFileDialog::getOpenFileName(nullptr, "Open Video File");if (!filename.isEmpty()){m_vdVideoDecode.setHWDecoder(bIsSupportHardware);m_vdVideoDecode.setStreamDecodeCallback([](int nStreamType, int nFormatType, long long llDecodeTs, long long llPlayTs, int width, int height, unsigned char ** pStreamData, int * linesize, void * pUserData){MainWindow *pMainWindow = (MainWindow *)pUserData;if(nFormatType !=  pMainWindow->m_playImage->getFrameDataFormat()){//發送信號出去,更新opengl初始化紋理。(因為opengl初始化紋理是非線程安全的,需要在UI或同FrameOpenGLWidget線程中處理。)emit pMainWindow->sgnUpdateFrameFormat(nFormatType);}pMainWindow->m_playImage->updateFrame(nFormatType, pStreamData, linesize, width, height);},this);m_vdVideoDecode.setFileStatusCallback([](int FileStatus, int nErrorCode, void * pUserData){qDebug()<<"file is end";},this);m_vdVideoDecode.open(filename);}
}void MainWindow::on_pushButtonCloseFile_clicked()
{m_vdVideoDecode.close();
}

完整代碼下載:https://gitee.com/byxdaz/ffmpeg--hw--opengl--qt

本文來自互聯網用戶投稿,該文觀點僅代表作者本人,不代表本站立場。本站僅提供信息存儲空間服務,不擁有所有權,不承擔相關法律責任。
如若轉載,請注明出處:http://www.pswp.cn/bicheng/74346.shtml
繁體地址,請注明出處:http://hk.pswp.cn/bicheng/74346.shtml
英文地址,請注明出處:http://en.pswp.cn/bicheng/74346.shtml

如若內容造成侵權/違法違規/事實不符,請聯系多彩編程網進行投訴反饋email:809451989@qq.com,一經查實,立即刪除!

相關文章

深入解析嵌入式內核:從架構到實踐

一、嵌入式內核概述 嵌入式內核是嵌入式操作系統的核心組件&#xff0c;負責管理硬件資源、調度任務、處理中斷等關鍵功能。其核心目標是在資源受限的環境中提供高效、實時的控制能力。與通用操作系統不同&#xff0c;嵌入式內核通常具有高度可裁剪性、實時性和可靠性&#xff…

20250324-使用 `nltk` 的 `sent_tokenize`, `word_tokenize、WordNetLemmatizer` 方法時報錯

解決使用 nltk 的 sent_tokenize, word_tokenize、WordNetLemmatizer 方法時報錯問題 第 2 節的手動方法的法1可解決大部分問題&#xff0c;可首先嘗試章節 2 的方法 1. nltk.download(‘punkt_tab’) LookupError: *******************************************************…

『 C++ 』多線程同步:條件變量及其接口的應用實踐

文章目錄 條件變量概述條件變量簡介條件變量的基本用法 案例&#xff1a;兩個線程交替打印奇偶數代碼解釋 std::unique_lock::try_lock_until 介紹代碼示例代碼解釋注意事項 std::condition_variable::wait 詳細解析與示例std::condition_variable::wait 接口介紹代碼示例代碼解…

【VolView】純前端實現CT三維重建-CBCT

文章目錄 什么是CBCTCBCT技術路線使用第三方工具使用Python實現使用前端實現 純前端實現方案優缺點使用VolView實現CBCT VolView的使用1.克隆代碼2.配置依賴3.運行4.效果 進階&#xff1a;VolView配合Python解決卡頓1.修改VtkThreeView.vue2.新增Custom3DView.vue3.Python生成s…

debug - 安裝.msi時,為所有用戶安裝程序

文章目錄 debug - 安裝.msi時&#xff0c;為所有用戶安裝程序概述筆記試試在目標.msi后面直接加參數的測試 備注備注END debug - 安裝.msi時&#xff0c;為所有用戶安裝程序 概述 為了測試&#xff0c;裝了一個test.msi. 安裝時&#xff0c;只有安裝路徑的選擇&#xff0c;沒…

Java Stream兩種list判斷字符串是否存在方案

這里寫自定義目錄標題 背景初始化方法一、filter過濾方法二、anyMatch匹配 背景 在項目開發中&#xff0c;經常遇到篩選list中是否包含某個子字符串&#xff0c;有多種方式&#xff0c;本篇主要介紹stream流的filter和anyMatch兩種方案&#xff0c;記錄下來&#xff0c;方便備…

DeepSeek vs 通義大模型:誰將主導中國AI的未來戰場?

當你在深夜調試代碼時,是否幻想過AI伙伴能真正理解你的需求?當企業面對海量數據時,是否期待一個真正智能的決策大腦? 這場由DeepSeek和通義領銜的大模型之爭,正在重塑中國AI產業的競爭格局。本文將為你揭開兩大技術巨頭的終極對決! 一、顛覆認知的技術突破 1.1 改變游戲…

3. 軸指令(omron 機器自動化控制器)——>MC_SetOverride

機器自動化控制器——第三章 軸指令 12 MC_SetOverride變量?輸入變量?輸出變量?輸入輸出變量 功能說明?時序圖?重啟運動指令?多重啟動運動指令?異常 MC_SetOverride 變更軸的目標速度。 指令名稱FB/FUN圖形表現ST表現MC_SetOverride超調值設定FBMC_SetOverride_instan…

從像素到世界:自動駕駛視覺感知的坐標變換體系

接著上一篇 如何讓自動駕駛汽車“看清”世界?坐標映射與數據融合詳解的概述,這一篇詳細講解自動駕駛多目視覺系統設計原理,并給出應用示例。 摘要 在自動駕駛系統中,準確的環境感知是實現路徑規劃與決策控制的基礎。本文系統性地解析圖像坐標系、像素坐標系、相機坐標系與…

附錄B ISO15118-20測試命令

本章節給出ISO15118-20協議集的V2G命令&#xff0c;包含json、xml&#xff0c;并且根據exiCodec.jar編碼得到exi內容&#xff0c; 讀者可以參考使用&#xff0c;測試編解碼庫是否能正確編解碼。 B.1 supportedAppProtocolReq json: {"supportedAppProtocolReq": {…

VLAN章節學習

為什么會有vlan這個技術&#xff1f; 1.通過劃分廣播域來降低廣播風暴導致的設備性能下降&#xff1b; 2.提高網絡管理的靈活性和通過隔離網絡帶來的安全性&#xff1b; 3.在成本不變的情況下增加更多的功能性&#xff1b; VLAN又稱虛擬局域網&#xff08;再此擴展&#xf…

FPGA時鐘約束

提示&#xff1a;文章寫完后&#xff0c;目錄可以自動生成&#xff0c;如何生成可參考右邊的幫助文檔 目錄 前言 一、Create_clock 前言 時鐘周期約束&#xff0c;就是對時鐘進行約束。 一、Create_clock create_clock -name <name> -period <period> -waveform …

機房布局和布線的最佳實踐:如何打造高效、安全的機房環境

機房布局和布線的最佳實踐:如何打造高效、安全的機房環境 大家好,我是Echo_Wish。今天我們來聊聊機房布局和布線的問題,這可是數據中心和IT運維中的一個非常重要的環節。不管是剛剛接觸運維的新人,還是已經摸爬滾打多年的老兵,都應該對機房的布局和布線有一個清晰的認識。…

spring-security原理與應用系列:建造者

目錄 1.構建過程 AbstractSecurityBuilder AbstractConfiguredSecurityBuilder WebSecurity 2.建造者類圖 SecurityBuilder ???????AbstractSecurityBuilder ???????AbstractConfiguredSecurityBuilder ???????WebSecurity 3.小結 緊接上一篇文…

OpenHarmony子系統開發 - 電池管理(二)

OpenHarmony子系統開發 - 電池管理&#xff08;二&#xff09; 五、充電限流限壓定制開發指導 概述 簡介 OpenHarmony默認提供了充電限流限壓的特性。在對終端設備進行充電時&#xff0c;由于環境影響&#xff0c;可能會導致電池溫度過高&#xff0c;因此需要對充電電流或電…

xy軸不等比縮放問題——AUTOCAD c#二次開發

在 AutoCAD .net api里&#xff0c;部分實體&#xff0c;像文字、屬性、插入塊等&#xff0c;是不支持非等比縮放的。 如需對AutoCAD中圖形進行xyz方向不等比縮放&#xff0c;則需進行額外的函數封裝。 選擇圖元&#xff0c;指定縮放基準點&#xff0c;scaleX 0.5, scaleY …

如何在 HTML 中創建一個有序列表和無序列表,它們的語義有何不同?

大白話如何在 HTML 中創建一個有序列表和無序列表&#xff0c;它們的語義有何不同&#xff1f; 1. HTML 中有序列表和無序列表的基本概念 在 HTML 里&#xff0c;列表是一種用來組織信息的方式。有序列表就是帶有編號的列表&#xff0c;它可以讓內容按照一定的順序呈現&#…

kafka的文章

1.面試的問題 要點 至多一次、恰好一次數據一致性超時重試、冪等消息順序消息擠壓延時消息 1.1 kafaka 生產消息的過程。 在消息發送的過程中&#xff0c;涉及到了兩個線程&#xff0c;一個是main 線程&#xff0c;一個是sender 線程。在main 線程中創建了一個雙端隊列 Reco…

以mysql 為例,增刪改查語法及其他高級特性

以下是 MySQL 的 增刪改查語法及 高級特性的詳細整理&#xff0c;結合示例說明&#xff1a; 1. 基礎操作&#xff08;CRUD&#xff09; (1) 創建數據&#xff08;INSERT&#xff09; -- 單條插入 INSERT INTO users (id, name, email) VALUES (1, Alice, aliceexample.com);…

Postman最新詳細安裝及使用教程【附安裝包】

一、Postman介紹 ?Postman是一個功能強大的API測試工具&#xff0c;主要用于模擬和測試各種HTTP請求&#xff0c;支持GET、POST、PUT、DELETE等多種請求方法。?通過Postman&#xff0c;用戶可以發送請求并查看返回的響應&#xff0c;檢查響應的內容和狀態&#xff0c;從而驗…