If this is an original article, please credit the original source when reposting.
Blog address of this article: https://blog.csdn.net/qq21497936/article/details/147714800
長沙紅胖子Qt (長沙創微智科) blog index: a collection of development topics (Qt practical techniques, Raspberry Pi, 3D, OpenCV, OpenGL, ffmpeg, OSG, microcontrollers, hardware/software integration, and more), continuously updated…
FFmpeg, SDL and streaming-media development column
Previous article: "GStreamer Development Notes (II): Deploying and installing GStreamer on Ubuntu; testing camera latency and memory with gstreamer/cheese/ffmpeg/fmplayer"
Next article: to be continued…
Preface
The previous article tested several technical approaches; this one completes the remaining mainstream options: v4l2 + SDL2 (closer to the hardware) and v4l2 + QtOpenGL (application level). A third path, v4l2 + ffmpeg + QImage, is also worth noting: wrapping the frame as a QImage takes under 1 ms, but converting from YUV to RGB requires an ffmpeg transcode, which adds time.
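For the v4l2 + ffmpeg + QImage path, the YUV-to-RGB step would typically be done with libswscale. The following is a minimal sketch of that conversion, for illustration only; the function name and the BT.601/bilinear settings are assumptions, not the code that was actually benchmarked.
#include <cstdint>
#include <QImage>
extern "C" {
#include <libswscale/swscale.h>
}

// Illustrative helper: convert one packed YUYV (YUY2) frame into an RGB888 QImage.
// In real code the SwsContext would be created once and reused for every frame;
// it is created and freed here only to keep the sketch self-contained.
QImage yuyvToQImage(const uint8_t *yuyv, int width, int height)
{
    QImage image(width, height, QImage::Format_RGB888);
    SwsContext *ctx = sws_getContext(width, height, AV_PIX_FMT_YUYV422,
                                     width, height, AV_PIX_FMT_RGB24,
                                     SWS_BILINEAR, NULL, NULL, NULL);
    const uint8_t *srcSlice[1] = { yuyv };
    const int srcStride[1] = { width * 2 };                  // YUYV packs 2 bytes per pixel
    uint8_t *dst[1] = { image.bits() };
    const int dstStride[1] = { (int)image.bytesPerLine() };
    sws_scale(ctx, srcSlice, srcStride, 0, height, dst, dstStride);
    sws_freeContext(ctx);
    return image;
}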
Demo
Note
The color space is not rendered exactly right; this is not investigated further here.
Latency and memory comparison
Step 1: testing latency and memory with v4l2 code
No ready-made command-line player was found for v4l2; only v4l2-ctl, which can view and control the camera's parameters.
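A few typical v4l2-ctl invocations (from the v4l-utils package, assuming the camera is /dev/video0):
v4l2-ctl --list-devices
v4l2-ctl -d /dev/video0 --list-formats-ext
v4l2-ctl -d /dev/video0 --all
v4l2-ctl -d /dev/video0 --set-fmt-video=width=640,height=480,pixelformat=YUYV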
GStreamer's camera source is v4l2src, so a quick program was written that uses v4l2 directly to open the camera and check the latency. v4l2 is only a capture/control framework and cannot render anything by itself, so SDL is used here for display.
Note: v4l2 itself is not introduced here; a dedicated column will cover v4l2 multimedia development. For now it is only used to write a simple program that opens the camera.
sudo apt-get install libsdl2-dev libsdl2-2.0-0
Then write the code; the full source is listed in the Demo section below.
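To build and run it, a typical command line would be the following (assuming the source file is saved as v4l2_sdl.c; adjust the file name to your own):
gcc v4l2_sdl.c -o v4l2_sdl $(sdl2-config --cflags --libs)
./v4l2_sdl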
Step 2: v4l2 + QtOpenGL with one memcpy per frame
Memory usage:
Step 3: v4l2 + QtOpenGL with a shared buffer (no per-frame copy; see the demo code and the widget sketch below)
Final summary
At this point we can conclude that GStreamer is basically one of the best frameworks available. The initial tests are not especially rigorous, but they broadly reflect the situation (for example, ffmpeg's fmplayer was the worst in this round, yet with optimization of the ffmpeg source and of the application code ffmpeg can reach roughly 150 ms; such cases are not considered here).
Ranking: v4l2+SDL beats gstreamer, which beats fmplayer, which beats v4l2+QtOpenGL, which beats cheese, which beats ffmpeg.
v4l2+SDL, gstreamer and fmplayer differ somewhat in memory usage, but their latency is all around 130 ms. Cheese and v4l2+QtOpenGL come in at roughly 170 ms, and the ffmpeg player at roughly 500 ms.
Extension
Note that most of the low-latency endoscopes the author has worked with write the capture buffer straight into display memory; a few vendors use the v4l2 + QtOpenGL approach, which in testing was about one frame slower.
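For context, a minimal and purely hypothetical sketch of that "capture buffer written straight into display memory" idea is shown below, using the Linux framebuffer device /dev/fb0. It assumes the captured frame is already in the framebuffer's own pixel format and size, which real products have to handle properly; it only illustrates the shape of the technique, not any particular vendor's implementation.
#include <fcntl.h>
#include <linux/fb.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>

// Hypothetical helper: copy one frame (already in the framebuffer's pixel format)
// directly into the display memory exposed by /dev/fb0, with no draw buffer in between.
int blitFrameToFramebuffer(const void *frame, unsigned int frameBytes)
{
    int fbfd = open("/dev/fb0", O_RDWR);
    if (fbfd == -1) { return -1; }
    struct fb_fix_screeninfo finfo;
    if (ioctl(fbfd, FBIOGET_FSCREENINFO, &finfo) == -1) { close(fbfd); return -1; }
    void *fbmem = mmap(NULL, finfo.smem_len, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd, 0);
    if (fbmem == MAP_FAILED) { close(fbfd); return -1; }
    // One copy from the capture buffer into video memory.
    memcpy(fbmem, frame, frameBytes < finfo.smem_len ? frameBytes : finfo.smem_len);
    munmap(fbmem, finfo.smem_len);
    close(fbfd);
    return 0;
}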
Demo: v4l2 + SDL
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <errno.h>
#include <SDL2/SDL.h>
#include <SDL2/SDL_pixels.h>

#define WIDTH  640
#define HEIGHT 480

int main() {
    setbuf(stdout, NULL);
    int fd;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer buf;
    void *buffer_start;
    unsigned int buffer_length;
    // Open the camera device
    fd = open("/dev/video0", O_RDWR);
    if (fd == -1) {
        perror("Failed to open camera device");
        return EXIT_FAILURE;
    }
    // Set the video format
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        perror("Failed to set video format");
        close(fd);
        return EXIT_FAILURE;
    }
    // Request a buffer
    memset(&req, 0, sizeof(req));
    req.count = 1;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        perror("Failed to request buffers");
        close(fd);
        return EXIT_FAILURE;
    }
    // Map the buffer
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = 0;
    if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
        perror("Failed to query buffer");
        close(fd);
        return EXIT_FAILURE;
    }
    buffer_length = buf.length;
    buffer_start = mmap(NULL, buffer_length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
    if (buffer_start == MAP_FAILED) {
        perror("Failed to mmap buffer");
        close(fd);
        return EXIT_FAILURE;
    }
    // Queue the buffer
    if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
        perror("Failed to queue buffer");
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    // Start video capture
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
        perror("Failed to start video capture");
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    // Initialize SDL
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "SDL initialization failed: %s\n", SDL_GetError());
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    SDL_Window *window = SDL_CreateWindow("V4L2 Camera", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, WIDTH, HEIGHT, 0);
    if (!window) {
        fprintf(stderr, "Failed to create SDL window: %s\n", SDL_GetError());
        SDL_Quit();
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    SDL_Renderer *renderer = SDL_CreateRenderer(window, -1, 0);
    // SDL_PIXELFORMAT_YV12 = /**< Planar mode: Y + V + U (3 planes) */
    // SDL_PIXELFORMAT_IYUV = /**< Planar mode: Y + U + V (3 planes) */
    // SDL_PIXELFORMAT_YUY2 = /**< Packed mode: Y0+U0+Y1+V0 (1 plane) */
    // SDL_PIXELFORMAT_UYVY = /**< Packed mode: U0+Y0+V0+Y1 (1 plane) */
    // SDL_PIXELFORMAT_YVYU = /**< Packed mode: Y0+V0+Y1+U0 (1 plane) */
//  SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
//  SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
    SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YUY2, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
//  SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_UYVY, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
//  SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YVYU, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
    int running = 1;
    SDL_Event event;
    while (running) {
        // Handle events
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) {
                running = 0;
            }
        }
        // Capture a frame
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
            perror("Failed to dequeue buffer");
            break;
        }
        // Update the SDL texture
        // NOTE: YUY2 stores 2 bytes per pixel, so the expected pitch is WIDTH * 2 bytes;
        // WIDTH is what the original code passes (see the "Pitfalls" section below).
        SDL_UpdateTexture(texture, NULL, buffer_start, WIDTH);
        // Render the texture
        SDL_RenderClear(renderer);
        SDL_RenderCopy(renderer, texture, NULL, NULL);
        SDL_RenderPresent(renderer);
        // Re-queue the buffer
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            perror("Failed to queue buffer");
            break;
        }
    }
    // Clean up resources
    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    munmap(buffer_start, buffer_length);
    close(fd);
    return EXIT_SUCCESS;
}
Demo: v4l2 + QtOpenGL + shared buffer
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <errno.h>
#include "DisplayOpenGLWidget.h"
#include <QApplication>
#include <QElapsedTimer>

#define WIDTH  640
#define HEIGHT 480

#include <QDebug>
#include <QDateTime>
//#define LOG qDebug()<<__FILE__<<__LINE__
//#define LOG qDebug()<<__FILE__<<__LINE__<<__FUNCTION__
//#define LOG qDebug()<<__FILE__<<__LINE__<<QThread()::currentThread()
//#define LOG qDebug()<<__FILE__<<__LINE__<<QDateTime::currentDateTime().toString("yyyy-MM-dd")
#define LOG qDebug()<<__FILE__<<__LINE__<<QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss:zzz")

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    DisplayOpenGLWidget displayOpenGLWidget;
    displayOpenGLWidget.show();
    setbuf(stdout, NULL);
    int fd;
    struct v4l2_format fmt;
    struct v4l2_requestbuffers req;
    struct v4l2_buffer buf;
    void *buffer_start;
    unsigned int buffer_length;
    // Open the camera device
    fd = open("/dev/video0", O_RDWR);
    if (fd == -1) {
        perror("Failed to open camera device");
        return EXIT_FAILURE;
    }
    // Set the video format
    memset(&fmt, 0, sizeof(fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = WIDTH;
    fmt.fmt.pix.height = HEIGHT;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
        perror("Failed to set video format");
        close(fd);
        return EXIT_FAILURE;
    }
    // Request a buffer
    memset(&req, 0, sizeof(req));
    req.count = 1;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
        perror("Failed to request buffers");
        close(fd);
        return EXIT_FAILURE;
    }
    // Map the buffer
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = 0;
    if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
        perror("Failed to query buffer");
        close(fd);
        return EXIT_FAILURE;
    }
    buffer_length = buf.length;
    buffer_start = mmap(NULL, buffer_length, PROT_READ | PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
    if (buffer_start == MAP_FAILED) {
        perror("Failed to mmap buffer");
        close(fd);
        return EXIT_FAILURE;
    }
    // Hand the mmap'd capture buffer to the widget directly (shared buffer, no per-frame copy)
    displayOpenGLWidget.initDrawBuffer(WIDTH, HEIGHT, true, (char *)buffer_start);
    // Queue the buffer
    if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
        perror("Failed to queue buffer");
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    // Start video capture
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {
        perror("Failed to start video capture");
        munmap(buffer_start, buffer_length);
        close(fd);
        return EXIT_FAILURE;
    }
    int running = 1;
    while (running) {
        // Capture a frame
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(fd, VIDIOC_DQBUF, &buf) == -1) {
            perror("Failed to dequeue buffer");
            break;
        }
        // Render
//      memcpy(drawBuffer, buffer_start, buffer_length);   // the "one memcpy" variant from Step 2
        displayOpenGLWidget.displayVideoFrame();
        QApplication::processEvents();
        QApplication::processEvents();
        QApplication::processEvents();
        // Re-queue the buffer
        if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {
            perror("Failed to queue buffer");
            break;
        }
    }
    // Clean up resources
    munmap(buffer_start, buffer_length);
    close(fd);
    return EXIT_SUCCESS;
}
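The Qt demos rely on a DisplayOpenGLWidget class whose source is not included in this post. Purely for reference, below is a minimal sketch of what such a widget could look like, reconstructed around the initDrawBuffer()/displayVideoFrame() calls used above; every detail of it is an assumption. To stay short and self-contained it converts YUYV to RGB on the CPU and paints with QPainter, whereas the author's real widget presumably uploads the YUYV buffer as an OpenGL texture and converts it in a fragment shader, which would avoid the CPU conversion.
// DisplayOpenGLWidget.h — hypothetical sketch of the interface the demos assume.
#pragma once
#include <QOpenGLWidget>
#include <QPainter>
#include <QImage>

class DisplayOpenGLWidget : public QOpenGLWidget
{
public:
    using QOpenGLWidget::QOpenGLWidget;

    // shared == true: draw directly from pBuffer (the mmap'd capture buffer), no copy.
    // In the Step 2 (memcpy) variant the widget would instead allocate its own buffer
    // and the caller would memcpy each frame into it; this sketch always reads pBuffer.
    void initDrawBuffer(int width, int height, bool shared, char *pBuffer)
    {
        _width = width;
        _height = height;
        _shared = shared;
        _pBuffer = pBuffer;
    }

    void displayVideoFrame() { update(); }   // schedule a repaint of the current buffer contents

protected:
    void paintGL() override
    {
        if (!_pBuffer) return;
        QImage rgb(_width, _height, QImage::Format_RGB888);
        const unsigned char *src = reinterpret_cast<const unsigned char *>(_pBuffer);
        for (int y = 0; y < _height; ++y) {
            unsigned char *dst = rgb.scanLine(y);
            for (int x = 0; x < _width; x += 2) {
                // YUYV packs two pixels into 4 bytes: Y0 U Y1 V (BT.601 integer conversion)
                int y0 = src[0], u = src[1], y1 = src[2], v = src[3];
                src += 4;
                auto store = [&](int yy) {
                    int c = yy - 16, d = u - 128, e = v - 128;
                    auto clampByte = [](int value) { return value < 0 ? 0 : (value > 255 ? 255 : value); };
                    *dst++ = clampByte((298 * c + 409 * e + 128) >> 8);            // R
                    *dst++ = clampByte((298 * c - 100 * d - 208 * e + 128) >> 8);  // G
                    *dst++ = clampByte((298 * c + 516 * d + 128) >> 8);            // B
                };
                store(y0);
                store(y1);
            }
        }
        QPainter painter(this);
        painter.drawImage(rect(), rgb);
    }

private:
    int _width = 0;
    int _height = 0;
    bool _shared = true;
    char *_pBuffer = nullptr;
};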
Pitfalls
Pitfall 1: the v4l2 capture code is not quite right
Problem
The frames displayed by the v4l2 capture code are misaligned.
Cause
The texture format does not match the data. The author tried every YUV format SDL supports and none displayed correctly, so this was not pursued further; a proper color-space conversion would be needed first (which would add some latency, estimated at under 10 ms). For the purpose of measuring latency and memory, a format that is at least viewable is good enough.
// SDL_PIXELFORMAT_YV12 = /**< Planar mode: Y + V + U (3 planes) */
// SDL_PIXELFORMAT_IYUV = /**< Planar mode: Y + U + V (3 planes) */
// SDL_PIXELFORMAT_YUY2 = /**< Packed mode: Y0+U0+Y1+V0 (1 plane) */
// SDL_PIXELFORMAT_UYVY = /**< Packed mode: U0+Y0+V0+Y1 (1 plane) */
// SDL_PIXELFORMAT_YVYU = /**< Packed mode: Y0+V0+Y1+U0 (1 plane) */
// SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
// SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YUY2, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
// SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_UYVY, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
// SDL_Texture *texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_YVYU, SDL_TEXTUREACCESS_STREAMING, WIDTH, HEIGHT);
Solution
Not solved; just pick a format that displays clearly enough.