In some browser environments and business scenarios, loading a video directly with the <video> tag suffers from first-frame latency.
The approach below draws video frames onto a custom <canvas> with WebGPU: the <video> element only decodes, and WebGPU takes over rendering. By minimizing per-frame object creation and using precise frame callbacks, it forms a high-performance, extensible video playback pipeline that behaves consistently across platforms.
HTML
<video id="instructional_video_id_2" :src="instru_video_src" autoplay loop muted playsinline style="display: none;"></video>
<canvas id="instructional_video_id_1" width="640" height="360" style="width: 32.3125rem; height: 18.25rem;"></canvas>
JS
import { createApp } from 'vue'; // Vue 3
import { WebGLVideoRenderer } from './video-canvas.js';

const appInstance = createApp({
  data() {
    return {
      videoElement: null,
      isVideoLoading: false,
      lastVideoUrl: null,
      isRendering: false,
      renderer: null,
      instru_video_src: null, // bound by :src in the template
    };
  },
  mounted() {
    this.initRender();
  },
  methods: {
    initRender() {
      const canvas = document.getElementById('instructional_video_id_1');
      this.renderer = new WebGLVideoRenderer(canvas);
      this.videoElement = document.getElementById('instructional_video_id_2');
      if (!this.isVideoLoading) {
        this.isVideoLoading = true;
        this.videoElement.addEventListener('play', () => {
          // Start drawing to the canvas when playback begins
          this.drawVideoFrame();
        });
        this.videoElement.addEventListener('pause', () => {
          this.stopRendering();
        });
        this.videoElement.addEventListener('ended', () => {
          this.stopRendering();
          // Restart playback when the video ends:
          // this.videoElement.currentTime = 0;
          // this.videoElement.play();
        });
        this.videoElement.addEventListener('error', () => {
          console.error('Video failed to load');
        });
      }
    },
    // Initialize the video, skipping reloads of the same URL
    initVideo(src) {
      if (this.lastVideoUrl === src) {
        return;
      }
      this.lastVideoUrl = src;
      if (src === null) {
        return;
      }
      // Set the video source
      this.setVideoSource(src);
    },
    // Render a single frame by delegating to the WebGPU renderer
    renderFrame() {
      this.renderer.render(this.videoElement);
    },
    // Drive the frame loop: prefer requestVideoFrameCallback, else poll with requestAnimationFrame
    drawVideoFrame() {
      if (this.isRendering) return;
      this.isRendering = true;
      const useRVFC = 'requestVideoFrameCallback' in this.videoElement;
      if (useRVFC) {
        const rvfcLoop = () => {
          if (!this.isRendering) return;
          this.renderFrame();
          this.videoElement.requestVideoFrameCallback(rvfcLoop);
        };
        this.videoElement.requestVideoFrameCallback(rvfcLoop);
      } else {
        const renderLoop = () => {
          if (!this.isRendering) return;
          if (this.videoElement && !this.videoElement.paused && !this.videoElement.ended) {
            this.renderFrame();
          }
          requestAnimationFrame(renderLoop);
        };
        requestAnimationFrame(renderLoop);
      }
    },
    // Stop the frame loop
    stopRendering() {
      this.isRendering = false;
    },
    // Set the video source and trigger loading
    setVideoSource(src) {
      this.videoElement.src = src;
      this.videoElement.load();
      // this.videoElement.play();
    },
  },
}); // appInstance.mount(...) happens elsewhere in the app
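The pipeline above assumes WebGPU support. For browsers without it, a fallback with the same render(video) surface keeps the app code unchanged. The sketch below is hypothetical (Canvas2DVideoRenderer is not part of the original code); it simply copies each frame with drawImage on a 2D context:

// Hypothetical fallback renderer; a sketch, not part of the original project.
export class Canvas2DVideoRenderer {
  constructor(canvas) {
    this.canvas = canvas;
    this.ctx = canvas.getContext('2d');
  }
  render(video) {
    const { videoWidth, videoHeight } = video;
    if (videoWidth === 0 || videoHeight === 0) return; // no decoded frame yet
    if (this.canvas.width !== videoWidth || this.canvas.height !== videoHeight) {
      this.canvas.width = videoWidth;
      this.canvas.height = videoHeight;
    }
    // drawImage accepts an HTMLVideoElement and copies its current frame
    this.ctx.drawImage(video, 0, 0, videoWidth, videoHeight);
  }
  dispose() {} // nothing to release for a 2D context
}

In initRender(), the choice could then be: this.renderer = navigator.gpu ? new WebGLVideoRenderer(canvas) : new Canvas2DVideoRenderer(canvas);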
video-canvas.js
// video-canvas.js
export class WebGLVideoRenderer {
  constructor(canvas) {
    this.canvas = canvas;
    this.device = null;
    this.pipeline = null;
    this.sampler = null;
    this.bindGroupLayout = null;
    this.context = null;
    // Reusable objects filled in per frame
    this.currentBindGroup = null;
    this.renderPassDescriptor = null;
    // Keep the init promise so callers can await readiness
    this.ready = this.init();
  }

  async init() {
    if (!navigator.gpu) throw new Error('WebGPU not supported');
    const adapter = await navigator.gpu.requestAdapter({ powerPreference: 'high-performance' });
    if (!adapter) throw new Error('No suitable GPU adapter found');
    this.device = await adapter.requestDevice();
    this.context = this.canvas.getContext('webgpu');
    const format = navigator.gpu.getPreferredCanvasFormat();
    this.context.configure({ device: this.device, format, alphaMode: 'opaque' });

    // Fullscreen-triangle vertex shader + external-texture fragment shader
    const code = `
      @vertex fn vs(@builtin(vertex_index) i: u32) -> @builtin(position) vec4f {
        // One oversized triangle that covers the whole clip-space square
        const pos = array(vec2f(-1, -3), vec2f(3, 1), vec2f(-1, 1));
        return vec4f(pos[i], 0, 1);
      }
      @group(0) @binding(0) var s: sampler;
      @group(0) @binding(1) var t: texture_external;
      @fragment fn fs(@builtin(position) p: vec4f) -> @location(0) vec4f {
        let uv = p.xy / vec2f(textureDimensions(t));
        return textureSampleBaseClampToEdge(t, s, uv);
      }
    `;
    const shader = this.device.createShaderModule({ code });

    this.bindGroupLayout = this.device.createBindGroupLayout({
      entries: [
        { binding: 0, visibility: GPUShaderStage.FRAGMENT, sampler: { type: 'filtering' } },
        { binding: 1, visibility: GPUShaderStage.FRAGMENT, externalTexture: {} }
      ]
    });
    this.pipeline = this.device.createRenderPipeline({
      layout: this.device.createPipelineLayout({ bindGroupLayouts: [this.bindGroupLayout] }),
      vertex: { module: shader, entryPoint: 'vs' },
      fragment: { module: shader, entryPoint: 'fs', targets: [{ format }] },
      primitive: { topology: 'triangle-list' }
    });
    this.sampler = this.device.createSampler({ magFilter: 'linear', minFilter: 'linear' });

    // Skeleton of the render pass descriptor; the view is filled in each frame
    this.renderPassDescriptor = {
      colorAttachments: [{
        view: undefined, // placeholder, replaced every frame
        loadOp: 'clear',
        storeOp: 'store'
      }]
    };
  }

  render(video) {
    if (!this.device) return; // init() has not finished yet
    if (video.readyState < 2) return; // no decoded frame to import yet

    // 1. Resize the canvas only when the video dimensions change
    const { videoWidth, videoHeight } = video;
    if (this.canvas.width !== videoWidth || this.canvas.height !== videoHeight) {
      this.canvas.width = videoWidth;
      this.canvas.height = videoHeight;
    }

    // 2. importExternalTexture returns a new, short-lived texture on every call,
    //    so both it and the bind group that references it are rebuilt each frame
    const externalTexture = this.device.importExternalTexture({ source: video });
    this.currentBindGroup = this.device.createBindGroup({
      layout: this.bindGroupLayout,
      entries: [
        { binding: 0, resource: this.sampler },
        { binding: 1, resource: externalTexture }
      ]
    });

    // 3. Point the reused descriptor at this frame's swap-chain texture
    this.renderPassDescriptor.colorAttachments[0].view =
      this.context.getCurrentTexture().createView();

    // 4. Encode a single 3-vertex draw and submit
    const encoder = this.device.createCommandEncoder();
    const pass = encoder.beginRenderPass(this.renderPassDescriptor);
    pass.setPipeline(this.pipeline);
    pass.setBindGroup(0, this.currentBindGroup);
    pass.draw(3);
    pass.end();
    this.device.queue.submit([encoder.finish()]);
  }

  dispose() {
    this.device?.destroy();
  }
}
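Because init() runs asynchronously while the constructor returns immediately, render() silently no-ops until the device is ready. A minimal framework-free usage sketch, assuming the element IDs from the HTML above and a <script type="module"> context for top-level await:

import { WebGLVideoRenderer } from './video-canvas.js';

const video = document.getElementById('instructional_video_id_2');
const canvas = document.getElementById('instructional_video_id_1');
const renderer = new WebGLVideoRenderer(canvas);

// Wait for the WebGPU device before starting the frame loop
await renderer.ready;

const loop = () => {
  renderer.render(video);
  video.requestVideoFrameCallback(loop);
};
video.requestVideoFrameCallback(loop);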
Key points
The <video> element acts purely as a decoder and stays invisible (display: none).
Each frame is pushed into WebGPU via requestVideoFrameCallback (preferred) or a requestAnimationFrame polling loop; see the metadata sketch after this list.
The canvas size dynamically tracks video.videoWidth / videoHeight, preventing garbled frames.
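As noted in the second point, requestVideoFrameCallback also passes per-frame metadata (mediaTime, presentedFrames, and so on) to its callback. The sketch below is an assumption about how that could be used, not part of the original code: presentedFrames increments once per composited frame, so a jump larger than 1 between callbacks suggests frames were decoded but never presented.

// Sketch: dropped-frame logging via rVFC metadata (hypothetical addition).
let lastPresented = null;
const rvfcLoop = (now, metadata) => {
  renderer.render(video); // same renderer and video as above
  if (lastPresented !== null && metadata.presentedFrames - lastPresented > 1) {
    // Frames were composited without a callback firing for them
    console.warn(`Dropped ${metadata.presentedFrames - lastPresented - 1} frame(s) near mediaTime ${metadata.mediaTime.toFixed(3)}s`);
  }
  lastPresented = metadata.presentedFrames;
  video.requestVideoFrameCallback(rvfcLoop);
};
video.requestVideoFrameCallback(rvfcLoop);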