In some browser environments or business scenarios, loading a video directly with the <video> tag suffers from a noticeable first-frame delay.
The approach below uses WebGPU (rather than Canvas 2D drawImage) to draw video frames onto a custom <canvas>: the <video> element only decodes, WebGPU takes over rendering, and minimal per-frame object creation plus precise frame callbacks yield a high-performance, extensible video playback pipeline that behaves consistently across platforms.
HTML
<video id="instructional_video_id_2" :src="instru_video_src" autoplay loop muted playsinline style="display: none;"></video>
<canvas id="instructional_video_id_1" width="640" height="360" style="width: 32.3125rem; height: 18.25rem;"></canvas>
JavaScript
import { createApp } from 'vue';
import { WebGLVideoRenderer } from './video-canvas.js';

const appInstance = createApp({
  data() {
    return {
      videoElement: null,
      isVideoLoading: false,
      lastVideoUrl: null,
      isRendering: false,
      renderer: null,
    };
  },
  mounted() {
    this.initRender();
  },
  methods: {
    initRender() {
      const canvas = document.getElementById('instructional_video_id_1');
      this.renderer = new WebGLVideoRenderer(canvas);
      this.videoElement = document.getElementById('instructional_video_id_2');
      if (!this.isVideoLoading) {
        this.isVideoLoading = true;
        this.videoElement.addEventListener('play', () => {
          // Start drawing to the canvas once playback begins
          this.drawVideoFrame();
        });
        this.videoElement.addEventListener('pause', () => {
          this.stopRendering();
        });
        this.videoElement.addEventListener('ended', () => {
          this.stopRendering();
          // To restart playback when the video ends:
          // this.videoElement.currentTime = 0;
          // this.videoElement.play();
        });
        this.videoElement.addEventListener('error', () => {
          console.error('Video failed to load');
        });
      }
    },
    // Initialize the video source
    initVideo(src) {
      if (this.lastVideoUrl === src) {
        return;
      }
      this.lastVideoUrl = src;
      if (src === null) {
        return;
      }
      this.setVideoSource(src);
    },
    // Render a single frame
    renderFrame() {
      // Delegate directly to the WebGPU renderer
      this.renderer.render(this.videoElement);
    },
    // Drive the render loop that draws video frames onto the canvas
    drawVideoFrame() {
      if (this.isRendering) return;
      this.isRendering = true;
      const useRVFC = 'requestVideoFrameCallback' in this.videoElement;
      if (useRVFC) {
        // Preferred path: fires exactly once per presented video frame
        const rvfcLoop = () => {
          if (!this.isRendering) return;
          this.renderFrame();
          this.videoElement.requestVideoFrameCallback(rvfcLoop);
        };
        this.videoElement.requestVideoFrameCallback(rvfcLoop);
      } else {
        // Fallback path: poll on every animation frame
        const renderLoop = () => {
          if (!this.isRendering) return;
          if (this.videoElement && !this.videoElement.paused && !this.videoElement.ended) {
            this.renderFrame();
          }
          requestAnimationFrame(renderLoop);
        };
        requestAnimationFrame(renderLoop);
      }
    },
    // Stop the render loop
    stopRendering() {
      this.isRendering = false;
    },
    // Set the video source and reload
    setVideoSource(src) {
      this.videoElement.src = src;
      this.videoElement.load();
      // this.videoElement.play();
    },
  },
});
// appInstance.mount(...) is called with whatever root element the host page uses.
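The Vue wrapper above is incidental to the technique. For reference, the same wiring works with plain DOM events; the following minimal sketch assumes the element IDs from the HTML above and, for brevity, assumes requestVideoFrameCallback support (see the rAF fallback in the Vue code otherwise):

import { WebGLVideoRenderer } from './video-canvas.js';

const video = document.getElementById('instructional_video_id_2');
const renderer = new WebGLVideoRenderer(document.getElementById('instructional_video_id_1'));

let rendering = false;
const loop = () => {
  if (!rendering) return;
  renderer.render(video);
  video.requestVideoFrameCallback(loop); // schedule the next presented frame
};

video.addEventListener('play', () => {
  rendering = true;
  video.requestVideoFrameCallback(loop);
});
video.addEventListener('pause', () => { rendering = false; });
video.addEventListener('ended', () => { rendering = false; });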
video-canvas.js
// video-canvas.js
export class WebGLVideoRenderer {
  constructor(canvas) {
    this.canvas = canvas;
    this.device = null;
    this.pipeline = null;
    this.sampler = null;
    this.bindGroupLayout = null;
    this.context = null;
    // Reusable per-frame objects
    this.currentExternalTexture = null;
    this.currentBindGroup = null;
    this.renderPassDescriptor = null;
    this.init();
  }

  async init() {
    if (!navigator.gpu) throw new Error('WebGPU not supported');
    const adapter = await navigator.gpu.requestAdapter({ powerPreference: 'high-performance' });
    this.device = await adapter.requestDevice();
    this.context = this.canvas.getContext('webgpu');
    const format = navigator.gpu.getPreferredCanvasFormat();
    this.context.configure({
      device: this.device,
      format,
      alphaMode: 'opaque'
    });

    // Shaders: a fullscreen triangle plus external-texture sampling
    const code = `
      @vertex fn vs(@builtin(vertex_index) i: u32) -> @builtin(position) vec4f {
        const pos = array(vec2f(-1, -3), vec2f(3, 1), vec2f(-1, 1));
        return vec4f(pos[i], 0, 1);
      }
      @group(0) @binding(0) var s: sampler;
      @group(0) @binding(1) var t: texture_external;
      @fragment fn fs(@builtin(position) p: vec4f) -> @location(0) vec4f {
        let uv = p.xy / vec2f(textureDimensions(t));
        return textureSampleBaseClampToEdge(t, s, uv);
      }`;
    const shader = this.device.createShaderModule({ code });

    this.bindGroupLayout = this.device.createBindGroupLayout({
      entries: [
        { binding: 0, visibility: GPUShaderStage.FRAGMENT, sampler: { type: 'filtering' } },
        { binding: 1, visibility: GPUShaderStage.FRAGMENT, externalTexture: {} }
      ]
    });
    this.pipeline = this.device.createRenderPipeline({
      layout: this.device.createPipelineLayout({ bindGroupLayouts: [this.bindGroupLayout] }),
      vertex: { module: shader, entryPoint: 'vs' },
      fragment: { module: shader, entryPoint: 'fs', targets: [{ format }] },
      primitive: { topology: 'triangle-list' }
    });
    this.sampler = this.device.createSampler({
      magFilter: 'linear',
      minFilter: 'linear'
    });

    // Skeleton render pass descriptor; the view is filled in each frame
    this.renderPassDescriptor = {
      colorAttachments: [{
        view: undefined, // placeholder, replaced per frame in render()
        loadOp: 'clear',
        storeOp: 'store'
      }]
    };
  }

  render(video) {
    if (!this.device) return; // init() may still be pending

    // 1. Resize the canvas only when the video dimensions change
    const { videoWidth, videoHeight } = video;
    if (this.canvas.width !== videoWidth || this.canvas.height !== videoHeight) {
      this.canvas.width = videoWidth;
      this.canvas.height = videoHeight;
    }

    // 2. importExternalTexture returns a new, short-lived object on every call,
    //    so the external texture must be imported and the bind group rebuilt each frame
    this.currentExternalTexture = this.device.importExternalTexture({ source: video });
    this.currentBindGroup = this.device.createBindGroup({
      layout: this.bindGroupLayout,
      entries: [
        { binding: 0, resource: this.sampler },
        { binding: 1, resource: this.currentExternalTexture }
      ]
    });

    // 3. Update colorAttachment.view to the current swap-chain texture
    this.renderPassDescriptor.colorAttachments[0].view =
      this.context.getCurrentTexture().createView();

    // 4. Reuse the render pass descriptor instead of allocating a new one per frame
    const encoder = this.device.createCommandEncoder();
    const pass = encoder.beginRenderPass(this.renderPassDescriptor);
    pass.setPipeline(this.pipeline);
    pass.setBindGroup(0, this.currentBindGroup);
    pass.draw(3);
    pass.end();
    this.device.queue.submit([encoder.finish()]);
  }

  dispose() {
    this.device?.destroy();
  }
}
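Since init() throws when navigator.gpu is missing, callers may want a graceful degradation path. Below is a minimal sketch of one option, built around a hypothetical create2DFallbackRenderer helper (not part of video-canvas.js) that mimics the render(video) interface using Canvas 2D drawImage:

// Hypothetical fallback factory, for illustration only.
// Exposes the same render(video) shape, backed by Canvas 2D drawImage.
function create2DFallbackRenderer(canvas) {
  const ctx = canvas.getContext('2d');
  return {
    render(video) {
      // Keep the canvas matched to the video, as the WebGPU renderer does
      if (canvas.width !== video.videoWidth || canvas.height !== video.videoHeight) {
        canvas.width = video.videoWidth;
        canvas.height = video.videoHeight;
      }
      ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
    },
    dispose() {}
  };
}

// Pick WebGPU when available, otherwise fall back to Canvas 2D
const canvas = document.getElementById('instructional_video_id_1');
const renderer = navigator.gpu
  ? new WebGLVideoRenderer(canvas)
  : create2DFallbackRenderer(canvas);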
Key points
The <video> element acts purely as a decoder and stays invisible (display: none).
Each frame is driven by requestVideoFrameCallback (preferred) or a requestAnimationFrame polling fallback, feeding the latest texture into WebGPU (see the metadata sketch after this list).
The canvas dimensions dynamically track video.videoWidth / videoHeight, preventing distorted or garbled output.
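Beyond pacing the loop, requestVideoFrameCallback also hands each callback per-frame metadata, which is useful for diagnostics such as detecting dropped frames. A small sketch, assuming video is the <video> element from the HTML above (the logging is illustrative, not part of the pipeline):

video.requestVideoFrameCallback((now, metadata) => {
  // metadata.mediaTime: presentation timestamp (in seconds) of the displayed frame
  // metadata.presentedFrames: running count of presented frames; gaps indicate drops
  console.log(`frame at mediaTime=${metadata.mediaTime}s, presented=${metadata.presentedFrames}`);
});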