First, the full implementation:
OcclusionFader.ts
import { AbstractEngine, Material, type Behavior, type Mesh, type PBRMetallicRoughnessMaterial, type Scene } from "@babylonjs/core";
import { OcclusionTester } from "../../OcclusionTester";

export class OcclusionFader implements Behavior<Mesh> {
    name: string = "OcclusionFader";

    private _mesh: Mesh | null = null;
    private _scene: Scene | null = null;
    private _engine: AbstractEngine | null = null;
    private _meshes: Mesh[] = [];
    private _mat: PBRMetallicRoughnessMaterial | null = null;
    private _visibility: number = 0.1;
    private _occlusionTester: OcclusionTester | null = null;

    constructor(visibility: number = 0.1) {
        this._visibility = visibility;
    }

    init(): void {}

    private _attached = false;

    attach(target: Mesh): void {
        if (this._attached) return;
        this._attached = true;
        this._mesh = target;
        this._scene = target.getScene();
        this._engine = this._scene.getEngine();
        this._mat = this._mesh.material?.clone(this._mesh.material.name + "_clone") as PBRMetallicRoughnessMaterial;
        this._mesh.material = this._mat;
        this._occlusionTester = new OcclusionTester(this._scene as Scene);
        this._occlusionTester.setDivisor(8);
        this._occlusionTester.updateMesh(this._meshes, this._mesh);
        this._occlusionTester.onFinishCheckOcclusion.add(this._setIsOccluded.bind(this));
        this._occlusionTester.startCheckOcclusion();
        this._scene.onBeforeRenderObservable.add(this._updateVisibility.bind(this));
    }

    detach(): void {
        this._attached = false;
        this._mesh = null;
    }

    public addMesh(mesh: Mesh): void {
        this._meshes.push(mesh);
        if (this._occlusionTester) this._occlusionTester.updateMesh(this._meshes, this._mesh as Mesh);
    }

    private _isOccluded: boolean = false;

    private _setIsOccluded(isOccluded: boolean): void {
        this._isOccluded = isOccluded;
    }

    private _vUse: number = 1;

    private _updateVisibility(): void {
        if (!this._mat) {
            console.log("mat is null!");
            return;
        }
        if (!this._occlusionTester) {
            console.log("occlusionTester is null!");
            return;
        }
        this._mat.transparencyMode = Material.MATERIAL_ALPHABLEND;
        if (this._isOccluded) {
            if (this._vUse > this._visibility) {
                this._vUse -= this._engine!.getDeltaTime() * 0.005;
            } else {
                this._vUse = this._visibility;
            }
        } else {
            if (this._vUse < 1) {
                this._vUse += this._engine!.getDeltaTime() * 0.005;
            } else {
                this._mat.transparencyMode = Material.MATERIAL_ALPHATEST;
                this._vUse = 1;
            }
        }
        this._mesh!.material!.alpha = this._vUse;
    }

    public dispose(): void {
        this._attached = false;
        this._mesh = null;
        this._occlusionTester?.dispose();
    }
}
OcclusionTester.ts
import { AbstractEngine, Color4, Engine, Mesh, Observable, RenderTargetTexture, Scene, ShaderMaterial, UniversalCamera } from "@babylonjs/core";

export class OcclusionTester {
    private _engine: AbstractEngine;
    private _mainScene: Scene;
    private _tempScene: Scene; // temporary scene used for off-screen rendering
    private _tempCam: UniversalCamera;
    private _w: number = 8;
    private _h: number = 8;
    private _mat: ShaderMaterial;
    private _depthTexA: RenderTargetTexture | null = null;
    private _depthTexB: RenderTargetTexture | null = null;
    private _divisor: number = 1;
    private options = {
        generateDepthBuffer: true,     // enable the depth buffer
        generateStencilBuffer: false,  // no stencil buffer needed
        type: Engine.TEXTURETYPE_FLOAT // floating-point texture
    };

    constructor(mainScene: Scene) {
        this._mainScene = mainScene;
        this._engine = mainScene.getEngine();
        // create the temporary scene and its camera
        this._tempScene = new Scene(this._engine);
        // the depth material must be created after the temp scene exists, otherwise the
        // ShaderMaterial would be registered with the most recently created scene instead
        this._mat = this._createDepthMaterial();
        this._tempCam = mainScene.activeCamera!.clone("tempCamera") as UniversalCamera;
        this._mainScene.removeCamera(this._tempCam);
        this._tempScene.addCamera(this._tempCam);
        this._tempScene.activeCamera = this._tempCam;
        this._tempScene.clearColor = new Color4(0, 0, 0, 0);
        const size = this.resize();
        this._depthTexA = this.createDepthTex("depthTexA", size);
        this._depthTexB = this.createDepthTex("depthTexB", size);
        this._engine.onResizeObservable.add(() => {
            const size = this.resize();
            if (this._depthTexA) this._depthTexA.resize(size);
            if (this._depthTexB) this._depthTexB.resize(size);
        });
    }

    public setDivisor(divisor: number): void {
        this._divisor = divisor < 1 ? 1 : divisor;
    }

    public getDivisor(): number {
        return this._divisor;
    }

    private createDepthTex(name: string, size: { width: number, height: number }): RenderTargetTexture {
        const depthTex = new RenderTargetTexture(name, size, this._tempScene, this.options);
        depthTex.activeCamera = this._tempCam;
        this._tempScene.customRenderTargets.push(depthTex);
        return depthTex;
    }

    private resize = (): { width: number, height: number } => {
        this._w = Math.floor(this._engine.getRenderWidth() / this._divisor);
        this._h = Math.floor(this._engine.getRenderHeight() / this._divisor);
        return { width: this._w, height: this._h };
    };

    private _meshesCloned: Mesh[] = [];
    private _meshOccCloned: Mesh[] = [];

    public updateMesh(meshes: Mesh[], meshOcc: Mesh): void {
        if (!this._depthTexA) return;
        this._meshesCloned.forEach((mesh) => { mesh.dispose(); });
        this._meshesCloned.length = 0;
        meshes.forEach((mesh) => {
            const meshClone = this._cloneMeshToTempScene(mesh);
            this._meshesCloned.push(meshClone);
        });
        this._depthTexA.renderList = this._meshesCloned;
        if (!this._depthTexB) return;
        this._meshOccCloned.forEach((mesh) => { mesh.dispose(); });
        this._meshOccCloned.length = 0;
        const meshOccClone = this._cloneMeshToTempScene(meshOcc);
        this._meshOccCloned.push(meshOccClone);
        this._depthTexB.renderList = this._meshOccCloned;
    }

    private _cloneMeshToTempScene(mesh: Mesh): Mesh {
        const meshClone = mesh.clone(mesh.name + "_Cloned");
        this._mainScene.removeMesh(meshClone);
        const occ = meshClone.getBehaviorByName("OcclusionFader");
        if (occ) meshClone.removeBehavior(occ);
        meshClone.material = this._mat;
        this._tempScene.addMesh(meshClone);
        return meshClone;
    }

    private checkEnabled: boolean = true;

    public startCheckOcclusion(): void {
        this.checkEnabled = true;
        this.checkOcclusion();
    }

    public stopCheckOcclusion(): void {
        this.checkEnabled = false;
    }

    private isOccluded: boolean = false;

    public getIsOccluded(): boolean {
        return this.isOccluded;
    }

    public onFinishCheckOcclusion: Observable<boolean> = new Observable<boolean>();

    private async checkOcclusion(): Promise<void> {
        if (!this.checkEnabled) return;
        this.syncCam();
        // render the temporary scene off-screen
        await new Promise<void>(resolve => {
            this._tempScene.executeWhenReady(() => {
                this._tempScene.render();
                resolve();
            });
        });
        // read back the depth data
        const depthBufA = await this._depthTexA!.readPixels(
            0,       // faceIndex (for cube maps, default 0)
            0,       // level (mipmap level, default 0)
            null,    // buffer (no pre-allocated buffer)
            true,    // flushRenderer (force a renderer flush)
            false,   // noDataConversion (allow data conversion)
            0,       // x (read origin)
            0,       // y (read origin)
            this._w, // width to read
            this._h  // height to read
        ) as Float32Array; // important: the float texture reads back as a Float32Array
        const depthBufB = await this._depthTexB!.readPixels(
            0, 0, null, true, false, 0, 0, this._w, this._h // same arguments as above
        ) as Float32Array;
        // compare the two depth buffers
        let isOccluded = false;
        for (let i = 0; i < depthBufA.length; i += 4) {
            if (depthBufA[i] > 0 && depthBufB[i] > 0) {
                if (depthBufB[i] < depthBufA[i]) {
                    isOccluded = true;
                    break;
                }
            }
        }
        this.isOccluded = isOccluded;
        this.onFinishCheckOcclusion.notifyObservers(isOccluded);
        // schedule the next check with setTimeout instead of recursing directly
        setTimeout(() => this.checkOcclusion(), 0);
    }

    private syncCam() {
        const mainCam = this._mainScene.activeCamera as UniversalCamera;
        this._tempCam.position.copyFrom(mainCam.position);
        this._tempCam.rotation.copyFrom(mainCam.rotation);
    }

    // material that writes normalized depth into the colour output
    private _createDepthMaterial(): ShaderMaterial {
        const vertexShader = `
            precision highp float;
            attribute vec3 position;
            uniform mat4 worldViewProjection;
            varying float vDepth;
            void main() {
                vec4 pos = worldViewProjection * vec4(position, 1.0);
                gl_Position = pos;
                vDepth = pos.z / pos.w; // normalized depth after perspective division
            }`;
        const fragmentShader = `
            precision highp float;
            varying float vDepth;
            void main() {
                gl_FragColor = vec4(vDepth, vDepth, vDepth, 1.0);
            }`;
        return new ShaderMaterial("depthMaterial", this._tempScene, {
            vertexSource: vertexShader,
            fragmentSource: fragmentShader
        }, {
            attributes: ["position"],
            uniforms: ["worldViewProjection"]
        });
    }

    public dispose() {
        // stop the check loop first so no further renders are scheduled on a disposed scene
        this.checkEnabled = false;
        // release the render targets and cloned meshes before the scene itself
        this._tempScene.customRenderTargets.forEach(rt => rt.dispose());
        this._tempScene.customRenderTargets = [];
        this._tempScene.meshes.slice().forEach(mesh => mesh.dispose()); // copy: dispose() mutates scene.meshes
        this._tempScene.dispose();
    }
}
I. Core Concept
This approach combines off-screen rendering with depth comparison to build a dynamic, occlusion-driven transparency system. It is split into two modules:
- OcclusionTester: runs the core occlusion-detection logic
- OcclusionFader: a Behavior component that adjusts the mesh's transparency based on the detection result
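Before diving into the details, a minimal usage sketch (the import path and mesh names are placeholders; the API calls come from the classes above): attach the behavior to the mesh that should fade, then register the meshes it must not hide.
import { OcclusionFader } from "./OcclusionFader"; // placeholder path

// "wall" is the mesh that fades out, "character" is the mesh it must not hide.
// wall needs a material already assigned, because attach() clones it.
const fader = new OcclusionFader(0.2);  // minimum visibility while occluding (default 0.1)
wall.addBehavior(fader);                // attach(): clones the material, creates the tester, starts checking
fader.addMesh(character);               // registered meshes are rendered into depthTexA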
II. Key Implementation Details
1. Dual-scene rendering
- Main scene: hosts the actual visible 3D objects
- Temporary scene: used exclusively for off-screen depth rendering
- Benefit: the main scene's render pipeline is left untouched
this._tempScene = new Scene(this._engine);
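The matching camera setup, condensed from the OcclusionTester constructor above:
// Give the temporary scene its own copy of the main camera so the off-screen
// render uses the same view as the main scene.
this._tempCam = mainScene.activeCamera!.clone("tempCamera") as UniversalCamera;
this._mainScene.removeCamera(this._tempCam);  // clone() registers the copy with the main scene
this._tempScene.addCamera(this._tempCam);
this._tempScene.activeCamera = this._tempCam;
this._tempScene.clearColor = new Color4(0, 0, 0, 0); // empty pixels read back as 0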
2. Capturing depth
- Two depth maps are generated with RenderTargetTexture:
- depthTexA: depth of the registered mesh group (the meshes passed in via addMesh)
- depthTexB: depth of the target mesh (the one the behavior is attached to)

// create a depth render target (a float texture, so the values survive readPixels)
createDepthTex(name: string, size: { width: number, height: number }) {
    return new RenderTargetTexture(name, size, this._tempScene, {
        generateDepthBuffer: true,
        type: Engine.TEXTURETYPE_FLOAT
    });
}
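Each texture is then restricted to its own set of clones; this happens in updateMesh in the listing above:
this._depthTexA.renderList = this._meshesCloned;   // clones of the registered meshes
this._depthTexB.renderList = this._meshOccCloned;  // clone of the mesh that fades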
3. Depth comparison

for (let i = 0; i < depthBufA.length; i += 4) {
    if (depthBufA[i] > 0 && depthBufB[i] > 0) {
        if (depthBufB[i] < depthBufA[i]) {
            isOccluded = true;
            break;
        }
    }
}
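Both buffers are RGBA float data, so the loop advances by 4 and only inspects the R channel, where the shader wrote the normalized depth. A pixel counts only when both textures rendered geometry there (value > 0), and the flag is raised as soon as the target mesh is nearer to the camera than one of the registered meshes. A possible refinement, not part of the original code, is to require a minimum fraction of such pixels before reacting, which reduces flicker when the silhouettes barely overlap (a hypothetical helper written against the same buffers):
function isOccludingByRatio(depthA: Float32Array, depthB: Float32Array, minRatio: number = 0.02): boolean {
    let overlap = 0;    // pixels where both textures contain geometry
    let occluding = 0;  // pixels where the target (B) is nearer than the group (A)
    for (let i = 0; i < depthA.length; i += 4) {
        if (depthA[i] > 0 && depthB[i] > 0) {
            overlap++;
            if (depthB[i] < depthA[i]) occluding++;
        }
    }
    return overlap > 0 && occluding / overlap >= minRatio;
}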
4. Transparency fade control
// smooth transition
this._vUse += this._engine!.getDeltaTime() * 0.005;
this._mesh!.material!.alpha = this._vUse;
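Since getDeltaTime() is in milliseconds, the 0.005 factor changes alpha by roughly 0.08 per frame at 60 fps, so a fade from fully opaque down to the default minimum visibility of 0.1 takes about 180 ms; adjust the factor to slow down or speed up the transition.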
III. Step-by-Step Walkthrough
Step 1: Scene initialization
- Clone the main scene's camera into the temporary scene
- Use a pure black, transparent clear color so background pixels do not interfere with the depth readback
Step 2: Mesh cloning
- Clone the meshes involved in the check into the temporary scene
- Swap their materials for the dedicated depth material

private _cloneMeshToTempScene(mesh: Mesh) {
    const clone = mesh.clone();
    clone.material = this._mat; // use the depth material
    return clone;
}
(Simplified; the full version above also removes the clone from the main scene and strips any OcclusionFader behavior so the clone cannot start its own tester.)
Step 3: Asynchronous depth testing
- The check never blocks the main thread: the off-screen render is awaited through executeWhenReady, and the next check is scheduled with setTimeout
- The depth buffers are read back with readPixels
const depthBuf = await texture.readPixels() as Float32Array;
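In the full implementation the call also passes the reduced width and height (this._w, this._h) explicitly; the cast works because the render targets were created with Engine.TEXTURETYPE_FLOAT, so the returned buffer is a Float32Array rather than a Uint8Array.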
Step 4: Result feedback
- The result is pushed to the transparency controller through the onFinishCheckOcclusion Observable
- This keeps detection and rendering decoupled
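The OcclusionTester can also be used on its own; a minimal sketch (the scene and mesh variables are placeholders, the calls are the public API shown above):
const tester = new OcclusionTester(scene);
tester.setDivisor(8);                          // depth maps at 1/8 resolution
tester.updateMesh([character], wall);          // A: meshes to keep visible, B: the potential occluder
tester.onFinishCheckOcclusion.add((isOccluded) => {
    console.log("wall currently blocks the character:", isOccluded);
});
tester.startCheckOcclusion();                  // starts the self-rescheduling check loop
// ... later, when the check is no longer needed:
tester.stopCheckOcclusion();
tester.dispose();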
IV. Performance Optimization
- Resolution control: the divisor parameter trades detection precision for speed (see the cost estimate after this list)
setDivisor(8); // run the check at 1/8 of the render resolution
- Asynchronous checking: setTimeout keeps the event loop responsive between checks
- Object reuse: the clones are created in updateMesh and reused by every subsequent check instead of being rebuilt per frame
- On-demand checking: the detection loop only runs between startCheckOcclusion and stopCheckOcclusion
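To put the divisor into numbers: at a 1920x1080 render size, a divisor of 8 gives 240x135 depth maps, so each readPixels call transfers about 32,000 pixels instead of roughly 2,000,000, and the CPU-side comparison loop shrinks by the same factor of about 64.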
V. Example Use Cases
- Keeping important objects visible in AR applications
- Highlighting the selected object in a 3D editor
- Managing dynamic scene elements in games
- Protecting key information on data-visualization dashboards
VI. Possible Further Optimizations
- Leverage WebGL2: read depth from a real depth texture (or Babylon's DepthRenderer) instead of encoding it into a float color texture with a custom shader (see the sketch after this list)
- GPU-accelerated comparison: move the depth comparison into a compute shader instead of reading pixels back to the CPU
- Spatial partitioning: combine the check with octree-based space subdivision
- LOD strategy: adjust the detection precision dynamically
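For the first point, a rough sketch of what the DepthRenderer route might look like; it reuses the _tempScene/_tempCam/_meshesCloned fields defined above, is untested here, and the exact buffer type returned by readPixels can vary by platform:
// Sketch only: let Babylon's built-in DepthRenderer produce the depth map
// instead of the custom ShaderMaterial (would live inside OcclusionTester,
// e.g. in an async method like checkOcclusion()).
const depthRenderer = this._tempScene.enableDepthRenderer(this._tempCam);
const depthMap = depthRenderer.getDepthMap();   // RenderTargetTexture holding scene depth
depthMap.renderList = this._meshesCloned;       // limit it to the cloned meshes
this._tempScene.render();                       // the depth map is rendered along with the scene
const pixels = await depthMap.readPixels() as Float32Array; // cast as in the original code; format depends on platform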
VII. Summary
Through its dual-scene architecture, this approach achieves accurate real-time occlusion detection without touching the main scene's rendering performance. Combining the depth-comparison pass with transparency control shows what WebGL can do in complex interactive scenes, and developers can tune the detection resolution and fade speed to find the right balance between visual quality and performance cost for their use case.