Top-View Screenshots in Three.js
In most projects, every model needs a corresponding image, usually a top view, to be displayed in the matching 2D scene. Below is one way to implement this: first clear the textures from the model's materials and set their colors to white, then capture a top view with an orthographic camera and generate an image that serves as the model's icon in the 2D scene.
Here is the code that captures a model's top view:
import * as THREE from 'three';
import { OutlinePostProcess } from './OutlinePostProcess';

export class ModelCapture {
  private renderer: THREE.WebGLRenderer;
  private scene: THREE.Scene;
  private camera: THREE.OrthographicCamera;
  private outlineProcess: OutlinePostProcess;
  private width: number = 240;
  private height: number = 260;

  constructor() {
    this.scene = new THREE.Scene();
    this.renderer = new THREE.WebGLRenderer({
      antialias: true,
      alpha: true,
      preserveDrawingBuffer: true
    });
    this.camera = new THREE.OrthographicCamera(0, 0, 0, 0, 0.1, 2000);
    this.camera.position.set(0, 100, 0);
    this.camera.lookAt(0, 0, 0);

    const ambientLight = new THREE.AmbientLight(0xffffff, 1);
    this.scene.add(ambientLight);

    this.outlineProcess = new OutlinePostProcess(
      this.renderer,
      this.scene,
      this.camera,
      this.width,
      this.height
    );
    this.outlineProcess.setDefaultEnabled(true);
    this.outlineProcess.setEnabled(true);
    this.outlineProcess.makeOutlineDirty();
  }

  public captureModel(model: THREE.Group): void {
    const root = model;
    this.scene.add(root);

    // Fit the capture size to the model's footprint
    const boundingBox = new THREE.Box3().setFromObject(root);
    const size = new THREE.Vector3();
    boundingBox.getSize(size);
    this.updateSize(size.x, size.z);

    // Strip textures and force white materials so only the shape and outline remain
    root.traverse((child: THREE.Object3D) => {
      if (child instanceof THREE.Mesh) {
        if (Array.isArray(child.material)) {
          child.material.forEach(material => {
            if (material.map) material.map = null;
            material.color = new THREE.Color(1, 1, 1);
          });
        } else if (child.material && child.material.map) {
          child.material.map = null;
          child.material.color = new THREE.Color(1, 1, 1);
        }
      }
    });

    this.outlineProcess.makeOutlineDirty();
    this.outlineProcess.render();

    const imageUrl = this.renderer.domElement.toDataURL('image/png');
    const img = document.createElement('img');
    img.id = 'model-capture';
    img.src = imageUrl;
    img.style.position = 'absolute';
    img.style.top = '0';
    img.style.right = '0';
    img.style.width = '20%';
    img.style.height = '20%';
    document.body.appendChild(img);
  }

  // Update the scene size
  public updateSize(width: number, height: number) {
    // Update the renderer size
    this.renderer.setSize(width, height);
    // Update the camera frustum
    this.camera.left = width / -2;
    this.camera.right = width / 2;
    this.camera.top = height / 2;
    this.camera.bottom = height / -2;
    this.camera.updateProjectionMatrix();
    this.outlineProcess.onResize(width, height);
  }
}
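For context, here is a minimal usage sketch (not from the original project) showing how ModelCapture might be driven: load a GLB, then pass a clone of the loaded scene to captureModel. The '/bed.glb' path simply mirrors the demo asset used further below and is only an assumption.

import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader';
import { ModelCapture } from './ModelCapture';

// Hypothetical wiring: load a model and capture its top view.
const capture = new ModelCapture();
const loader = new GLTFLoader();
loader.load('/bed.glb', (gltf) => {
  // captureModel strips textures, whitens the materials, renders the top view
  // with the orthographic camera, and appends the resulting <img> to the page.
  capture.captureModel(gltf.scene.clone());
});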
To make it easier to check the top-view result, I also built an identical scene so the content seen by the capture camera can be observed in real time. The code is as follows:
import * as THREE from 'three'
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls.js';
import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader'
import { OutlinePostProcess } from './OutlinePostProcess';
import { ModelCapture } from './ModelCapture';

export class TopView {
  private static initialized = false;
  private static renderer: THREE.WebGLRenderer;
  private static scene: THREE.Scene;
  private static camera: THREE.OrthographicCamera;
  private static outlineProcess: OutlinePostProcess;
  private static model: THREE.Group;
  private static modelCapture: ModelCapture;

  public static main() {
    TopView.modelCapture = new ModelCapture();
    if (TopView.initialized) {
      return;
    }
    TopView.initialized = true;
    console.log("TopView");

    this.scene = new THREE.Scene();
    const container = document.getElementById('main') as HTMLDivElement;
    if (!container) {
      console.error('Container element not found');
      return;
    }
    this.renderer = new THREE.WebGLRenderer({
      antialias: true,
      alpha: true,
      preserveDrawingBuffer: true
    });
    container.appendChild(this.renderer.domElement);

    this.camera = new THREE.OrthographicCamera(0, 0, 0, 0, 0.1, 2000);
    this.outlineProcess = new OutlinePostProcess(this.renderer, this.scene, this.camera, 240, 260);
    this.updateSize(240, 260);
    window.addEventListener('resize', () => {
      this.updateSize(240, 260);
    });
    this.camera.position.set(0, 100, 0);
    this.camera.lookAt(0, 0, 0);
    (globalThis as any).testCamera = TopView.camera;

    // Add ambient light
    const ambientLight = new THREE.AmbientLight(0xffffff, 1);
    this.scene.add(ambientLight);

    // Add an axes helper
    const axesHelper = new THREE.AxesHelper(500);
    this.scene.add(axesHelper);

    // Add a grid helper
    const gridHelper = new THREE.GridHelper(1000, 20);
    this.scene.add(gridHelper);

    // Load the GLB model
    const loader = new GLTFLoader();
    loader.load('/bed.glb', (gltf: any) => {
      const root = gltf.scene;
      root.scale.set(0.1, 0.1, 0.1);
      root.rotation.set(0, 0, 0);
      // Get the model's bounding box
      const boundingBox = new THREE.Box3().setFromObject(root);
      const size = new THREE.Vector3();
      boundingBox.getSize(size);
      console.log('Model size:', size);
      TopView.scene.add(root);
      TopView.model = root.clone();
    }, undefined, (error: any) => {
      console.error('Error loading model:', error);
    });

    // Add orbit controls
    const controls = new OrbitControls(this.camera, this.renderer.domElement);
    controls.enableDamping = true;            // enable damping
    controls.dampingFactor = 0.05;            // damping factor
    controls.screenSpacePanning = false;      // disable screen-space panning
    controls.minDistance = 100;               // minimum zoom distance
    controls.maxDistance = 500;               // maximum zoom distance
    controls.maxPolarAngle = Math.PI / 2;     // limit vertical rotation

    // Render loop
    function animate() {
      requestAnimationFrame(animate);
      controls.update(); // update controls
      // TopView.renderer.render(TopView.scene, TopView.camera)
      TopView.outlineProcess.makeOutlineDirty();
      TopView.outlineProcess.render();
    }
    animate();
  }

  // Update the scene size
  public static updateSize(width: number, height: number) {
    // Update the renderer size
    this.renderer.setSize(width, height);
    // Update the camera frustum
    this.camera.left = width / -2;
    this.camera.right = width / 2;
    this.camera.top = height / 2;
    this.camera.bottom = height / -2;
    this.camera.updateProjectionMatrix();
    this.outlineProcess.onResize(width, height);
  }

  // Capture the current scene as shown in the page
  public static async captureScene() {
    this.outlineProcess.makeOutlineDirty();
    this.outlineProcess.render();
    const imageUrl = this.renderer.domElement.toDataURL('image/png');
    const img = document.createElement('img');
    img.id = 'scene-capture';
    img.src = imageUrl;
    img.style.position = 'absolute';
    img.style.top = '0';
    img.style.left = '0';
    img.style.width = '20%';
    img.style.height = '20%';
    document.body.appendChild(img);
  }

  // Capture the model with the capture helper class
  public static async captureModel() {
    await TopView.modelCapture.captureModel(TopView.model.clone());
  }
}

(globalThis as any).TopView = TopView;
You can test the two TopView methods by entering the following in the browser console:
// Capture the current scene
TopView.captureScene()
// Capture with the screenshot helper class
TopView.captureModel()
The result is shown below: the image on the left is the captured scene, and the one on the right is the image produced by the capture helper class.
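If the goal is to save the icon rather than just preview it in the page, the data URL produced by toDataURL can also be turned into a file download. A small sketch (the downloadDataUrl helper is illustrative, not part of the project code):

// Illustrative helper: turn a data URL into a browser download.
function downloadDataUrl(dataUrl: string, filename: string) {
  const link = document.createElement('a');
  link.href = dataUrl;
  link.download = filename;
  link.click();
}

// For example, right after a capture:
// downloadDataUrl(renderer.domElement.toDataURL('image/png'), 'model-top-view.png');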
The outline technique used here was covered in my previous post. The code has been modified slightly to make debugging easier, so the source is included below as well.
import * as THREE from "three";
import { EffectComposer, FXAAShader, GammaCorrectionShader, RenderPass, ShaderPass, SMAAPass } from "three/examples/jsm/Addons.js";

export class OutlinePostProcess {
  private _composer!: EffectComposer;
  private _normalIdRenderTarget!: THREE.WebGLRenderTarget;
  private _renderPass!: RenderPass;
  private _outlinePass!: ShaderPass;
  private _fxaaPass!: ShaderPass;
  private _smaaPass!: SMAAPass;
  // Anti-aliasing mode, 0: FXAA, 1: SMAA
  private _aaMode: number = 0;
  private _defaultEnabled: boolean = true;
  private _enabled: boolean = true;
  private _isRenderingNormalId: boolean = false;
  private _normalIdMaterial!: THREE.ShaderMaterial;
  // Avoid re-rendering the outline every frame; no render is needed when the scene has not changed
  private _outlineDirty: boolean = true;
  // Whether diagonal sampling is enabled
  private _enableDiagonalSampling: boolean = false;

  constructor(
    private renderer: THREE.WebGLRenderer,
    private scene: THREE.Scene,
    private _camera: THREE.Camera,
    private _width: number,
    private _height: number,
  ) {
    this.initNormalIdMaterial();
    this.initRenderTarget();
    this.initComposer();
  }

  public set camera(camera: THREE.Camera) {
    this._camera = camera;
    this._renderPass.camera = camera;
    this.makeOutlineDirty();
  }

  public get width() {
    const pixelRatio = this.renderer.getPixelRatio();
    return this._width * pixelRatio;
  }

  public get height() {
    const pixelRatio = this.renderer.getPixelRatio();
    return this._height * pixelRatio;
  }

  private initNormalIdMaterial() {
    this._normalIdMaterial = new THREE.ShaderMaterial({
      uniforms: {
        meshID: { value: 0.0 }
      },
      vertexShader: `
        varying vec3 vNormal;
        void main() {
          vNormal = normalize(normalMatrix * normal);
          gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
        }
      `,
      fragmentShader: `
        uniform float meshID;
        varying vec3 vNormal;
        vec2 encodeNormal(vec3 n) {
          vec2 enc = normalize(n.xy) * (sqrt(-n.z * 0.5 + 0.5));
          enc = enc * 0.5 + 0.5;
          return enc;
        }
        vec2 encodeID(float id) {
          float tempID = id / 255.0;
          float highID = floor(tempID);
          return vec2(highID / 255.0, tempID - highID);
        }
        void main() {
          vec2 encodedNormal = encodeNormal(normalize(vNormal));
          vec2 encodedID = encodeID(meshID);
          gl_FragColor = vec4(encodedNormal, encodedID);
        }
      `
    });
  }

  private switchMaterial(isNormalId: boolean) {
    if (isNormalId === this._isRenderingNormalId) {
      return;
    }
    let meshID = 1;
    const processMesh = (object: THREE.Object3D, parentSkipOutline: boolean = false) => {
      // If the parent node has outlining disabled, the current node inherits that
      const skipOutline = parentSkipOutline || object.userData.SkipOutline;
      // Skip invisible objects
      if (!object.visible) {
        return;
      }
      if (object instanceof THREE.Mesh ||
        object instanceof THREE.Line ||
        object instanceof THREE.Points ||
        object instanceof THREE.Sprite) {
        if (isNormalId) {
          object.userData.originalMaterial = object.material;
          let normalIdMaterial = object.userData.normalIdMaterial;
          if (!normalIdMaterial) {
            normalIdMaterial = this._normalIdMaterial.clone();
            object.userData.normalIdMaterial = normalIdMaterial;
          }
          normalIdMaterial.uniforms.meshID.value = skipOutline ? 0 : meshID++;
          object.material = normalIdMaterial;
        } else {
          object.material = object.userData.originalMaterial;
        }
      }
      // Recursively process all children
      object.children.forEach(child => processMesh(child, skipOutline));
    };
    // Start from the scene root
    processMesh(this.scene);
    this._isRenderingNormalId = isNormalId;
  }

  private initRenderTarget() {
    this._normalIdRenderTarget = new THREE.WebGLRenderTarget(
      this.width,
      this.height,
      {
        format: THREE.RGBAFormat,
        type: THREE.FloatType,
        minFilter: THREE.NearestFilter,
        magFilter: THREE.NearestFilter,
        colorSpace: THREE.SRGBColorSpace,
        count: 1
      }
    );
  }

  private initComposer() {
    this._composer = new EffectComposer(this.renderer);

    // Add the main render pass
    this._renderPass = new RenderPass(this.scene, this._camera);
    this._composer.addPass(this._renderPass);

    // Placed after the render pass to fix colors becoming darker after rendering
    const gammaCorrectionShader = new ShaderPass(GammaCorrectionShader);
    this._composer.addPass(gammaCorrectionShader);

    // Add the outline post-processing pass
    this._outlinePass = new ShaderPass({
      uniforms: {
        tDiffuse: { value: null },
        tNormalId: { value: null },
        resolution: { value: new THREE.Vector2(1 / this.width, 1 / this.height) },
        outlineColor: { value: new THREE.Vector4(0.0, 0.0, 0.0, 1.0) },
        lowIDConfig: { value: 1.0 },
        lowNormalConfig: { value: 0.8 },
        intensityConfig: { value: 0.3 },
        enableDiagonalSampling: { value: this._enableDiagonalSampling }
      },
      vertexShader: `
        varying vec2 vUv;
        void main() {
          vUv = uv;
          gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
        }
      `,
      fragmentShader: `
        uniform sampler2D tDiffuse;
        uniform sampler2D tNormalId;
        uniform vec2 resolution;
        uniform vec4 outlineColor;
        uniform float lowIDConfig;
        uniform float lowNormalConfig;
        uniform float intensityConfig;
        uniform bool enableDiagonalSampling;
        varying vec2 vUv;
        vec3 decodeNormal(vec2 enc) {
          vec4 nn = vec4(enc, 0.0, 0.0) * vec4(2.0,2.0,0.0,0.0) + vec4(-1.0,-1.0,1.0,-1.0);
          float l = dot(nn.xyz,-nn.xyw);
          nn.z = l;
          nn.xy *= sqrt(l);
          return nn.xyz * 2.0 + vec3(0.0,0.0,-1.0);
        }
        float decodeID(vec2 enc) {
          return floor((enc.x * 255.0 + enc.y) * 255.0 + 0.5);
        }
        // Sampling helper
        vec2 sampleDirection(vec2 uv, vec2 offset, vec3 currentNormal, float currentID) {
          vec4 texSample = texture2D(tNormalId, uv + offset);
          float id = decodeID(texSample.zw);
          if(id < 0.5) {
            return vec2(0.0);
          }
          vec3 normalSample = decodeNormal(texSample.xy);
          float normalDiff = 1.0 - abs(dot(currentNormal, normalSample));
          float idDiff = abs(currentID - id) < 0.0001 ? 0.0 : 1.0;
          return vec2(normalDiff, idDiff);
        }
        void main() {
          vec4 tex = texture2D(tNormalId, vUv);
          if(tex.x == 0.0 && tex.y == 0.0 && tex.z == 0.0) {
            gl_FragColor = texture2D(tDiffuse, vUv);
            return;
          }
          float currentID = decodeID(tex.zw);
          if(currentID < 0.5) {
            gl_FragColor = texture2D(tDiffuse, vUv);
            return;
          }
          vec3 currentNormal = decodeNormal(tex.xy);
          // Sample the four axis-aligned directions with the helper
          vec2 rightSample = sampleDirection(vUv, vec2(resolution.x, 0.0), currentNormal, currentID);
          vec2 leftSample = sampleDirection(vUv, vec2(-resolution.x, 0.0), currentNormal, currentID);
          vec2 downSample = sampleDirection(vUv, vec2(0.0, resolution.y), currentNormal, currentID);
          vec2 upSample = sampleDirection(vUv, vec2(0.0, -resolution.y), currentNormal, currentID);
          // Optionally sample the diagonal directions
          float diagonalIdDiff = 0.0;
          float diagonalNormalDiff = 0.0;
          if(enableDiagonalSampling) {
            vec2 rightUpSample = sampleDirection(vUv, vec2(resolution.x, -resolution.y), currentNormal, currentID);
            vec2 rightDownSample = sampleDirection(vUv, vec2(resolution.x, resolution.y), currentNormal, currentID);
            vec2 leftUpSample = sampleDirection(vUv, vec2(-resolution.x, -resolution.y), currentNormal, currentID);
            vec2 leftDownSample = sampleDirection(vUv, vec2(-resolution.x, resolution.y), currentNormal, currentID);
            diagonalNormalDiff = rightUpSample.x + rightDownSample.x + leftUpSample.x + leftDownSample.x;
            diagonalIdDiff = rightUpSample.y + rightDownSample.y + leftUpSample.y + leftDownSample.y;
          }
          float totalIdDiff = rightSample.y + leftSample.y + downSample.y + upSample.y + diagonalIdDiff * 0.5;
          float totalNormalDiff = rightSample.x + leftSample.x + downSample.x + upSample.x + diagonalNormalDiff * 0.5;
          vec2 result = clamp(
            vec2(totalNormalDiff * lowNormalConfig, totalIdDiff * lowIDConfig) * intensityConfig,
            0.0,
            1.0
          );
          float outlineStrength = max(result.x, result.y);
          vec4 sceneColor = texture2D(tDiffuse, vUv);
          gl_FragColor = mix(sceneColor, outlineColor, outlineStrength * outlineColor.a);
        }
      `
    });
    this._composer.addPass(this._outlinePass);

    if (this._aaMode === 0) {
      // Add the FXAA anti-aliasing pass
      this._fxaaPass = new ShaderPass(FXAAShader);
      this._fxaaPass.material.uniforms.resolution.value.x = 1 / (this.width);
      this._fxaaPass.material.uniforms.resolution.value.y = 1 / (this.height);
      this._composer.addPass(this._fxaaPass);
    } else {
      // Create the SMAA pass
      this._smaaPass = new SMAAPass(this.width, this.height);
      this._composer.addPass(this._smaaPass);
    }
  }

  public setEnabled(enabled: boolean) {
    this._enabled = enabled;
    if (enabled) {
      this._outlineDirty = true;
    }
  }

  public setDefaultEnabled(t: boolean) {
    this._defaultEnabled = t;
  }

  public get isEnabled(): boolean {
    return this._enabled;
  }

  public get isDefaultEnabled() {
    return this._defaultEnabled;
  }

  public onResize(w: number, h: number) {
    this._width = w;
    this._height = h;
    // Update the renderer size
    this.renderer.setSize(this.width, this.height, false);
    // Update the post-processing sizes
    this._normalIdRenderTarget.setSize(this.width, this.height);
    this._composer.setSize(this.width, this.height);
    this._outlinePass.uniforms.resolution.value.set(1 / this.width, 1 / this.height);
    // Update the anti-aliasing pass size
    if (this._aaMode === 0) {
      this._fxaaPass.material.uniforms.resolution.value.x = 1 / (this.width);
      this._fxaaPass.material.uniforms.resolution.value.y = 1 / (this.height);
    } else {
      this._smaaPass.setSize(this.width, this.height);
    }
  }

  public render() {
    if (!this._enabled) {
      // If the outline effect is disabled, do a plain render
      this.renderer.render(this.scene, this._camera);
      return;
    }
    // Render normals and IDs into the render target
    if (this._outlineDirty) {
      this.switchMaterial(true);
      this.renderer.setRenderTarget(this._normalIdRenderTarget);
      this.renderer.render(this.scene, this._camera);
      this._outlineDirty = false; // mark clean until the scene changes again
    }
    // Update the outline pass texture
    this._outlinePass.uniforms.tNormalId.value = this._normalIdRenderTarget.texture;
    // this.showRenderTarget(this.renderer, this._normalIdRenderTarget, this.width, this.height);
    // Restore the original materials for the normal render
    this.switchMaterial(false);
    this.renderer.setRenderTarget(null);
    // Run the post-processing render
    this._composer.render();
  }

  public makeOutlineDirty() {
    this._outlineDirty = true;
  }

  public setLowIDConfig(value: number) {
    this._outlinePass.uniforms.lowIDConfig.value = value;
    this.makeOutlineDirty();
  }

  public getLowIDConfig() {
    return this._outlinePass.uniforms.lowIDConfig.value;
  }

  public setLowNormalConfig(value: number) {
    this._outlinePass.uniforms.lowNormalConfig.value = value;
    this.makeOutlineDirty();
  }

  public getLowNormalConfig() {
    return this._outlinePass.uniforms.lowNormalConfig.value;
  }

  public setIntensityConfig(value: number) {
    this._outlinePass.uniforms.intensityConfig.value = value;
    this.makeOutlineDirty();
  }

  public getIntensityConfig() {
    return this._outlinePass.uniforms.intensityConfig.value;
  }

  // Enable or disable diagonal sampling
  public setEnableDiagonalSampling(enable: boolean) {
    this._enableDiagonalSampling = enable;
    this._outlinePass.uniforms.enableDiagonalSampling.value = enable;
    this.makeOutlineDirty();
  }

  // Whether diagonal sampling is enabled
  public getEnableDiagonalSampling(): boolean {
    return this._enableDiagonalSampling;
  }

  public getOutlineColor(): THREE.Vector4 {
    return this._outlinePass.uniforms.outlineColor.value;
  }

  public setOutlineColor(x: number, y: number, z: number) {
    this._outlinePass.uniforms.outlineColor.value.set(x, y, z, 1);
  }

  public showRenderTarget(render: THREE.WebGLRenderer, target: THREE.WebGLRenderTarget, width: number, height: number) {
    // Pick the typed array that matches the render target's data type
    let pixels;
    if (target.texture.type === THREE.FloatType) {
      pixels = new Float32Array(width * height * 4);
    } else {
      pixels = new Uint8Array(width * height * 4);
    }
    // Read the render target's pixel data back
    render.setRenderTarget(target);
    render.readRenderTargetPixels(target, 0, 0, width, height, pixels);
    render.setRenderTarget(null);

    // Display the data in an <img> element
    let imgElement = document.getElementById('normalIdTexture') as HTMLImageElement;
    if (!imgElement) {
      imgElement = document.createElement('img');
      imgElement.id = 'normalIdTexture';
      // Style the image so it is visible
      imgElement.style.position = 'fixed';
      imgElement.style.top = '120px';
      imgElement.style.left = '10px';
      imgElement.style.width = '400px';
      imgElement.style.height = 'auto';
      imgElement.style.border = '1px solid #ccc';
      imgElement.style.zIndex = '100000';
      document.body.appendChild(imgElement);
    }

    const canvas = document.createElement('canvas');
    canvas.width = width;
    canvas.height = height;
    const ctx = canvas.getContext('2d');
    if (ctx) {
      let uint8ClampedArray;
      if (pixels instanceof Float32Array) {
        // If the data is a Float32Array, convert it to a Uint8ClampedArray
        uint8ClampedArray = new Uint8ClampedArray(width * height * 4);
        for (let i = 0; i < pixels.length; i++) {
          uint8ClampedArray[i] = Math.min(255, Math.max(0, pixels[i] * 255));
        }
      } else {
        uint8ClampedArray = new Uint8ClampedArray(pixels);
      }
      // Force the alpha channel to be opaque
      // for (let i = 3; i < pixels.length; i += 4) {
      //   uint8ClampedArray[i] = 255;
      // }
      const imageData = new ImageData(uint8ClampedArray, width, height);
      // Create a temporary canvas holding the raw image
      const tempCanvas = document.createElement('canvas');
      tempCanvas.width = width;
      tempCanvas.height = height;
      const tempCtx = tempCanvas.getContext('2d');
      if (tempCtx) {
        tempCtx.putImageData(imageData, 0, 0);
        // Flip the image vertically with a canvas transform
        ctx.save();
        ctx.scale(1, -1);
        ctx.translate(0, -height);
        ctx.drawImage(tempCanvas, 0, 0);
        ctx.restore();
      }
    }
    imgElement.src = canvas.toDataURL();
  }
}
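One detail worth noting in switchMaterial: it reads object.userData.SkipOutline and propagates the flag to children, giving those nodes meshID 0 so they never produce edges. A minimal sketch of tagging an object this way (the ground-plane mesh here is only an illustration, not part of the project):

import * as THREE from 'three';

const scene = new THREE.Scene();

// Example: exclude a helper plane (and all of its children) from the outline pass.
const ground = new THREE.Mesh(
  new THREE.PlaneGeometry(1000, 1000),
  new THREE.MeshBasicMaterial({ color: 0xffffff })
);
ground.rotation.x = -Math.PI / 2;
ground.userData.SkipOutline = true; // switchMaterial() assigns meshID 0, so no edges are drawn for it
scene.add(ground);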