import * as THREE from "three";
import { ThreeHelper } from "@/src/ThreeHelper";
import { MethodBaseSceneSet, LoadGLTF } from "@/src/ThreeHelper/decorators";
import { MainScreen } from "./Canvas";
import type { GLTF } from "three/examples/jsm/loaders/GLTFLoader";
import { Injectable } from "@/src/ThreeHelper/decorators/DI";
import type { GUI } from "dat.gui";
import PMREMGenerator from "three/examples/jsm/renderers/common/extras/PMREMGenerator.js";
import WebGPURenderer from "three/examples/jsm/renderers/webgpu/WebGPURenderer";
import { MeshStandardNodeMaterial, normalWorld, pmremTexture, uniform } from "three/examples/jsm/nodes/Nodes";
import { RoomEnvironment } from "three/examples/jsm/environments/RoomEnvironment";
@Injectable
@ThreeHelper.useWebGPU
export class Main extends MainScreen {
static instance: Main;
dummy?: THREE.SkinnedMesh;
constructor(private helper: ThreeHelper) {
super(helper);
helper.main = this;
this.init();
Main.instance = this;
}
@MethodBaseSceneSet({
addAxis: true,
cameraPosition: new THREE.Vector3(0, 1, 3),
cameraTarget: new THREE.Vector3(0, 1, 0),
useRoomLight: false,
near: 0.1,
far: 100,
})
init() {
this.loadModel();
// this.helper.setBackgroundHDR("/public/env/sunflowers_puresky_2k/");
this.helper.setBackgroundNode("/public/env/industrial_sunset_puresky_2k.jpg");
// this.helper.setBackgroundNode("/public/env/rosendal_park_sunset_puresky_2k.jpg");
}
@LoadGLTF("/public/models/martelo.glb")
loadModel(gltf?: GLTF) {
if (gltf) {
this.helper.add(gltf.scene);
const model = this.helper.get("Ch03") as THREE.SkinnedMesh;
const mixer = new this.helper.AnimationPlayer({
root: gltf.scene,
animations: gltf.animations,
});
ThreeHelper.handles.push(() => {
mixer.update();
});
}
}
@ThreeHelper.InjectAnimation(Main)
animation() {}
@ThreeHelper.AddGUI(Main)
createEnvTexture(gui: GUI) {
gui.addFunction(async () => {
const renderer = this.helper.renderer as WebGPURenderer;
/**
* WebGPU使用 readRenderTargetPixelsAsync 方法获取纹理数据
* 内部是 Render的readRenderTargetPixelsAsync 方法 调用 backend(WebGPUBackend)的 copyTextureToBuffer 方法
* 调用 textureUtils 的 copyTextureToBuffer方法
* 将该方法copy一份 本地执行 绑定this => `this.copyTextureToBuffer.call(renderer.backend.textureUtils,...args)`
* 要获取cube贴图数据 需在目前copyTextureToBuffer方法中加入z选择 选择第几张图片
*
* 无关问题: 当前165版本 未深入
* 第二次调用此方法时 ‘const textureData = this.backend.get( texture );’
* 会运行‘this.data.set( object, map );’
* 报错‘Invalid value used as weak map key’
*
*/
const cubeRenderTarget = new THREE.WebGLCubeRenderTarget(2048, {
/**
* bug:目前 使用 THREE.SRGBColorSpace 回发出警告如下
* Texture view array layer range (baseArrayLayer: 1, arrayLayerCount: 1) exceeds the texture's array layer count (1).
* 猜测是提供的层数不对 应该是6不知three内部怎么处理可能是1 导致贴上贴图六个面都是一张图
* 暂时使用默认参数 NoColorSpace
*/
colorSpace: THREE.NoColorSpace,
});
// cubeRenderTarget.texture.type = THREE.HalfFloatType;
// cubeRenderTarget.texture.minFilter = THREE.LinearMipmapLinearFilter;
// cubeRenderTarget.texture.magFilter = THREE.LinearFilter;
// cubeRenderTarget.texture.generateMipmaps = true;
const cubeCamera = new THREE.CubeCamera(1, 1000, cubeRenderTarget);
cubeCamera.position.y = -3;
cubeCamera.update(renderer as any, this.helper.scene);
const sphere = this.helper.create.sphere(1, 32, 32);
this.helper.add(sphere.mesh);
sphere.material(
new THREE.MeshStandardMaterial({
envMap: cubeRenderTarget.texture,
envMapIntensity: 1,
roughness: 0,
metalness: 1,
side: THREE.DoubleSide,
})
);
// const pixelsAsync = await this.copyTextureToBuffer.call(
// renderer.backend.textureUtils,
// cubeRenderTarget.texture,
// 0,
// 0,
// 2048,
// 2048,
// 0
// );
// console.log(pixelsAsync);
for (let index = 0; index < 6; index++) {
try {
const pixelsAsync = await this.copyTextureToBuffer.call(
renderer.backend.textureUtils,
cubeRenderTarget.texture,
0,
0,
2048,
2048,
index
);
pixelsAsync && this.appendCanvas(pixelsAsync, index);
} catch (error) {
console.log("error:", error);
}
}
}, "创建全景图");
/**
* examples/jsm/renderer/webgpu/utils/WebGPUTextureUtils/WebGPUTextureUtils/copyTextureToBuffer
*
* readRenderTargetPixelsAsync( renderTarget, x, y, width, height, index = 0 ) {
*
* return this.backend.copyTextureToBuffer( renderTarget.textures[ index ], x, y, width, height );
*
* }
*/
async copyTextureToBuffer(
texture: THREE.Texture,
x: number,
y: number,
width: number,
height: number,
activeCubeFaceIndex = 0
) {
// 获取 原生的device对象
const device = this.backend.device;
// 获取纹理的数据 同 WebGL的 properties.get( renderTarget ).__webglFramebuffer 内部都是一个WeakMap对象保存纹理的相关数据
const textureData = this.backend.get(texture);
//获取原生的GPUTexture对象 也是实际的纹理对象
const textureGPU = textureData.texture;
// 获取格式 例如 rgba8unorm
const format = textureData.textureDescriptorGPU.format;
console.log("format", format);
// 获取格式对应的大小 例如 rgba8unorm 是8位的
const bytesPerTexel = this._getBytesPerTexel(format);
console.log("bytesPerTexel", bytesPerTexel);
//所需字节大小
let bytesPerRow = width * bytesPerTexel;
bytesPerRow = Math.ceil(bytesPerRow / 256) * 256; // Align to 256 bytes
//创建缓冲区
const readBuffer = device.createBuffer({
size: width * height * bytesPerTexel,
usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ,
});
// 创建命令解码器
const encoder = device.createCommandEncoder();
// 命令解码器复制纹理到缓冲区
encoder.copyTextureToBuffer(
{
texture: textureGPU,
origin: {
x,
y,
/** cube数组中的第几张 0-5 */
z: activeCubeFaceIndex,
},
mipLevel: 0,
aspect: "all",
},
{
buffer: readBuffer,
bytesPerRow: bytesPerRow,
},
{
width: width,
height: height,
// depthOrArrayLayers: 1,
}
);
// 获取保存纹理所需对应的数据格式 例如 Uint8Array Uint16Array Uint32Array等 根据textureData.textureDescriptorGPU.format
const typedArrayType = this._getTypedArrayType(format);
// 将 命令解码器提交到队列
device.queue.submit([encoder.finish()]);
// 将缓冲区进行映射
await readBuffer.mapAsync(GPUMapMode.READ);
// 获取缓冲区数据
const buffer = readBuffer.getMappedRange();
if (typedArrayType) {
// 创建对应的数据格式
return new typedArrayType(buffer);
}
}
appendCanvas(pixels: Uint8Array, index = 0) {
const _pixels = Uint8ClampedArray.from(pixels);
// new Uint8ClampedArray(2048 * 2048 * 4)
const canvas = document.createElement("canvas");
canvas.width = 2048;
canvas.height = 2048;
const ctx = canvas.getContext("2d");
if (ctx) {
// 如果不是 Uint8ClampedArray 则转换成 Uint8ClampedArray类型数据
// const _data = Uint8ClampedArray.from(pixels);
const imageData = new ImageData(_pixels, 2048, 2048);
ctx.putImageData(imageData, 0, 0);
document.body.appendChild(canvas);
canvas.style.position = "fixed";
canvas.style.zIndex = "999";
canvas.style.left = index * 200 + "px";
canvas.style.top = 200 + "px";
canvas.style.width = "200px";
canvas.style.height = "200px";
}
}
}
// Three & WebGPU: render the scene into a panorama (cube map) and export it.
// First published 2024-07-26 21:22:37.