WebGPU SkyBox - with textureGatherEmu (2)
index.css:
@import url(https://webgpufundamentals.org/webgpu/resources/webgpu-lesson.css);

html, body {
  margin: 0;       /* remove the default margin */
  height: 100%;    /* make the html,body fill the page */
}

canvas {
  display: block;  /* make the canvas act like a block */
  width: 100%;     /* make the canvas fill its container */
  height: 100%;
}
index.html:
<canvas></canvas>
index.js:
// WebGPU SkyBox
// from https://webgpufundamentals.org/webgpu/webgpu-skybox-plus-environment-map.html
// see https://webgpufundamentals.org/webgpu/lessons/webgpu-utils.html#wgpu-matrix
import {mat4} from 'https://webgpufundamentals.org/3rdparty/wgpu-matrix.module.js';
import {primitives} from 'https://greggman.github.io/webgpu-utils/dist/1.x/webgpu-utils.module.js';
import GUI from 'https://muigui.org/dist/0.x/muigui.module.js';

function createCubeVertices() {
  const vertexData = new Float32Array([
     // position    |  normals
     //-------------+----------------------
     // front face     positive z
    -1,  1,  1,         0,  0,  1,
    -1, -1,  1,         0,  0,  1,
     1,  1,  1,         0,  0,  1,
     1, -1,  1,         0,  0,  1,
     // right face     positive x
     1,  1, -1,         1,  0,  0,
     1,  1,  1,         1,  0,  0,
     1, -1, -1,         1,  0,  0,
     1, -1,  1,         1,  0,  0,
     // back face      negative z
     1,  1, -1,         0,  0, -1,
     1, -1, -1,         0,  0, -1,
    -1,  1, -1,         0,  0, -1,
    -1, -1, -1,         0,  0, -1,
     // left face      negative x
    -1,  1,  1,        -1,  0,  0,
    -1,  1, -1,        -1,  0,  0,
    -1, -1,  1,        -1,  0,  0,
    -1, -1, -1,        -1,  0,  0,
     // bottom face    negative y
     1, -1,  1,         0, -1,  0,
    -1, -1,  1,         0, -1,  0,
     1, -1, -1,         0, -1,  0,
    -1, -1, -1,         0, -1,  0,
     // top face       positive y
    -1,  1,  1,         0,  1,  0,
     1,  1,  1,         0,  1,  0,
    -1,  1, -1,         0,  1,  0,
     1,  1, -1,         0,  1,  0,
  ]);

  const indexData = new Uint16Array([
     0,  1,  2,  2,  1,  3,  // front
     4,  5,  6,  6,  5,  7,  // right
     8,  9, 10, 10,  9, 11,  // back
    12, 13, 14, 14, 13, 15,  // left
    16, 17, 18, 18, 17, 19,  // bottom
    20, 21, 22, 22, 21, 23,  // top
  ]);

  return {
    vertexData,
    indexData,
    numVertices: indexData.length,
  };
}
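
// Note: createCubeVertices isn't used below; the demo draws a sphere instead
// (see the commented-out call to it just above createSphereVertices).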
async function main() {
  const adapter = await navigator.gpu?.requestAdapter();
  const device = await adapter?.requestDevice();
  if (!device) {
    fail('need a browser that supports WebGPU');
    return;
  }
  device.addEventListener('uncapturederror', e => console.error(e.error.message));

  // Get a WebGPU context from the canvas and configure it
  const canvas = document.querySelector('canvas');
  const context = canvas.getContext('webgpu');
  const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
  context.configure({
    device,
    format: presentationFormat,
    alphaMode: 'premultiplied',
  });

  const textureGatherWGSL = `
    const faceNdx = array(
      vec3i(-3, -2, 0),  // 0 [-z, -y, aX]
      vec3i( 3, -2, 1),  // 1 [ z, -y, aX]
      vec3i( 1,  3, 2),  // 2 [ x,  z, aY]
      vec3i( 1, -3, 3),  // 3 [ x, -z, aY]
      vec3i( 1, -2, 4),  // 4 [ x, -y, aZ]
      vec3i(-1, -2, 5),  // 5 [-x, -y, aZ]
    );

    fn convertCubeCoordToNormalized3DTextureCoord(v: vec3f) -> vec3f {
      let r = normalize(v);
      let absR = abs(r);
      // +----+---- Each of these is the axis index (x = 1, y = 2, z = 3) and the sign.
      // |    |
      // |    |    +- Layer
      // V    V    V
      // 0 [-z, -y, aX]   [-3, -2, 0]
      // 1 [ z, -y, aX]   [ 3, -2, 1]
      // 2 [ x,  z, aY]   [ 1,  3, 2]
      // 3 [ x, -z, aY]   [ 1, -3, 3]
      // 4 [ x, -y, aZ]   [ 1, -2, 4]
      // 5 [-x, -y, aZ]   [-1, -2, 5]
      var n: u32;
      if (absR.x > absR.y && absR.x > absR.z) {
        n = select(0u, 1u, r.x < 0.0);
      } else if (absR.y > absR.z) {
        n = select(2u, 3u, r.y < 0.0);
      } else {
        n = select(4u, 5u, r.z < 0.0);
      }
      let ndx = faceNdx[n];
      let i = abs(ndx.xy) - 1;
      let uvw = vec3f(vec2f(r[i.x], r[i.y]) * vec2f(sign(ndx.xy)), absR[ndx.z / 2]);
      return vec3f((uvw.xy / uvw.z + 1.0) * 0.5, (f32(ndx.z) + 0.5) / 6.0);
    }
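
    // Worked example: v = (1, 0, 0) -> n = 0, ndx = (-3, -2, 0), i = (2, 1),
    // uvw = (-r.z, -r.y, absR.x) = (0, 0, 1), so uv = (0.5, 0.5) and
    // w = 0.5 / 6: the center of cube face layer 0 (+x).

    // The function below is the unrolled if/else equivalent of the
    // table-driven version above. It computes the same uv/layer and isn't
    // called in this variant; it's here for reference/comparison.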
    fn convertCubeCoordToNormalized3DTextureCoord2(v: vec3f) -> vec3f {
      var uvw: vec3f;
      var layer: f32;
      let r = normalize(v);
      let absR = abs(r);
      // 0 [-z, -y, aX]
      // 1 [ z, -y, aX]
      // 2 [ x,  z, aY]
      // 3 [ x, -z, aY]
      // 4 [ x, -y, aZ]
      // 5 [-x, -y, aZ]
      if (absR.x > absR.y && absR.x > absR.z) {
        // x major
        if (r.x >= 0.0) {
          uvw = vec3f(-r.z, -r.y, absR.x);
          layer = 0;
        } else {
          uvw = vec3f(r.z, -r.y, absR.x);
          layer = 1;
        }
      } else if (absR.y > absR.z) {
        // y major
        if (r.y >= 0.0) {
          uvw = vec3f(r.x, r.z, absR.y);
          layer = 2;
        } else {
          uvw = vec3f(r.x, -r.z, absR.y);
          layer = 3;
        }
      } else {
        // z major
        if (r.z >= 0.0) {
          uvw = vec3f(r.x, -r.y, absR.z);
          layer = 4;
        } else {
          uvw = vec3f(-r.x, -r.y, absR.z);
          layer = 5;
        }
      }
      return vec3f((uvw.xy / uvw.z + 1.0) * 0.5, (layer + 0.5) / 6.0);
    }
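
    // faceMat inverts the mapping above. mat3x3f takes its 9 values in
    // column-major order, so for +x the columns are (0, 0, -2), (0, -2, 0),
    // and (1, 1, 1), giving faceMat[0] * vec3f(u, v, 1) = (1, 1 - 2*v, 1 - 2*u),
    // the direction whose forward mapping lands back on (u, v) in layer 0.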
    const faceMat = array(
      mat3x3f( 0,  0, -2,   0, -2,  0,   1,  1,  1),  // +x
      mat3x3f( 0,  0,  2,   0, -2,  0,  -1,  1, -1),  // -x
      mat3x3f( 2,  0,  0,   0,  0,  2,  -1,  1, -1),  // +y
      mat3x3f( 2,  0,  0,   0,  0, -2,  -1, -1,  1),  // -y
      mat3x3f( 2,  0,  0,   0, -2,  0,  -1,  1,  1),  // +z
      mat3x3f(-2,  0,  0,   0, -2,  0,   1,  1, -1),  // -z
    );

    fn convertNormalized3DTexCoordToCubeCoord(uvLayer: vec3f) -> vec3f {
      let layer = u32(uvLayer.z * 6.0);
      return normalize(faceMat[layer] * vec3f(uvLayer.xy, 1));
    }
    fn textureGatherCubeEmu(component: u32, t: texture_cube<f32>, s: sampler, coord: vec3f) -> vec4f {
      let uvLayer = convertCubeCoordToNormalized3DTextureCoord(coord);
      let size = vec2f(textureDimensions(t));
      let texelCoord = uvLayer.xy * size - 0.5;
      let lo = floor(texelCoord);
      let hi = ceil(texelCoord);
      var coords: array<vec2f, 4>;
      coords[0] = vec2f(lo.x, hi.y);
      coords[1] = hi;
      coords[2] = vec2f(hi.x, lo.y);
      coords[3] = lo;
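      // This ordering matches textureGather's result components:
      // x = (umin, vmax), y = (umax, vmax), z = (umax, vmin), w = (umin, vmin).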
      var texel: vec4f;
      for (var i = 0; i < 4; i++) {
        let uv = (coords[i] + 0.5) / size;
        let cubeCoord = convertNormalized3DTexCoordToCubeCoord(vec3f(uv, uvLayer.z));
        //let value = textureSampleLevel(t, s, cubeCoord, 0);

        //let uvl = convertCubeCoordToNormalized3DTextureCoord(cubeCoord);
        //let newCubeCoord = convertNormalized3DTexCoordToCubeCoord(uvl);
        //let value = textureSampleLevel(t, s, newCubeCoord, 0);

        // convert back to uvLayer so we can quantize
        let uvl = convertCubeCoordToNormalized3DTextureCoord(cubeCoord);
        //let txlCoord = floor(uv.xy * size);  // was a no-op: uv is already at a texel center
        let txlCoord = floor(uvl.xy * size);   // quantize the re-projected coord so off-edge
                                               // taps land on the neighbor face's texel centers
        let newUv = (txlCoord + 0.5) / size;
        let newCubeCoord = convertNormalized3DTexCoordToCubeCoord(vec3f(newUv, uvl.z));
        let value = textureSampleLevel(t, s, newCubeCoord, 0);
        texel[i] = value[component];
      }
      return texel;
    }

    fn avg(v: vec4f) -> f32 {
      return (v.x + v.y + v.z + v.w) / 4.0;
    }
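
    // Averaging the four gathered texels gives a box filter (fixed 1/4
    // weights). That matches true bilinear filtering only when the sample
    // point sits exactly between four texel centers, but it's enough to
    // visualize whether the gather emulation picks the right texels.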
    fn textureSampleEmuOnGatherEmu(t: texture_cube<f32>, s: sampler, coord: vec3f) -> vec4f {
      let r = textureGatherCubeEmu(0, ourTexture, ourSampler, coord);
      let g = textureGatherCubeEmu(1, ourTexture, ourSampler, coord);
      let b = textureGatherCubeEmu(2, ourTexture, ourSampler, coord);
      let a = textureGatherCubeEmu(3, ourTexture, ourSampler, coord);
      return vec4f(avg(r), avg(g), avg(b), avg(a));
    }

    fn textureSampleEmuOnGather(t: texture_cube<f32>, s: sampler, coord: vec3f) -> vec4f {
      let r = textureGather(0, ourTexture, ourSampler, coord);
      let g = textureGather(1, ourTexture, ourSampler, coord);
      let b = textureGather(2, ourTexture, ourSampler, coord);
      let a = textureGather(3, ourTexture, ourSampler, coord);
      return vec4f(avg(r), avg(g), avg(b), avg(a));
    }
  `;

  const skyBoxModule = device.createShaderModule({
    code: `
      ${textureGatherWGSL}
      struct Uniforms {
        viewDirectionProjectionInverse: mat4x4f,
      };

      struct VSOutput {
        @builtin(position) position: vec4f,
        @location(0) pos: vec4f,
      };

      @group(0) @binding(0) var<uniform> uni: Uniforms;
      @group(0) @binding(1) var ourSampler: sampler;
      @group(0) @binding(2) var ourTexture: texture_cube<f32>;

      @vertex fn vs(@builtin(vertex_index) vNdx: u32) -> VSOutput {
        let pos = array(
          vec2f(-1, 3),
          vec2f(-1,-1),
          vec2f( 3,-1),
        );
        var vsOut: VSOutput;
        vsOut.position = vec4f(pos[vNdx], 1, 1);
        vsOut.pos = vsOut.position;
        return vsOut;
      }

      @fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
        let t = uni.viewDirectionProjectionInverse * vsOut.pos;
        return textureSampleEmuOnGatherEmu(ourTexture, ourSampler, normalize(t.xyz / t.w) * vec3f(1, 1, -1));
      }
    `,
  });

  const skyBoxPipeline = device.createRenderPipeline({
    label: 'no attributes',
    layout: 'auto',
    vertex: {
      module: skyBoxModule,
    },
    fragment: {
      module: skyBoxModule,
      targets: [{ format: presentationFormat }],
    },
    depthStencil: {
      depthWriteEnabled: true,
      depthCompare: 'less-equal',
      format: 'depth24plus',
    },
  });

  const envMapModule = device.createShaderModule({
    code: `
      ${textureGatherWGSL}
      struct Uniforms {
        projection: mat4x4f,
        view: mat4x4f,
        world: mat4x4f,
        cameraPosition: vec3f,
      };

      struct Vertex {
        @location(0) position: vec4f,
        @location(1) normal: vec3f,
      };

      struct VSOutput {
        @builtin(position) position: vec4f,
        @location(0) worldPosition: vec3f,
        @location(1) worldNormal: vec3f,
      };

      @group(0) @binding(0) var<uniform> uni: Uniforms;
      @group(0) @binding(1) var ourSampler: sampler;
      @group(0) @binding(2) var ourTexture: texture_cube<f32>;

      @vertex fn vs(vert: Vertex) -> VSOutput {
        var vsOut: VSOutput;
        vsOut.position = uni.projection * uni.view * uni.world * vert.position;
        vsOut.worldPosition = (uni.world * vert.position).xyz;
        vsOut.worldNormal = (uni.world * vec4f(vert.normal, 0)).xyz;
        return vsOut;
      }

      @fragment fn fs(vsOut: VSOutput) -> @location(0) vec4f {
        let worldNormal = normalize(vsOut.worldNormal);
        let eyeToSurfaceDir = normalize(vsOut.worldPosition - uni.cameraPosition);
        let direction = reflect(eyeToSurfaceDir, worldNormal);
        //return textureSample(ourTexture, ourSampler, direction * vec3f(1, 1, -1));
        //return textureSampleEmuOnGather(ourTexture, ourSampler, direction * vec3f(1, 1, -1));
        return textureSampleEmuOnGatherEmu(ourTexture, ourSampler, direction * vec3f(1, 1, -1));
      }
    `,
  });

  const envMapPipeline = device.createRenderPipeline({
    label: '2 attributes',
    layout: 'auto',
    vertex: {
      module: envMapModule,
      buffers: [
        {
          arrayStride: (3) * 4, // (3) floats, 4 bytes each
          attributes: [
            {shaderLocation: 0, offset: 0, format: 'float32x3'},  // position
          ],
        },
        {
          arrayStride: (3) * 4, // (3) floats, 4 bytes each
          attributes: [
            {shaderLocation: 1, offset: 0, format: 'float32x3'},  // normal
          ],
        },
      ],
    },
    fragment: {
      module: envMapModule,
      targets: [{ format: presentationFormat }],
    },
    primitive: {
      cullMode: 'back',
    },
    depthStencil: {
      depthWriteEnabled: true,
      depthCompare: 'less',
      format: 'depth24plus',
    },
  });

  const numMipLevels = (...sizes) => {
    const maxSize = Math.max(...sizes);
    return (1 + Math.log2(maxSize)) | 0;
  };
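  // e.g. numMipLevels(64, 64) === 7  (mip sizes 64, 32, 16, 8, 4, 2, 1)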
  function copySourcesToTexture(device, texture, sources, {flipY} = {}) {
    sources.forEach((source, layer) => {
      device.queue.copyExternalImageToTexture(
        { source, flipY, },
        { texture, origin: [0, 0, layer] },
        { width: source.width, height: source.height },
      );
    });
    if (texture.mipLevelCount > 1) {
      generateMips(device, texture);
    }
  }

  function createTextureFromSources(device, sources, options = {}) {
    // Assume all sources are the same size so just use the first one for width and height
    const source = sources[0];
    const texture = device.createTexture({
      format: 'rgba8unorm',
      mipLevelCount: options.mips ? numMipLevels(source.width, source.height) : 1,
      size: [source.width, source.height, sources.length],
      usage: GPUTextureUsage.TEXTURE_BINDING |
             GPUTextureUsage.COPY_DST |
             GPUTextureUsage.RENDER_ATTACHMENT,
    });
    copySourcesToTexture(device, texture, sources, options);
    return texture;
  }
  const generateMips = (() => {
    let sampler;
    let module;
    const pipelineByFormat = {};

    return function generateMips(device, texture) {
      if (!module) {
        module = device.createShaderModule({
          label: 'textured quad shaders for mip level generation',
          code: `
            struct VSOutput {
              @builtin(position) position: vec4f,
              @location(0) texcoord: vec2f,
            };

            @vertex fn vs(
              @builtin(vertex_index) vertexIndex : u32
            ) -> VSOutput {
              let pos = array(
                // 1st triangle
                vec2f( 0.0,  0.0),  // center
                vec2f( 1.0,  0.0),  // right, center
                vec2f( 0.0,  1.0),  // center, top

                // 2nd triangle
                vec2f( 0.0,  1.0),  // center, top
                vec2f( 1.0,  0.0),  // right, center
                vec2f( 1.0,  1.0),  // right, top
              );

              var vsOutput: VSOutput;
              let xy = pos[vertexIndex];
              vsOutput.position = vec4f(xy * 2.0 - 1.0, 0.0, 1.0);
              vsOutput.texcoord = vec2f(xy.x, 1.0 - xy.y);
              return vsOutput;
            }

            @group(0) @binding(0) var ourSampler: sampler;
            @group(0) @binding(1) var ourTexture: texture_2d<f32>;

            @fragment fn fs(fsInput: VSOutput) -> @location(0) vec4f {
              return textureSample(ourTexture, ourSampler, fsInput.texcoord);
            }
          `,
        });

        sampler = device.createSampler({
          minFilter: 'linear',
          magFilter: 'linear',
        });
      }

      if (!pipelineByFormat[texture.format]) {
        pipelineByFormat[texture.format] = device.createRenderPipeline({
          label: 'mip level generator pipeline',
          layout: 'auto',
          vertex: {
            module,
          },
          fragment: {
            module,
            targets: [{ format: texture.format }],
          },
        });
      }
      const pipeline = pipelineByFormat[texture.format];

      const encoder = device.createCommandEncoder({
        label: 'mip gen encoder',
      });

      for (let baseMipLevel = 1; baseMipLevel < texture.mipLevelCount; ++baseMipLevel) {
        for (let layer = 0; layer < texture.depthOrArrayLayers; ++layer) {
          const bindGroup = device.createBindGroup({
            layout: pipeline.getBindGroupLayout(0),
            entries: [
              { binding: 0, resource: sampler },
              {
                binding: 1,
                resource: texture.createView({
                  dimension: '2d',
                  baseMipLevel: baseMipLevel - 1,
                  mipLevelCount: 1,
                  baseArrayLayer: layer,
                  arrayLayerCount: 1,
                }),
              },
            ],
          });

          const renderPassDescriptor = {
            label: 'our basic canvas renderPass',
            colorAttachments: [
              {
                view: texture.createView({
                  dimension: '2d',
                  baseMipLevel: baseMipLevel,
                  mipLevelCount: 1,
                  baseArrayLayer: layer,
                  arrayLayerCount: 1,
                }),
                loadOp: 'clear',
                storeOp: 'store',
              },
            ],
          };

          const pass = encoder.beginRenderPass(renderPassDescriptor);
          pass.setPipeline(pipeline);
          pass.setBindGroup(0, bindGroup);
          pass.draw(6);  // call our vertex shader 6 times
          pass.end();
        }
      }

      const commandBuffer = encoder.finish();
      device.queue.submit([commandBuffer]);
    };
  })();
  async function loadImageBitmap(url) {
    const res = await fetch(url);
    const blob = await res.blob();
    return await createImageBitmap(blob, { colorSpaceConversion: 'none' });
  }

  async function createTextureFromImages(device, urls, options) {
    const images = await Promise.all(urls.map(loadImageBitmap));
    return createTextureFromSources(device, images, options);
  }

  //const texture = await createTextureFromImages(
  //    device,
  //    [
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/pos-x.jpg',
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/neg-x.jpg',
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/pos-y.jpg',
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/neg-y.jpg',
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/pos-z.jpg',
  //      'https://webgpufundamentals.org/webgpu/resources/images/leadenhall_market/neg-z.jpg',
  //    ],
  //    {mips: true, flipY: false},
  //);
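
  // Instead, use generated solid-color faces labeled +x/-x/+y/-y/+z/-z so it's
  // easy to see which face (and which neighbor) each gathered texel comes from.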
  const texture = createTextureFromSources(device, [
    makeCanvasImage({ width: 64, height: 64, borderColor: '#f00', backgroundColor: '#f00', foregroundColor: '#fff', text: ['+x'] }),
    makeCanvasImage({ width: 64, height: 64, borderColor: '#0f0', backgroundColor: '#0f0', foregroundColor: '#fff', text: ['-x'] }),
    makeCanvasImage({ width: 64, height: 64, borderColor: '#00f', backgroundColor: '#00f', foregroundColor: '#fff', text: ['+y'] }),
    makeCanvasImage({ width: 64, height: 64, borderColor: '#800', backgroundColor: '#800', foregroundColor: '#fff', text: ['-y'] }),
    makeCanvasImage({ width: 64, height: 64, borderColor: '#080', backgroundColor: '#080', foregroundColor: '#fff', text: ['+z'] }),
    makeCanvasImage({ width: 64, height: 64, borderColor: '#008', backgroundColor: '#008', foregroundColor: '#fff', text: ['-z'] }),
  ], { mips: true });
  const sampler = device.createSampler({
    magFilter: 'linear',
    minFilter: 'linear',
    mipmapFilter: 'linear',
  });

  // viewDirectionProjectionInverse
  const skyBoxUniformBufferSize = (16) * 4;
  const skyBoxUniformBuffer = device.createBuffer({
    label: 'uniforms',
    size: skyBoxUniformBufferSize,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });
  const skyBoxUniformValues = new Float32Array(skyBoxUniformBufferSize / 4);

  // offsets to the various uniform values in float32 indices
  const kViewDirectionProjectionInverseOffset = 0;
  const viewDirectionProjectionInverseValue = skyBoxUniformValues.subarray(
      kViewDirectionProjectionInverseOffset,
      kViewDirectionProjectionInverseOffset + 16);

  // projection, view, world, cameraPosition, pad
  const envMapUniformBufferSize = (16 + 16 + 16 + 3 + 1) * 4;
  const envMapUniformBuffer = device.createBuffer({
    label: 'uniforms',
    size: envMapUniformBufferSize,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });
  const envMapUniformValues = new Float32Array(envMapUniformBufferSize / 4);

  // offsets to the various uniform values in float32 indices
  const kProjectionOffset = 0;
  const kViewOffset = 16;
  const kWorldOffset = 32;
  const kCameraPositionOffset = 48;
  const projectionValue = envMapUniformValues.subarray(kProjectionOffset, kProjectionOffset + 16);
  const viewValue = envMapUniformValues.subarray(kViewOffset, kViewOffset + 16);
  const worldValue = envMapUniformValues.subarray(kWorldOffset, kWorldOffset + 16);
  const cameraPositionValue = envMapUniformValues.subarray(
      kCameraPositionOffset, kCameraPositionOffset + 3);
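
  // Why the extra float of padding: in WGSL a vec3f member has a 16-byte
  // alignment and a struct's size is rounded up to a multiple of its largest
  // member alignment, so cameraPosition occupies a full 16 bytes here.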
  //const { vertexData, indexData, numVertices } = createCubeVertices();
  const { position, normal, indices } = primitives.createSphereVertices({
    subdivisionsAxis: 64,
    subdivisionsHeight: 32,
  });
  const positionData = new Float32Array(position);
  const normalData = new Float32Array(normal);
  const indexData = new Uint16Array(indices);

  const positionBuffer = device.createBuffer({
    label: 'position buffer vertices',
    size: positionData.byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
  });
  device.queue.writeBuffer(positionBuffer, 0, positionData);

  const normalBuffer = device.createBuffer({
    label: 'normal buffer vertices',
    size: normalData.byteLength,
    usage: GPUBufferUsage.VERTEX | GPUBufferUsage.COPY_DST,
  });
  device.queue.writeBuffer(normalBuffer, 0, normalData);

  const indexBuffer = device.createBuffer({
    label: 'index buffer',
    size: indexData.byteLength,
    usage: GPUBufferUsage.INDEX | GPUBufferUsage.COPY_DST,
  });
  device.queue.writeBuffer(indexBuffer, 0, indexData);

  const envMapBindGroup = device.createBindGroup({
    label: 'bind group for object',
    layout: envMapPipeline.getBindGroupLayout(0),
    entries: [
      { binding: 0, resource: { buffer: envMapUniformBuffer }},
      { binding: 1, resource: sampler },
      { binding: 2, resource: texture.createView({dimension: 'cube'}) },
    ],
  });

  const renderPassDescriptor = {
    label: 'our basic canvas renderPass',
    colorAttachments: [
      {
        // view: <- to be filled out when we render
        loadOp: 'clear',
        storeOp: 'store',
      },
    ],
    depthStencilAttachment: {
      // view: <- to be filled out when we render
      depthClearValue: 1.0,
      depthLoadOp: 'clear',
      depthStoreOp: 'store',
    },
  };

  let depthTexture;
  const skyBoxBindGroups = new Map();

  const radToDegOptions = { min: -1, max: 179, step: 1, converters: GUI.converters.radToDeg };
  const settings = {
    skyBoxBaseMipLevel: 3,
    cameraRadius: 2,
    fov: 120 * Math.PI / 180,
  };
  const gui = new GUI();
  gui.add(settings, 'skyBoxBaseMipLevel', 0, 9, 1);
  gui.add(settings, 'cameraRadius', 2, 10);
  gui.add(settings, 'fov', radToDegOptions);
  function render(time) {
    time *= 0.001;

    const baseMipLevel = Math.min(settings.skyBoxBaseMipLevel, texture.mipLevelCount - 1);
    const skyBoxBindGroup = skyBoxBindGroups.get(baseMipLevel) ?? device.createBindGroup({
      label: 'bind group for object',
      layout: skyBoxPipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: { buffer: skyBoxUniformBuffer }},
        { binding: 1, resource: sampler },
        { binding: 2, resource: texture.createView({dimension: 'cube', baseMipLevel}) },
      ],
    });
    skyBoxBindGroups.set(baseMipLevel, skyBoxBindGroup);

    // Get the current texture from the canvas context and
    // set it as the texture to render to.
    const canvasTexture = context.getCurrentTexture();
    renderPassDescriptor.colorAttachments[0].view = canvasTexture.createView();

    // If we don't have a depth texture OR if its size is different
    // from the canvasTexture, make a new depth texture
    if (!depthTexture ||
        depthTexture.width !== canvasTexture.width ||
        depthTexture.height !== canvasTexture.height) {
      if (depthTexture) {
        depthTexture.destroy();
      }
      depthTexture = device.createTexture({
        size: [canvasTexture.width, canvasTexture.height],
        format: 'depth24plus',
        usage: GPUTextureUsage.RENDER_ATTACHMENT,
      });
    }
    renderPassDescriptor.depthStencilAttachment.view = depthTexture.createView();

    const encoder = device.createCommandEncoder();
    const pass = encoder.beginRenderPass(renderPassDescriptor);

    const aspect = canvas.clientWidth / canvas.clientHeight;
    mat4.perspective(
        settings.fov,
        aspect,
        0.1,  // zNear
        10,   // zFar
        projectionValue,
    );

    // Camera going in circle from origin looking at origin
    const r = settings.cameraRadius;
    cameraPositionValue.set([Math.cos(time * .1) * r, 0, Math.sin(time * .1) * r]);
    const view = mat4.lookAt(
      cameraPositionValue,
      [0, 0, 0],  // target
      [0, 1, 0],  // up
    );
    // Copy the view into the viewValue since we're going
    // to zero out the view's translation
    viewValue.set(view);

    // We only care about direction so remove the translation
    view[12] = 0;
    view[13] = 0;
    view[14] = 0;
    const viewProjection = mat4.multiply(projectionValue, view);
    mat4.inverse(viewProjection, viewDirectionProjectionInverseValue);

    // Rotate the sphere
    mat4.identity(worldValue);
    mat4.rotateX(worldValue, time * -0.1, worldValue);
    mat4.rotateY(worldValue, time * -0.2, worldValue);

    // upload the uniform values to the uniform buffers
    device.queue.writeBuffer(envMapUniformBuffer, 0, envMapUniformValues);
    device.queue.writeBuffer(skyBoxUniformBuffer, 0, skyBoxUniformValues);

    // Draw the sphere
    pass.setPipeline(envMapPipeline);
    pass.setVertexBuffer(0, positionBuffer);
    pass.setVertexBuffer(1, normalBuffer);
    pass.setIndexBuffer(indexBuffer, 'uint16');
    pass.setBindGroup(0, envMapBindGroup);
    pass.drawIndexed(indexBuffer.size / 2);  // 2 bytes per uint16 index

    // Draw the skyBox
    pass.setPipeline(skyBoxPipeline);
    pass.setBindGroup(0, skyBoxBindGroup);
    pass.draw(3);

    pass.end();

    const commandBuffer = encoder.finish();
    device.queue.submit([commandBuffer]);

    requestAnimationFrame(render);
  }
  requestAnimationFrame(render);

  const observer = new ResizeObserver(entries => {
    for (const entry of entries) {
      const canvas = entry.target;
      const width = entry.contentBoxSize[0].inlineSize;
      const height = entry.contentBoxSize[0].blockSize;
      canvas.width = Math.max(1, Math.min(width, device.limits.maxTextureDimension2D));
      canvas.height = Math.max(1, Math.min(height, device.limits.maxTextureDimension2D));
    }
  });
  observer.observe(canvas);
}
function fail(msg) {
  alert(msg);
}

function makeCanvasImage({
  width,
  height,
  borderColor,
  backgroundColor,
  foregroundColor,
  font,
  text,
}/*: {
  width: number;
  height: number;
  borderColor: string;
  backgroundColor: string;
  foregroundColor: string;
  font?: string;
  text: string[];
}*/) {
  const canvas = new OffscreenCanvas(width, height);
  const ctx = canvas.getContext('2d');

  ctx.fillStyle = borderColor;
  ctx.fillRect(0, 0, width, height);

  const borderSize = 10;
  ctx.fillStyle = backgroundColor;
  ctx.fillRect(
    borderSize,
    borderSize,
    width - borderSize * 2,
    height - borderSize * 2,
  );

  ctx.fillStyle = foregroundColor;
  // note: in the CSS font shorthand the weight must come before the size
  ctx.font =
    font ?? `bold ${Math.ceil(Math.min(width, height) * 0.8)}px monospace`;
  ctx.textAlign = 'left';
  ctx.textBaseline = 'top';
  for (const t of text) {
    const m = ctx.measureText(t);
    // center the text using its measured bounding box
    ctx.fillText(
      t,
      (width - m.actualBoundingBoxRight + m.actualBoundingBoxLeft) / 2,
      (height - m.actualBoundingBoxDescent + m.actualBoundingBoxAscent) / 2,
    );
  }
  return canvas;
}

main();