diff --git a/build/RayTracingRenderer.es5.js b/build/RayTracingRenderer.es5.js index 35cbf1c..69fe952 100644 --- a/build/RayTracingRenderer.es5.js +++ b/build/RayTracingRenderer.es5.js @@ -378,7 +378,7 @@ throw gl.getProgramInfoLog(program); } function getUniforms(gl, program) { - var uniforms = []; + var uniforms = {}; var count = gl.getProgramParameter(program, gl.ACTIVE_UNIFORMS); for (var i = 0; i < count; i++) { @@ -389,121 +389,126 @@ var location = gl.getUniformLocation(program, name); if (location) { - uniforms.push({ - name: name, + uniforms[name] = { type: type, location: location - }); + }; } } return uniforms; } - function makeUniformBuffer(gl, program, blockName) { - var blockIndex = gl.getUniformBlockIndex(program, blockName); - var blockSize = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_DATA_SIZE); - var uniforms = getUniformBlockInfo(gl, program, blockIndex); - var buffer = gl.createBuffer(); - gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); - gl.bufferData(gl.UNIFORM_BUFFER, blockSize, gl.STATIC_DRAW); - var data = new DataView(new ArrayBuffer(blockSize)); + function getAttributes(gl, program) { + var attributes = {}; + var count = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES); - function set(name, value) { - if (!uniforms[name]) { - // console.warn('No uniform property with name ', name); - return; - } + for (var i = 0; i < count; i++) { + var _gl$getActiveAttrib = gl.getActiveAttrib(program, i), + name = _gl$getActiveAttrib.name; - var _uniforms$name = uniforms[name], - type = _uniforms$name.type, - size = _uniforms$name.size, - offset = _uniforms$name.offset, - stride = _uniforms$name.stride; + if (name) { + attributes[name] = gl.getAttribLocation(program, name); + } + } - switch (type) { - case gl.FLOAT: - setData(data, 'setFloat32', size, offset, stride, 1, value); - break; + return attributes; + } - case gl.FLOAT_VEC2: - setData(data, 'setFloat32', size, offset, stride, 2, value); - break; + function decomposeScene(scene) { + var meshes = []; + var directionalLights = []; + var ambientLights = []; + var environmentLights = []; + scene.traverse(function (child) { + if (child.isMesh) { + if (!child.geometry || !child.geometry.getAttribute('position')) { + console.warn(child, 'must have a geometry property with a position attribute'); + } else if (!child.material.isMeshStandardMaterial) { + console.warn(child, 'must use MeshStandardMaterial in order to be rendered.'); + } else { + meshes.push(child); + } + } - case gl.FLOAT_VEC3: - setData(data, 'setFloat32', size, offset, stride, 3, value); - break; + if (child.isDirectionalLight) { + directionalLights.push(child); + } - case gl.FLOAT_VEC4: - setData(data, 'setFloat32', size, offset, stride, 4, value); - break; + if (child.isAmbientLight) { + ambientLights.push(child); + } - case gl.INT: - setData(data, 'setInt32', size, offset, stride, 1, value); - break; + if (child.isEnvironmentLight) { + if (environmentLights.length > 1) { + console.warn(environmentLights, 'only one environment light can be used per scene'); + } // Valid lights have HDR texture map in RGBEEncoding - case gl.INT_VEC2: - setData(data, 'setInt32', size, offset, stride, 2, value); - break; - case gl.INT_VEC3: - setData(data, 'setInt32', size, offset, stride, 3, value); - break; + if (isHDRTexture(child)) { + environmentLights.push(child); + } else { + console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); + } + } + }); + var background = scene.background; + return { + background: 
background, + meshes: meshes, + directionalLights: directionalLights, + ambientLights: ambientLights, + environmentLights: environmentLights + }; + } - case gl.INT_VEC4: - setData(data, 'setInt32', size, offset, stride, 4, value); - break; + function isHDRTexture(texture) { + return texture.map && texture.map.image && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); + } - case gl.BOOL: - setData(data, 'setUint32', size, offset, stride, 1, value); - break; + function makeFramebuffer(gl, _ref) { + var color = _ref.color, + depth = _ref.depth; + var framebuffer = gl.createFramebuffer(); - default: - console.warn('UniformBuffer: Unsupported type'); - } + function bind() { + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); } - function bind(index) { - gl.bufferSubData(gl.UNIFORM_BUFFER, 0, data); - gl.bindBufferBase(gl.UNIFORM_BUFFER, index, buffer); + function unbind() { + gl.bindFramebuffer(gl.FRAMEBUFFER, null); } - return { - set: set, - bind: bind - }; - } - - function getUniformBlockInfo(gl, program, blockIndex) { - var indices = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES); - var offset = gl.getActiveUniforms(program, indices, gl.UNIFORM_OFFSET); - var stride = gl.getActiveUniforms(program, indices, gl.UNIFORM_ARRAY_STRIDE); - var uniforms = {}; + function init() { + bind(); + var drawBuffers = []; - for (var i = 0; i < indices.length; i++) { - var _gl$getActiveUniform2 = gl.getActiveUniform(program, indices[i]), - name = _gl$getActiveUniform2.name, - type = _gl$getActiveUniform2.type, - size = _gl$getActiveUniform2.size; + for (var location in color) { + location = Number(location); - uniforms[name] = { - type: type, - size: size, - offset: offset[i], - stride: stride[i] - }; - } + if (location === undefined) { + console.error('invalid location'); + } - return uniforms; - } + var tex = color[location]; + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + location, tex.target, tex.texture, 0); + drawBuffers.push(gl.COLOR_ATTACHMENT0 + location); + } - function setData(dataView, setter, size, offset, stride, components, value) { - var l = Math.min(value.length / components, size); + gl.drawBuffers(drawBuffers); - for (var i = 0; i < l; i++) { - for (var k = 0; k < components; k++) { - dataView[setter](offset + i * stride + k * 4, value[components * i + k], true); + if (depth) { + gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, depth.target, depth.texture); } + + unbind(); } + + init(); + return { + color: color, + bind: bind, + unbind: unbind + }; } var vertex = { @@ -515,54 +520,32 @@ var uniformInfo = getUniforms(gl, program); var uniforms = {}; var needsUpload = []; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - try { - for (var _iterator = uniformInfo[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var _step$value = _step.value, - name = _step$value.name, - type = _step$value.type, - location = _step$value.location; - var uniform = { - type: type, - location: location, - v0: 0, - v1: 0, - v2: 0, - v3: 0 - }; - uniforms[name] = uniform; - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } + for (var name in 
uniformInfo) { + var _uniformInfo$name = uniformInfo[name], + type = _uniformInfo$name.type, + location = _uniformInfo$name.location; + var uniform = { + type: type, + location: location, + v0: 0, + v1: 0, + v2: 0, + v3: 0 + }; + uniforms[name] = uniform; } - var failedUnis = new Set(); - function setUniform(name, v0, v1, v2, v3) { // v0 - v4 are the values to be passed to the uniform // v0 can either be a number or an array, and v1-v3 are optional var uni = uniforms[name]; if (!uni) { - if (!failedUnis.has(name)) { - console.warn("Uniform \"".concat(name, "\" does not exist in shader")); - failedUnis.add(name); - } - + // if (!failedUnis.has(name)) { + // console.warn(`Uniform "${name}" does not exist in shader`); + // failedUnis.add(name); + // } return; } @@ -654,18 +637,20 @@ var nextTexUnit = 1; function setTexture(name, texture) { - var cachedTex = textures[name]; + if (!texture) { + return; + } - if (!cachedTex) { + if (!textures[name]) { var unit = nextTexUnit++; uniformSetter.setUniform(name, unit); - cachedTex = { - unit: unit + textures[name] = { + unit: unit, + tex: texture }; - textures[name] = cachedTex; + } else { + textures[name].tex = texture; } - - cachedTex.tex = texture; } function bindTextures() { @@ -689,6 +674,7 @@ } return { + attribLocs: getAttributes(gl, program), bindTextures: bindTextures, program: program, setTexture: setTexture, @@ -705,7 +691,7 @@ str += addDefines(defines); } - if (type === gl.FRAGMENT_SHADER) { + if (type === gl.FRAGMENT_SHADER && shader.outputs) { str += addOutputs(shader.outputs); } @@ -794,18 +780,21 @@ } function makeFullscreenQuad(gl) { - // TODO: use VAOs + var vao = gl.createVertexArray(); + gl.bindVertexArray(vao); gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW); // vertex shader should set layout(location = 0) on position attribute var posLoc = 0; gl.enableVertexAttribArray(posLoc); gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + gl.bindVertexArray(null); var vertexShader = makeVertexShader(gl, { vertex: vertex }); function draw() { + gl.bindVertexArray(vao); gl.drawArrays(gl.TRIANGLES, 0, 6); } @@ -815,1133 +804,1680 @@ }; } - // Reorders the elements in the range [first, last) in such a way that - // all elements for which the comparator c returns true - // precede the elements for which comparator c returns false. - function partition(array, compare) { - var left = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; - var right = arguments.length > 3 && arguments[3] !== undefined ? 
arguments[3] : array.length; + var vertex$1 = { + source: "\n in vec3 aPosition;\n in vec3 aNormal;\n in vec2 aUv;\n in ivec2 aMaterialMeshIndex;\n\n uniform mat4 projView;\n\n out vec3 vPosition;\n out vec3 vNormal;\n out vec2 vUv;\n flat out ivec2 vMaterialMeshIndex;\n\n void main() {\n vPosition = aPosition;\n vNormal = aNormal;\n vUv = aUv;\n vMaterialMeshIndex = aMaterialMeshIndex;\n gl_Position = projView * vec4(aPosition, 1);\n }\n" + }; - while (left !== right) { - while (compare(array[left])) { - left++; + var constants$1 = "\n #define PI 3.14159265359\n #define TWOPI 6.28318530718\n #define INVPI 0.31830988618\n #define INVPI2 0.10132118364\n #define EPS 0.0005\n #define INF 1.0e999\n\n #define ROUGHNESS_MIN 0.03\n"; - if (left === right) { - return left; - } - } + var materialBuffer = "\n\nuniform Materials {\n vec4 colorAndMaterialType[NUM_MATERIALS];\n vec4 roughnessMetalnessNormalScale[NUM_MATERIALS];\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS];\n #endif\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS)\n vec4 diffuseNormalMapSize[NUM_DIFFUSE_NORMAL_MAPS];\n #endif\n\n #if defined(NUM_PBR_MAPS)\n vec2 pbrMapSize[NUM_PBR_MAPS];\n #endif\n} materials;\n\n#ifdef NUM_DIFFUSE_MAPS\n uniform mediump sampler2DArray diffuseMap;\n#endif\n\n#ifdef NUM_NORMAL_MAPS\n uniform mediump sampler2DArray normalMap;\n#endif\n\n#ifdef NUM_PBR_MAPS\n uniform mediump sampler2DArray pbrMap;\n#endif\n\nfloat getMatType(int materialIndex) {\n return materials.colorAndMaterialType[materialIndex].w;\n}\n\nvec3 getMatColor(int materialIndex, vec2 uv) {\n vec3 color = materials.colorAndMaterialType[materialIndex].rgb;\n\n #ifdef NUM_DIFFUSE_MAPS\n int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x;\n if (diffuseMapIndex >= 0) {\n color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb;\n }\n #endif\n\n return color;\n}\n\nfloat getMatRoughness(int materialIndex, vec2 uv) {\n float roughness = materials.roughnessMetalnessNormalScale[materialIndex].x;\n\n #ifdef NUM_PBR_MAPS\n int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z;\n if (roughnessMapIndex >= 0) {\n roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g;\n }\n #endif\n\n return roughness;\n}\n\nfloat getMatMetalness(int materialIndex, vec2 uv) {\n float metalness = materials.roughnessMetalnessNormalScale[materialIndex].y;\n\n #ifdef NUM_PBR_MAPS\n int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w;\n if (metalnessMapIndex >= 0) {\n metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b;\n }\n #endif\n\n return metalness;\n}\n\n#ifdef NUM_NORMAL_MAPS\nvec3 getMatNormal(int materialIndex, vec2 uv, vec3 normal, vec3 dp1, vec3 dp2, vec2 duv1, vec2 duv2) {\n int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y;\n if (normalMapIndex >= 0) {\n // http://www.thetenthplanet.de/archives/1180\n // Compute co-tangent and co-bitangent vectors\n vec3 dp2perp = cross(dp2, normal);\n vec3 dp1perp = cross(normal, dp1);\n vec3 dpdu = dp2perp * duv1.x + dp1perp * duv2.x;\n vec3 dpdv = dp2perp * duv1.y + dp1perp * duv2.y;\n float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv)));\n dpdu *= invmax;\n dpdv *= invmax;\n\n vec3 n = 2.0 * 
texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0;\n n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw;\n\n mat3 tbn = mat3(dpdu, dpdv, normal);\n\n return normalize(tbn * n);\n } else {\n return normal;\n }\n}\n#endif\n"; - do { - right--; + var fragment = { + outputs: ['position', 'normal', 'faceNormal', 'color', 'matProps'], + includes: [constants$1, materialBuffer], + source: "\n in vec3 vPosition;\n in vec3 vNormal;\n in vec2 vUv;\n flat in ivec2 vMaterialMeshIndex;\n\n vec3 faceNormals(vec3 pos) {\n vec3 fdx = dFdx(pos);\n vec3 fdy = dFdy(pos);\n return cross(fdx, fdy);\n }\n\n void main() {\n int materialIndex = vMaterialMeshIndex.x;\n int meshIndex = vMaterialMeshIndex.y;\n\n vec2 uv = fract(vUv);\n\n vec3 color = getMatColor(materialIndex, uv);\n float roughness = getMatRoughness(materialIndex, uv);\n float metalness = getMatMetalness(materialIndex, uv);\n float materialType = getMatType(materialIndex);\n\n roughness = clamp(roughness, ROUGHNESS_MIN, 1.0);\n metalness = clamp(metalness, 0.0, 1.0);\n\n vec3 normal = vNormal;\n vec3 faceNormal = faceNormals(vPosition);\n normal *= sign(dot(normal, faceNormal));\n\n #ifdef NUM_NORMAL_MAPS\n vec3 dp1 = dFdx(vPosition);\n vec3 dp2 = dFdy(vPosition);\n vec2 duv1 = dFdx(vUv);\n vec2 duv2 = dFdy(vUv);\n normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2);\n #endif\n\n out_position = vec4(vPosition, float(meshIndex) + EPS);\n out_normal = vec4(normal, materialType);\n out_faceNormal = vec4(faceNormal, 0);\n out_color = vec4(color, 0);\n out_matProps = vec4(roughness, metalness, 0, 0);\n }\n" + }; - if (left === right) { - return left; - } - } while (!compare(array[right])); + function makeGBufferPass(gl, _ref) { + var materialBuffer = _ref.materialBuffer, + mergedMesh = _ref.mergedMesh; + var renderPass = makeRenderPass(gl, { + defines: materialBuffer.defines, + vertex: vertex$1, + fragment: fragment + }); + renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); + renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); + renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); + var geometry = mergedMesh.geometry; + var elementCount = geometry.getIndex().count; + var vao = gl.createVertexArray(); + gl.bindVertexArray(vao); + uploadAttributes(gl, renderPass, geometry); + gl.bindVertexArray(null); + var jitterX = 0; + var jitterY = 0; - swap(array, left, right); - left++; + function setJitter(x, y) { + jitterX = x; + jitterY = y; } - return left; - } // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: - // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. - // All of the elements before this new nth element compare to true with elements after the nth element + var currentCamera; - function nthElement(array, compare) { - var left = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; - var right = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : array.length; - var k = arguments.length > 4 && arguments[4] !== undefined ? 
arguments[4] : Math.floor((left + right) / 2); + function setCamera(camera) { + currentCamera = camera; + } - for (var i = left; i <= k; i++) { - var minIndex = i; - var minValue = array[i]; + function calcCamera() { + projView.copy(currentCamera.projectionMatrix); + projView.elements[8] += 2 * jitterX; + projView.elements[9] += 2 * jitterY; + projView.multiply(currentCamera.matrixWorldInverse); + renderPass.setUniform('projView', projView.elements); + } - for (var j = i + 1; j < right; j++) { - if (!compare(minValue, array[j])) { - minIndex = j; - minValue = array[j]; - swap(array, i, minIndex); - } - } + var projView = new THREE$1.Matrix4(); + + function draw() { + calcCamera(); + gl.bindVertexArray(vao); + renderPass.useProgram(); + gl.enable(gl.DEPTH_TEST); + gl.drawElements(gl.TRIANGLES, elementCount, gl.UNSIGNED_INT, 0); + gl.disable(gl.DEPTH_TEST); } - } - function swap(array, a, b) { - var x = array[b]; - array[b] = array[a]; - array[a] = x; + return { + draw: draw, + outputLocs: renderPass.outputLocs, + setCamera: setCamera, + setJitter: setJitter + }; } - // Create a bounding volume hierarchy of scene geometry - var size = new THREE$1.Vector3(); - function bvhAccel(geometry, materialIndices) { - var primitiveInfo = makePrimitiveInfo(geometry, materialIndices); - var node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); - return node; + function uploadAttributes(gl, renderPass, geometry) { + setAttribute(gl, renderPass.attribLocs.aPosition, geometry.getAttribute('position')); + setAttribute(gl, renderPass.attribLocs.aNormal, geometry.getAttribute('normal')); + setAttribute(gl, renderPass.attribLocs.aUv, geometry.getAttribute('uv')); + setAttribute(gl, renderPass.attribLocs.aMaterialMeshIndex, geometry.getAttribute('materialMeshIndex')); + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gl.createBuffer()); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, geometry.getIndex().array, gl.STATIC_DRAW); } - function flattenBvh(bvh) { - var flat = []; - var isBounds = []; - var splitAxisMap = { - x: 0, - y: 1, - z: 2 - }; - var maxDepth = 1; - var traverse = function traverse(node) { - var depth = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 1; - maxDepth = Math.max(depth, maxDepth); - - if (node.primitives) { - for (var i = 0; i < node.primitives.length; i++) { - var p = node.primitives[i]; - flat.push(p.indices[0], p.indices[1], p.indices[2], node.primitives.length, p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex); - isBounds.push(false); - } - } else { - var bounds = node.bounds; - flat.push(bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild - ); + function setAttribute(gl, location, bufferAttribute) { + var itemSize = bufferAttribute.itemSize, + array = bufferAttribute.array; + gl.enableVertexAttribArray(location); + gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); + gl.bufferData(gl.ARRAY_BUFFER, array, gl.STATIC_DRAW); - var _i = flat.length - 1; + if (array instanceof Float32Array) { + gl.vertexAttribPointer(location, itemSize, gl.FLOAT, false, 0, 0); + } else if (array instanceof Int32Array) { + gl.vertexAttribIPointer(location, itemSize, gl.INT, 0, 0); + } else { + throw 'Unsupported buffer type'; + } + } - isBounds.push(true); - traverse(node.child0, depth + 1); - flat[_i] = flat.length / 4; // pointer to second child + function makeUniformBuffer(gl, program, blockName) { + var blockIndex = gl.getUniformBlockIndex(program, blockName); + var blockSize = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_DATA_SIZE); + var uniforms = getUniformBlockInfo(gl, program, blockIndex); + var buffer = gl.createBuffer(); + gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); + gl.bufferData(gl.UNIFORM_BUFFER, blockSize, gl.STATIC_DRAW); + var data = new DataView(new ArrayBuffer(blockSize)); - traverse(node.child1, depth + 1); + function set(name, value) { + if (!uniforms[name]) { + // console.warn('No uniform property with name ', name); + return; } - }; - traverse(bvh); - var buffer = new ArrayBuffer(4 * flat.length); - var floatView = new Float32Array(buffer); - var intView = new Int32Array(buffer); + var _uniforms$name = uniforms[name], + type = _uniforms$name.type, + size = _uniforms$name.size, + offset = _uniforms$name.offset, + stride = _uniforms$name.stride; - for (var i = 0; i < isBounds.length; i++) { - var k = 8 * i; + switch (type) { + case gl.FLOAT: + setData(data, 'setFloat32', size, offset, stride, 1, value); + break; - if (isBounds[i]) { - floatView[k] = flat[k]; - floatView[k + 1] = flat[k + 1]; - floatView[k + 2] = flat[k + 2]; - intView[k + 3] = flat[k + 3]; - } else { - intView[k] = flat[k]; - intView[k + 1] = flat[k + 1]; - intView[k + 2] = flat[k + 2]; - intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle + case gl.FLOAT_VEC2: + setData(data, 'setFloat32', size, offset, stride, 2, value); + break; + + case gl.FLOAT_VEC3: + setData(data, 'setFloat32', size, offset, stride, 3, value); + break; + + case gl.FLOAT_VEC4: + setData(data, 'setFloat32', size, offset, stride, 4, value); + break; + + case gl.INT: + setData(data, 'setInt32', size, offset, stride, 1, value); + break; + + case gl.INT_VEC2: + setData(data, 'setInt32', size, offset, stride, 2, value); + break; + + case gl.INT_VEC3: + setData(data, 'setInt32', size, offset, stride, 3, value); + break; + + case gl.INT_VEC4: + setData(data, 'setInt32', size, offset, stride, 4, value); + break; + + case gl.BOOL: + setData(data, 'setUint32', size, offset, stride, 1, value); + break; + + default: + console.warn('UniformBuffer: Unsupported type'); } + } - floatView[k + 4] = flat[k + 4]; - 
floatView[k + 5] = flat[k + 5]; - floatView[k + 6] = flat[k + 6]; - intView[k + 7] = flat[k + 7]; + function bind(index) { + gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); + gl.bufferSubData(gl.UNIFORM_BUFFER, 0, data); + gl.bindBufferBase(gl.UNIFORM_BUFFER, index, buffer); } return { - maxDepth: maxDepth, - count: flat.length / 4, - buffer: floatView + set: set, + bind: bind }; } - function makePrimitiveInfo(geometry, materialIndices) { - var primitiveInfo = []; - var indices = geometry.getIndex().array; - var position = geometry.getAttribute('position'); - var v0 = new THREE$1.Vector3(); - var v1 = new THREE$1.Vector3(); - var v2 = new THREE$1.Vector3(); - var e0 = new THREE$1.Vector3(); - var e1 = new THREE$1.Vector3(); + function getUniformBlockInfo(gl, program, blockIndex) { + var indices = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES); + var offset = gl.getActiveUniforms(program, indices, gl.UNIFORM_OFFSET); + var stride = gl.getActiveUniforms(program, indices, gl.UNIFORM_ARRAY_STRIDE); + var uniforms = {}; - for (var i = 0; i < indices.length; i += 3) { - var bounds = new THREE$1.Box3(); - v0.fromBufferAttribute(position, indices[i]); - v1.fromBufferAttribute(position, indices[i + 1]); - v2.fromBufferAttribute(position, indices[i + 2]); - e0.subVectors(v2, v0); - e1.subVectors(v1, v0); - bounds.expandByPoint(v0); - bounds.expandByPoint(v1); - bounds.expandByPoint(v2); - var info = { - bounds: bounds, - center: bounds.getCenter(new THREE$1.Vector3()), - indices: [indices[i], indices[i + 1], indices[i + 2]], - faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), - materialIndex: materialIndices[i / 3] + for (var i = 0; i < indices.length; i++) { + var _gl$getActiveUniform = gl.getActiveUniform(program, indices[i]), + name = _gl$getActiveUniform.name, + type = _gl$getActiveUniform.type, + size = _gl$getActiveUniform.size; + + uniforms[name] = { + type: type, + size: size, + offset: offset[i], + stride: stride[i] }; - primitiveInfo.push(info); } - return primitiveInfo; + return uniforms; } - function recursiveBuild(primitiveInfo, start, end) { - var bounds = new THREE$1.Box3(); + function setData(dataView, setter, size, offset, stride, components, value) { + var l = Math.min(value.length / components, size); - for (var i = start; i < end; i++) { - bounds.union(primitiveInfo[i].bounds); + for (var i = 0; i < l; i++) { + for (var k = 0; k < components; k++) { + dataView[setter](offset + i * stride + k * 4, value[components * i + k], true); + } } + } - var nPrimitives = end - start; + function clamp(x, min, max) { + return Math.min(Math.max(x, min), max); + } + function shuffle(arr) { + for (var i = arr.length - 1; i > 0; i--) { + var j = Math.floor(Math.random() * (i + 1)); + var x = arr[i]; + arr[i] = arr[j]; + arr[j] = x; + } - if (nPrimitives === 1) { - return makeLeafNode(primitiveInfo.slice(start, end), bounds); - } else { - var centroidBounds = new THREE$1.Box3(); + return arr; + } + function numberArraysEqual(a, b) { + var eps = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : 1e-4; - for (var _i2 = start; _i2 < end; _i2++) { - centroidBounds.expandByPoint(primitiveInfo[_i2].center); + for (var i = 0; i < a.length; i++) { + if (Math.abs(a[i] - b[i]) > eps) { + return false; } + } - var dim = maximumExtent(centroidBounds); - var mid = Math.floor((start + end) / 2); // middle split method - // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; - // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); - // if (mid === start || mid === end) { - // mid = Math.floor((start + end) / 2); - // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); - // } - // surface area heuristic method + return true; + } - if (nPrimitives <= 4) { - nthElement(primitiveInfo, function (a, b) { - return a.center[dim] < b.center[dim]; - }, start, end, mid); - } else { - var buckets = []; + function makeTexture(gl, params) { + var _params$width = params.width, + width = _params$width === void 0 ? null : _params$width, + _params$height = params.height, + height = _params$height === void 0 ? null : _params$height, + _params$data = params.data, + data = _params$data === void 0 ? null : _params$data, + _params$length = params.length, + length = _params$length === void 0 ? 1 : _params$length, + _params$channels = params.channels, + channels = _params$channels === void 0 ? null : _params$channels, + _params$storage = params.storage, + storage = _params$storage === void 0 ? null : _params$storage, + _params$flipY = params.flipY, + flipY = _params$flipY === void 0 ? false : _params$flipY, + _params$gammaCorrecti = params.gammaCorrection, + gammaCorrection = _params$gammaCorrecti === void 0 ? false : _params$gammaCorrecti, + _params$wrapS = params.wrapS, + wrapS = _params$wrapS === void 0 ? gl.CLAMP_TO_EDGE : _params$wrapS, + _params$wrapT = params.wrapT, + wrapT = _params$wrapT === void 0 ? gl.CLAMP_TO_EDGE : _params$wrapT, + _params$minFilter = params.minFilter, + minFilter = _params$minFilter === void 0 ? gl.NEAREST : _params$minFilter, + _params$magFilter = params.magFilter, + magFilter = _params$magFilter === void 0 ? gl.NEAREST : _params$magFilter; + width = width || data.width || 0; + height = height || data.height || 0; + var texture = gl.createTexture(); + var target; + var dataArray; // if data is a JS array but not a TypedArray, assume data is an array of images and create a GL Array Texture - for (var _i3 = 0; _i3 < 12; _i3++) { - buckets.push({ - bounds: new THREE$1.Box3(), - count: 0 - }); - } + if (Array.isArray(data)) { + dataArray = data; + data = dataArray[0]; + } - for (var _i4 = start; _i4 < end; _i4++) { - var b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[_i4].center)); + target = dataArray || length > 1 ? 
gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(target, texture); + gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); + gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); + gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); - if (b === buckets.length) { - b = buckets.length - 1; - } + if (!channels) { + if (data && data.length) { + channels = data.length / (width * height); // infer number of channels from data size + } else { + channels = 4; + } + } - buckets[b].count++; - buckets[b].bounds.union(primitiveInfo[_i4].bounds); - } + channels = clamp(channels, 1, 4); - var cost = []; + var _getTextureFormat = getTextureFormat(gl, channels, storage, data, gammaCorrection), + type = _getTextureFormat.type, + format = _getTextureFormat.format, + internalFormat = _getTextureFormat.internalFormat; - for (var _i5 = 0; _i5 < buckets.length - 1; _i5++) { - var b0 = new THREE$1.Box3(); - var b1 = new THREE$1.Box3(); - var count0 = 0; - var count1 = 0; - - for (var j = 0; j <= _i5; j++) { - b0.union(buckets[j].bounds); - count0 += buckets[j].count; - } - - for (var _j = _i5 + 1; _j < buckets.length; _j++) { - b1.union(buckets[_j].bounds); - count1 += buckets[_j].count; - } - - cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); - } - - var minCost = cost[0]; - var minCostSplitBucket = 0; - - for (var _i6 = 1; _i6 < cost.length; _i6++) { - if (cost[_i6] < minCost) { - minCost = cost[_i6]; - minCostSplitBucket = _i6; - } - } - - mid = partition(primitiveInfo, function (p) { - var b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); + if (dataArray) { + gl.texStorage3D(target, 1, internalFormat, width, height, dataArray.length); - if (b === buckets.length) { - b = buckets.length - 1; - } + for (var i = 0; i < dataArray.length; i++) { + // if layer is an HTMLImageElement, use the .width and .height properties of each layer + // otherwise use the max size of the array texture + var layerWidth = dataArray[i].width || width; + var layerHeight = dataArray[i].height || height; + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? 
flipY[i] : flipY); + gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); + } + } else if (length > 1) { + // create empty array texture + gl.texStorage3D(target, 1, internalFormat, width, height, length); + } else { + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); + gl.texStorage2D(target, 1, internalFormat, width, height); - return b <= minCostSplitBucket; - }, start, end); + if (data) { + gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); } + } // return state to default - return makeInteriorNode(dim, recursiveBuild(primitiveInfo, start, mid), recursiveBuild(primitiveInfo, mid, end)); - } - } - function makeLeafNode(primitives, bounds) { + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false); return { - primitives: primitives, - bounds: bounds + target: target, + texture: texture }; } - - function makeInteriorNode(splitAxis, child0, child1) { + function makeDepthTarget(gl, width, height) { + var texture = gl.createRenderbuffer(); + var target = gl.RENDERBUFFER; + gl.bindRenderbuffer(target, texture); + gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT24, width, height); + gl.bindRenderbuffer(target, null); return { - child0: child0, - child1: child1, - bounds: new THREE$1.Box3().union(child0.bounds).union(child1.bounds), - splitAxis: splitAxis + target: target, + texture: texture }; } - function maximumExtent(box3) { - box3.getSize(size); - - if (size.x > size.z) { - return size.x > size.y ? 'x' : 'y'; - } else { - return size.z > size.y ? 'z' : 'y'; - } - } - - function boxOffset(box3, dim, v) { - var offset = v[dim] - box3.min[dim]; - - if (box3.max[dim] > box3.min[dim]) { - offset /= box3.max[dim] - box3.min[dim]; + function getTextureFormat(gl, channels, storage, data, gammaCorrection) { + var type; + var internalFormat; + var isByteArray = data instanceof Uint8Array || data instanceof HTMLImageElement || data instanceof HTMLCanvasElement || data instanceof ImageData; + var isFloatArray = data instanceof Float32Array; + + if (storage === 'byte' || !storage && isByteArray) { + internalFormat = { + 1: gl.R8, + 2: gl.RG8, + 3: gammaCorrection ? gl.SRGB8 : gl.RGB8, + 4: gammaCorrection ? gl.SRGB8_ALPHA8 : gl.RGBA8 + }[channels]; + type = gl.UNSIGNED_BYTE; + } else if (storage === 'float' || !storage && isFloatArray) { + internalFormat = { + 1: gl.R32F, + 2: gl.RG32F, + 3: gl.RGB32F, + 4: gl.RGBA32F + }[channels]; + type = gl.FLOAT; + } else if (storage === 'halfFloat') { + internalFormat = { + 1: gl.R16F, + 2: gl.RG16F, + 3: gl.RGB16F, + 4: gl.RGBA16F + }[channels]; + type = gl.FLOAT; + } else if (storage === 'snorm') { + internalFormat = { + 1: gl.R8_SNORM, + 2: gl.RG8_SNORM, + 3: gl.RGB8_SNORM, + 4: gl.RGBA8_SNORM + }[channels]; + type = gl.UNSIGNED_BYTE; } - return offset; - } - - function surfaceArea(box3) { - box3.getSize(size); - return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); + var format = { + 1: gl.RED, + 2: gl.RG, + 3: gl.RGB, + 4: gl.RGBA + }[channels]; + return { + format: format, + internalFormat: internalFormat, + type: type + }; } - // Convert image data from the RGBE format to a 32-bit floating point format - // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format - // Optional multiplier argument for performance optimization - function rgbeToFloat(buffer) { - var intensity = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 1; - var texels = buffer.length / 4; - var floatBuffer = new Float32Array(texels * 3); - var expTable = []; + // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property + function getTexturesFromMaterials(meshes, textureNames) { + var textureMap = {}; + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; - for (var i = 0; i < 255; i++) { - expTable[i] = intensity * Math.pow(2, i - 128) / 255; + try { + for (var _iterator = textureNames[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var name = _step.value; + var textures = []; + textureMap[name] = { + indices: texturesFromMaterials(meshes, name, textures), + textures: textures + }; + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator["return"] != null) { + _iterator["return"](); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } } - for (var _i = 0; _i < texels; _i++) { - var r = buffer[4 * _i]; - var g = buffer[4 * _i + 1]; - var b = buffer[4 * _i + 2]; - var a = buffer[4 * _i + 3]; - var e = expTable[a]; - floatBuffer[3 * _i] = r * e; - floatBuffer[3 * _i + 1] = g * e; - floatBuffer[3 * _i + 2] = b * e; - } + return textureMap; + } // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties - return floatBuffer; - } + function mergeTexturesFromMaterials(meshes, textureNames) { + var textureMap = { + textures: [], + indices: {} + }; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; - function clamp(x, min, max) { - return Math.min(Math.max(x, min), max); - } - function shuffle(arr) { - for (var i = arr.length - 1; i > 0; i--) { - var j = Math.floor(Math.random() * (i + 1)); - var x = arr[i]; - arr[i] = arr[j]; - arr[j] = x; + try { + for (var _iterator2 = textureNames[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var name = _step2.value; + textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } } - return arr; + return textureMap; } - function numberArraysEqual(a, b) { - var eps = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : 1e-4; - for (var i = 0; i < a.length; i++) { - if (Math.abs(a[i] - b[i]) > eps) { - return false; - } - } + function texturesFromMaterials(materials, textureName, textures) { + var indices = []; + var _iteratorNormalCompletion3 = true; + var _didIteratorError3 = false; + var _iteratorError3 = undefined; - return true; - } + try { + for (var _iterator3 = materials[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { + var material = _step3.value; - // Convert image data from the RGBE format to a 32-bit floating point format - var DEFAULT_MAP_RESOLUTION = { - width: 2048, - height: 1024 - }; // Tools for generating and modify env maps for lighting from scene component data + if (!material[textureName]) { + indices.push(-1); + } else { + var index = textures.length; - function generateBackgroundMapFromSceneBackground(background) { - var backgroundImage; + for (var i = 0; i < textures.length; i++) { + if (textures[i] === material[textureName]) { + // Reuse existing duplicate texture. + index = i; + break; + } + } - if (background.isColor) { - backgroundImage = generateSolidMap(1, 1, background); - } else if (background.encoding === THREE$1.RGBEEncoding) { - backgroundImage = { - width: background.image.width, - height: background.image.height, - data: background.image.data - }; - backgroundImage.data = rgbeToFloat(backgroundImage.data); + if (index === textures.length) { + // New texture. Add texture to list. + textures.push(material[textureName]); + } + + indices.push(index); + } + } + } catch (err) { + _didIteratorError3 = true; + _iteratorError3 = err; + } finally { + try { + if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { + _iterator3["return"](); + } + } finally { + if (_didIteratorError3) { + throw _iteratorError3; + } + } } - return backgroundImage; + return indices; } - function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { - var envImage = initializeEnvMap(environmentLights); - ambientLights.forEach(function (light) { - addAmbientLightToEnvMap(light, envImage); + + function makeMaterialBuffer(gl, materials) { + var maps = getTexturesFromMaterials(materials, ['map', 'normalMap']); + var pbrMap = mergeTexturesFromMaterials(materials, ['roughnessMap', 'metalnessMap']); + var textures = {}; + var bufferData = {}; + bufferData.color = materials.map(function (m) { + return m.color; }); - directionalLights.forEach(function (light) { - envImage.data = addDirectionalLightToEnvMap(light, envImage); + bufferData.roughness = materials.map(function (m) { + return m.roughness; }); - return envImage; - } - function initializeEnvMap(environmentLights) { - var envImage; // Initialize map from environment light if present + bufferData.metalness = materials.map(function (m) { + return m.metalness; + }); + bufferData.normalScale = materials.map(function (m) { + return m.normalScale; + }); + bufferData.type = materials.map(function (m) { + if (m.shadowCatcher) { + return ShadowCatcherMaterial; + } - if (environmentLights.length > 0) { - // TODO: support multiple environment lights (what if they have different resolutions?) 
- var environmentLight = environmentLights[0]; - envImage = { - width: environmentLight.map.image.width, - height: environmentLight.map.image.height, - data: environmentLight.map.image.data - }; - envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); - } else { - // initialize blank map - envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); + if (m.transparent) { + return m.solid ? ThickMaterial : ThinMaterial; + } + }); + + if (maps.map.textures.length > 0) { + var _makeTextureArray = makeTextureArray(gl, maps.map.textures, true), + relativeSizes = _makeTextureArray.relativeSizes, + texture = _makeTextureArray.texture; + + textures.diffuseMap = texture; + bufferData.diffuseMapSize = relativeSizes; + bufferData.diffuseMapIndex = maps.map.indices; } - return envImage; - } - function generateSolidMap(width, height, color, intensity) { - var texels = width * height; - var floatBuffer = new Float32Array(texels * 3); + if (maps.normalMap.textures.length > 0) { + var _makeTextureArray2 = makeTextureArray(gl, maps.normalMap.textures, false), + _relativeSizes = _makeTextureArray2.relativeSizes, + _texture = _makeTextureArray2.texture; - if (color && color.isColor) { - setBufferToColor(floatBuffer, color, intensity); + textures.normalMap = _texture; + bufferData.normalMapSize = _relativeSizes; + bufferData.normalMapIndex = maps.normalMap.indices; + } + + if (pbrMap.textures.length > 0) { + var _makeTextureArray3 = makeTextureArray(gl, pbrMap.textures, false), + _relativeSizes2 = _makeTextureArray3.relativeSizes, + _texture2 = _makeTextureArray3.texture; + + textures.pbrMap = _texture2; + bufferData.pbrMapSize = _relativeSizes2; + bufferData.roughnessMapIndex = pbrMap.indices.roughnessMap; + bufferData.metalnessMapIndex = pbrMap.indices.metalnessMap; } + var defines = { + NUM_MATERIALS: materials.length, + NUM_DIFFUSE_MAPS: maps.map.textures.length, + NUM_NORMAL_MAPS: maps.normalMap.textures.length, + NUM_DIFFUSE_NORMAL_MAPS: Math.max(maps.map.textures.length, maps.normalMap.textures.length), + NUM_PBR_MAPS: pbrMap.textures.length + }; // create temporary shader program including the Material uniform buffer + // used to query the compiled structure of the uniform buffer + + var renderPass = makeRenderPass(gl, { + vertex: { + source: "void main() {}" + }, + fragment: { + includes: [materialBuffer], + source: "void main() {}" + }, + defines: defines + }); + uploadToUniformBuffer(gl, renderPass.program, bufferData); return { - width: width, - height: height, - data: floatBuffer + defines: defines, + textures: textures }; } - function setBufferToColor(buffer, color) { - var intensity = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1; - buffer.forEach(function (part, index) { - var component = index % 3; + function makeTextureArray(gl, textures) { + var gammaCorrection = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : false; + var images = textures.map(function (t) { + return t.image; + }); + var flipY = textures.map(function (t) { + return t.flipY; + }); - if (component === 0) { - buffer[index] = color.r * intensity; - } else if (component === 1) { - buffer[index] = color.g * intensity; - } else if (component === 2) { - buffer[index] = color.b * intensity; - } + var _maxImageSize = maxImageSize(images), + maxSize = _maxImageSize.maxSize, + relativeSizes = _maxImageSize.relativeSizes; // create GL Array Texture from individual textures + + + var texture = makeTexture(gl, { + width: maxSize.width, + height: maxSize.height, + gammaCorrection: gammaCorrection, + data: images, + flipY: flipY, + channels: 3, + minFilter: gl.LINEAR, + magFilter: gl.LINEAR }); - return buffer; + return { + texture: texture, + relativeSizes: relativeSizes + }; } - function addAmbientLightToEnvMap(light, image) { - var color = light.color; - image.data.forEach(function (part, index) { - var component = index % 3; + function maxImageSize(images) { + var maxSize = { + width: 0, + height: 0 + }; + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; - if (component === 0) { - image.data[index] += color.r * light.intensity; - } else if (component === 1) { - image.data[index] += color.g * light.intensity; - } else if (component === 2) { - image.data[index] += color.b * light.intensity; + try { + for (var _iterator = images[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var image = _step.value; + maxSize.width = Math.max(maxSize.width, image.width); + maxSize.height = Math.max(maxSize.height, image.height); } - }); - } - function addDirectionalLightToEnvMap(light, image) { - var sphericalCoords = new THREE$1.Spherical(); - var lightDirection = light.position.clone().sub(light.target.position); - sphericalCoords.setFromVector3(lightDirection); - sphericalCoords.theta = Math.PI * 3 / 2 - sphericalCoords.theta; - sphericalCoords.makeSafe(); - return addLightAtCoordinates(light, image, sphericalCoords); - } // Perform modifications on env map to match input scene + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator["return"] != null) { + _iterator["return"](); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } - function addLightAtCoordinates(light, image, originCoords) { - var floatBuffer = image.data; - var width = image.width; - var height = image.height; - var xTexels = floatBuffer.length / (3 * height); - var yTexels = floatBuffer.length / (3 * width); // default softness for standard directional lights is 0.01, i.e. 
a hard shadow + var relativeSizes = []; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; - var softness = light.softness || 0.01; // angle from center of light at which no more contributions are projected + try { + for (var _iterator2 = images[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var _image = _step2.value; + relativeSizes.push(_image.width / maxSize.width); + relativeSizes.push(_image.height / maxSize.height); + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } + } - var threshold = findThreshold(softness); // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it + return { + maxSize: maxSize, + relativeSizes: relativeSizes + }; + } // Upload arrays to uniform buffer objects + // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout - var useThreshold = threshold < Math.PI / 5; // functional trick to keep the conditional check out of the main loop - var intensityFromAngleFunction = useThreshold ? getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; - var begunAddingContributions = false; - var currentCoords = new THREE$1.Spherical(); // Iterates over each row from top to bottom + function uploadToUniformBuffer(gl, program, bufferData) { + var _ref, _ref2; - for (var i = 0; i < xTexels; i++) { - var encounteredInThisRow = false; // Iterates over each texel in row + var materialBuffer = makeUniformBuffer(gl, program, 'Materials'); + materialBuffer.set('Materials.colorAndMaterialType[0]', interleave({ + data: (_ref = []).concat.apply(_ref, _toConsumableArray(bufferData.color.map(function (d) { + return d.toArray(); + }))), + channels: 3 + }, { + data: bufferData.type, + channels: 1 + })); + materialBuffer.set('Materials.roughnessMetalnessNormalScale[0]', interleave({ + data: bufferData.roughness, + channels: 1 + }, { + data: bufferData.metalness, + channels: 1 + }, { + data: (_ref2 = []).concat.apply(_ref2, _toConsumableArray(bufferData.normalScale.map(function (d) { + return d.toArray(); + }))), + channels: 2 + })); + materialBuffer.set('Materials.diffuseNormalRoughnessMetalnessMapIndex[0]', interleave({ + data: bufferData.diffuseMapIndex, + channels: 1 + }, { + data: bufferData.normalMapIndex, + channels: 1 + }, { + data: bufferData.roughnessMapIndex, + channels: 1 + }, { + data: bufferData.metalnessMapIndex, + channels: 1 + })); + materialBuffer.set('Materials.diffuseNormalMapSize[0]', interleave({ + data: bufferData.diffuseMapSize, + channels: 2 + }, { + data: bufferData.normalMapSize, + channels: 2 + })); + materialBuffer.set('Materials.pbrMapSize[0]', bufferData.pbrMapSize); + materialBuffer.bind(0); + } - for (var j = 0; j < yTexels; j++) { - var bufferIndex = j * width + i; - currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); - var falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + function interleave() { + var maxLength = 0; - if (falloff > 0) { - encounteredInThisRow = true; - begunAddingContributions = true; - } + for (var i = 0; i < arguments.length; i++) { + var a = i < 0 || arguments.length <= i ? 
undefined : arguments[i]; + var l = a.data ? a.data.length / a.channels : 0; + maxLength = Math.max(maxLength, l); + } - var intensity = light.intensity * falloff; - floatBuffer[bufferIndex * 3] += intensity * light.color.r; - floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; - floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; - } // First row to not add a contribution since adding began - // This means the entire light has been added and we can exit early + var interleaved = []; + for (var _i = 0; _i < maxLength; _i++) { + for (var j = 0; j < arguments.length; j++) { + var _ref3 = j < 0 || arguments.length <= j ? undefined : arguments[j], + _ref3$data = _ref3.data, + data = _ref3$data === void 0 ? [] : _ref3$data, + channels = _ref3.channels; - if (!encounteredInThisRow && begunAddingContributions) { - return floatBuffer; + for (var c = 0; c < channels; c++) { + interleaved.push(data[_i * channels + c]); + } } } - return floatBuffer; + return interleaved; + } + + function mergeMeshesToGeometry(meshes) { + var vertexCount = 0; + var indexCount = 0; + var geometryAndMaterialIndex = []; + var materialIndexMap = new Map(); + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = meshes[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var mesh = _step.value; + + var _geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); + + var index = _geometry.getIndex(); + + if (!index) { + addFlatGeometryIndices(_geometry); + } + + _geometry.applyMatrix(mesh.matrixWorld); + + if (!_geometry.getAttribute('normal')) { + _geometry.computeVertexNormals(); + } else { + _geometry.normalizeNormals(); + } + + vertexCount += _geometry.getAttribute('position').count; + indexCount += _geometry.getIndex().count; + var material = mesh.material; + var materialIndex = materialIndexMap.get(material); + + if (materialIndex === undefined) { + materialIndex = materialIndexMap.size; + materialIndexMap.set(material, materialIndex); + } + + geometryAndMaterialIndex.push({ + geometry: _geometry, + materialIndex: materialIndex + }); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator["return"] != null) { + _iterator["return"](); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + var geometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); + return { + geometry: geometry, + materials: Array.from(materialIndexMap.keys()) + }; + } + + function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { + var positionAttrib = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + var normalAttrib = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + var uvAttrib = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); + var materialMeshIndexAttrib = new THREE$1.BufferAttribute(new Int32Array(2 * vertexCount), 2, false); + var indexAttrib = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); + var mergedGeometry = new THREE$1.BufferGeometry(); + mergedGeometry.addAttribute('position', positionAttrib); + mergedGeometry.addAttribute('normal', normalAttrib); + mergedGeometry.addAttribute('uv', uvAttrib); + mergedGeometry.addAttribute('materialMeshIndex', materialMeshIndexAttrib); + 
mergedGeometry.setIndex(indexAttrib); + var currentVertex = 0; + var currentIndex = 0; + var currentMesh = 1; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; + + try { + for (var _iterator2 = geometryAndMaterialIndex[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var _step2$value = _step2.value, + geometry = _step2$value.geometry, + materialIndex = _step2$value.materialIndex; + var _vertexCount = geometry.getAttribute('position').count; + mergedGeometry.merge(geometry, currentVertex); + var meshIndex = geometry.getIndex(); + + for (var i = 0; i < meshIndex.count; i++) { + indexAttrib.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); + } + + for (var _i = 0; _i < _vertexCount; _i++) { + materialMeshIndexAttrib.setXY(currentVertex + _i, materialIndex, currentMesh); + } + + currentVertex += _vertexCount; + currentIndex += meshIndex.count; + currentMesh++; + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } + } + + return mergedGeometry; + } // Similar to buffergeometry.clone(), except we only copy + // specific attributes instead of everything + + + function cloneBufferGeometry(bufferGeometry, attributes) { + var newGeometry = new THREE$1.BufferGeometry(); + var _iteratorNormalCompletion3 = true; + var _didIteratorError3 = false; + var _iteratorError3 = undefined; + + try { + for (var _iterator3 = attributes[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { + var name = _step3.value; + var attrib = bufferGeometry.getAttribute(name); + + if (attrib) { + newGeometry.addAttribute(name, attrib.clone()); + } + } + } catch (err) { + _didIteratorError3 = true; + _iteratorError3 = err; + } finally { + try { + if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { + _iterator3["return"](); + } + } finally { + if (_didIteratorError3) { + throw _iteratorError3; + } + } + } + + var index = bufferGeometry.getIndex(); + + if (index) { + newGeometry.setIndex(index); + } + + return newGeometry; + } + + function addFlatGeometryIndices(geometry) { + var position = geometry.getAttribute('position'); + + if (!position) { + console.warn('No position attribute'); + return; + } + + var index = new Uint32Array(position.count); + + for (var i = 0; i < index.length; i++) { + index[i] = i; + } + + geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + return geometry; + } + + // Reorders the elements in the range [first, last) in such a way that + // all elements for which the comparator c returns true + // precede the elements for which comparator c returns false. + function partition(array, compare) { + var left = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; + var right = arguments.length > 3 && arguments[3] !== undefined ? 
arguments[3] : array.length; + + while (left !== right) { + while (compare(array[left])) { + left++; + + if (left === right) { + return left; + } + } + + do { + right--; + + if (left === right) { + return left; + } + } while (!compare(array[right])); + + swap(array, left, right); + left++; + } + + return left; + } // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: + // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. + // All of the elements before this new nth element compare to true with elements after the nth element + + function nthElement(array, compare) { + var left = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; + var right = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : array.length; + var k = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : Math.floor((left + right) / 2); + + for (var i = left; i <= k; i++) { + var minIndex = i; + var minValue = array[i]; + + for (var j = i + 1; j < right; j++) { + if (!compare(minValue, array[j])) { + minIndex = j; + minValue = array[j]; + swap(array, i, minIndex); + } + } + } + } + + function swap(array, a, b) { + var x = array[b]; + array[b] = array[a]; + array[a] = x; + } + + // Create a bounding volume hierarchy of scene geometry + var size = new THREE$1.Vector3(); + function bvhAccel(geometry) { + var primitiveInfo = makePrimitiveInfo(geometry); + var node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); + return node; + } + function flattenBvh(bvh) { + var flat = []; + var isBounds = []; + var splitAxisMap = { + x: 0, + y: 1, + z: 2 + }; + var maxDepth = 1; + + var traverse = function traverse(node) { + var depth = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 1; + maxDepth = Math.max(depth, maxDepth); + + if (node.primitives) { + for (var i = 0; i < node.primitives.length; i++) { + var p = node.primitives[i]; + flat.push(p.indices[0], p.indices[1], p.indices[2], node.primitives.length, p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex); + isBounds.push(false); + } + } else { + var bounds = node.bounds; + flat.push(bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild + ); + + var _i = flat.length - 1; + + isBounds.push(true); + traverse(node.child0, depth + 1); + flat[_i] = flat.length / 4; // pointer to second child + + traverse(node.child1, depth + 1); + } + }; + + traverse(bvh); + var buffer = new ArrayBuffer(4 * flat.length); + var floatView = new Float32Array(buffer); + var intView = new Int32Array(buffer); + + for (var i = 0; i < isBounds.length; i++) { + var k = 8 * i; + + if (isBounds[i]) { + floatView[k] = flat[k]; + floatView[k + 1] = flat[k + 1]; + floatView[k + 2] = flat[k + 2]; + intView[k + 3] = flat[k + 3]; + } else { + intView[k] = flat[k]; + intView[k + 1] = flat[k + 1]; + intView[k + 2] = flat[k + 2]; + intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle + } + + floatView[k + 4] = flat[k + 4]; + floatView[k + 5] = flat[k + 5]; + floatView[k + 6] = flat[k + 6]; + intView[k + 7] = flat[k + 7]; + } + + return { + maxDepth: maxDepth, + count: flat.length / 4, + buffer: floatView + }; + } + + function makePrimitiveInfo(geometry) { + var primitiveInfo = []; + var indices = geometry.getIndex().array; + var position = geometry.getAttribute('position'); + var materialMeshIndex = geometry.getAttribute('materialMeshIndex'); + var v0 = new THREE$1.Vector3(); + var v1 = new THREE$1.Vector3(); + var v2 = new THREE$1.Vector3(); + var e0 = new THREE$1.Vector3(); + var e1 = new THREE$1.Vector3(); + + for (var i = 0; i < indices.length; i += 3) { + var i0 = indices[i]; + var i1 = indices[i + 1]; + var i2 = indices[i + 2]; + var bounds = new THREE$1.Box3(); + v0.fromBufferAttribute(position, i0); + v1.fromBufferAttribute(position, i1); + v2.fromBufferAttribute(position, i2); + e0.subVectors(v2, v0); + e1.subVectors(v1, v0); + bounds.expandByPoint(v0); + bounds.expandByPoint(v1); + bounds.expandByPoint(v2); + var info = { + bounds: bounds, + center: bounds.getCenter(new THREE$1.Vector3()), + indices: [i0, i1, i2], + faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), + materialIndex: materialMeshIndex.getX(i0) + }; + primitiveInfo.push(info); + } + + return primitiveInfo; } - function findThreshold(softness) { - var step = Math.PI / 128; - var maxSteps = 2.0 * Math.PI / step; + function recursiveBuild(primitiveInfo, start, end) { + var bounds = new THREE$1.Box3(); - for (var i = 0; i < maxSteps; i++) { - var angle = i * step; - var falloff = getFalloffAtAngle(angle, softness); + for (var i = start; i < end; i++) { + bounds.union(primitiveInfo[i].bounds); + } - if (falloff <= 0.0001) { - return angle; + var nPrimitives = end - start; + + if (nPrimitives === 1) { + return makeLeafNode(primitiveInfo.slice(start, end), bounds); + } else { + var centroidBounds = new THREE$1.Box3(); + + for (var _i2 = start; _i2 < end; _i2++) { + centroidBounds.expandByPoint(primitiveInfo[_i2].center); } - } - } - function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { - var deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); - var 
deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); + var dim = maximumExtent(centroidBounds); + var mid = Math.floor((start + end) / 2); // middle split method + // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; + // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); + // if (mid === start || mid === end) { + // mid = Math.floor((start + end) / 2); + // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); + // } + // surface area heuristic method - if (deltaTheta > threshold && deltaPhi > threshold) { - return 0; - } + if (nPrimitives <= 4) { + nthElement(primitiveInfo, function (a, b) { + return a.center[dim] < b.center[dim]; + }, start, end, mid); + } else { + var buckets = []; - var angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); - } + for (var _i3 = 0; _i3 < 12; _i3++) { + buckets.push({ + bounds: new THREE$1.Box3(), + count: 0 + }); + } - function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { - var angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); - } + for (var _i4 = start; _i4 < end; _i4++) { + var b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[_i4].center)); - function getAngleDelta(angleA, angleB) { - var diff = Math.abs(angleA - angleB) % (2 * Math.PI); - return diff > Math.PI ? 2 * Math.PI - diff : diff; - } + if (b === buckets.length) { + b = buckets.length - 1; + } - var angleBetweenSphericals = function () { - var originVector = new THREE$1.Vector3(); - var currentVector = new THREE$1.Vector3(); - return function (originCoords, currentCoords) { - originVector.setFromSpherical(originCoords); - currentVector.setFromSpherical(currentCoords); - return originVector.angleTo(currentVector); - }; - }(); // TODO: possibly clean this up and optimize it - // - // This function was arrived at through experimentation, it provides good - // looking results with percieved softness that scale relatively linearly with - // the softness value in the 0 - 1 range - // - // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) - // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times + buckets[b].count++; + buckets[b].bounds.union(primitiveInfo[_i4].bounds); + } + var cost = []; - function getFalloffAtAngle(angle, softness) { - var softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); - var falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); - return falloff; - } + for (var _i5 = 0; _i5 < buckets.length - 1; _i5++) { + var b0 = new THREE$1.Box3(); + var b1 = new THREE$1.Box3(); + var count0 = 0; + var count1 = 0; - function equirectangularToSpherical(x, y, width, height, target) { - target.phi = Math.PI * y / height; - target.theta = 2.0 * Math.PI * x / width; - return target; - } + for (var j = 0; j <= _i5; j++) { + b0.union(buckets[j].bounds); + count0 += buckets[j].count; + } - // Create a piecewise 2D cumulative distribution function of light intensity from an envmap - // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions - function envmapDistribution(image) { - var data = image.data; - var cdfImage = { - width: image.width + 2, 
- height: image.height + 1 - }; - var cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); + for (var _j = _i5 + 1; _j < buckets.length; _j++) { + b1.union(buckets[_j].bounds); + count1 += buckets[_j].count; + } - for (var y = 0; y < image.height; y++) { - var sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); + cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); + } - for (var x = 0; x < image.width; x++) { - var i = 3 * (y * image.width + x); - var r = data[i]; - var g = data[i + 1]; - var b = data[i + 2]; - var luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; - luminance *= sinTheta; - cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); - cdf.set(x + 1, y, 1, luminance); - } + var minCost = cost[0]; + var minCostSplitBucket = 0; - var rowIntegral = cdf.get(cdfImage.width - 1, y, 0); + for (var _i6 = 1; _i6 < cost.length; _i6++) { + if (cost[_i6] < minCost) { + minCost = cost[_i6]; + minCostSplitBucket = _i6; + } + } - for (var _x = 1; _x < cdf.width; _x++) { - cdf.set(_x, y, 0, cdf.get(_x, y, 0) / rowIntegral); - cdf.set(_x, y, 1, cdf.get(_x, y, 1) / rowIntegral); - } + mid = partition(primitiveInfo, function (p) { + var b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); - cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); - cdf.set(0, y, 1, rowIntegral); - } + if (b === buckets.length) { + b = buckets.length - 1; + } - var integral = cdf.get(0, cdf.height - 1, 0); + return b <= minCostSplitBucket; + }, start, end); + } - for (var _y = 0; _y < cdf.height; _y++) { - cdf.set(0, _y, 0, cdf.get(0, _y, 0) / integral); - cdf.set(0, _y, 1, cdf.get(0, _y, 1) / integral); + return makeInteriorNode(dim, recursiveBuild(primitiveInfo, start, mid), recursiveBuild(primitiveInfo, mid, end)); } + } - cdfImage.data = cdf.array; - return cdfImage; + function makeLeafNode(primitives, bounds) { + return { + primitives: primitives, + bounds: bounds + }; } - function makeTextureArray(width, height, channels) { - var array = new Float32Array(channels * width * height); + function makeInteriorNode(splitAxis, child0, child1) { return { - set: function set(x, y, channel, val) { - array[channels * (y * width + x) + channel] = val; - }, - get: function get(x, y, channel) { - return array[channels * (y * width + x) + channel]; - }, - width: width, - height: height, - channels: channels, - array: array + child0: child0, + child1: child1, + bounds: new THREE$1.Box3().union(child0.bounds).union(child1.bounds), + splitAxis: splitAxis }; } - function unrollLoop(indexName, start, limit, step, code) { - var unrolled = "int ".concat(indexName, ";\n"); + function maximumExtent(box3) { + box3.getSize(size); - for (var i = start; step > 0 && i < limit || step < 0 && i > limit; i += step) { - unrolled += "".concat(indexName, " = ").concat(i, ";\n"); - unrolled += code; + if (size.x > size.z) { + return size.x > size.y ? 'x' : 'y'; + } else { + return size.z > size.y ? 
'z' : 'y'; } - - return unrolled; } - var core = "\n #define PI 3.14159265359\n #define TWOPI 6.28318530718\n #define INVPI 0.31830988618\n #define INVPI2 0.10132118364\n #define EPS 0.0005\n #define INF 1.0e999\n #define RAY_MAX_DISTANCE 9999.0\n\n #define STANDARD 0\n #define THIN_GLASS 1\n #define THICK_GLASS 2\n #define SHADOW_CATCHER 3\n\n #define SAMPLES_PER_MATERIAL 8\n\n const float IOR = 1.5;\n const float INV_IOR = 1.0 / IOR;\n\n const float IOR_THIN = 1.015;\n const float INV_IOR_THIN = 1.0 / IOR_THIN;\n\n const float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR));\n\n // https://www.w3.org/WAI/GL/wiki/Relative_luminance\n const vec3 luminance = vec3(0.2126, 0.7152, 0.0722);\n\n struct Ray {\n vec3 o;\n vec3 d;\n vec3 invD;\n float tMax;\n };\n\n struct SurfaceInteraction {\n bool hit;\n vec3 position;\n vec3 normal; // smoothed normal from the three triangle vertices\n vec3 faceNormal; // normal of the triangle\n vec3 color;\n float roughness;\n float metalness;\n int materialType;\n int meshId;\n };\n\n struct Camera {\n mat4 transform;\n float aspect;\n float fov;\n float focus;\n float aperture;\n };\n\n void initRay(inout Ray ray, vec3 origin, vec3 direction) {\n ray.o = origin;\n ray.d = direction;\n ray.invD = 1.0 / ray.d;\n ray.tMax = RAY_MAX_DISTANCE;\n }\n\n // given the index from a 1D array, retrieve corresponding position from packed 2D texture\n ivec2 unpackTexel(int i, int columnsLog2) {\n ivec2 u;\n u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2)\n u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2)\n return u;\n }\n\n vec4 fetchData(sampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n }\n\n ivec4 fetchData(isampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n }\n\n struct Path {\n Ray ray;\n vec3 li;\n vec3 albedo;\n float alpha;\n vec3 beta;\n bool specularBounce;\n bool abort;\n };\n\n uniform Camera camera;\n uniform vec2 pixelSize; // 1 / screenResolution\n uniform vec2 jitter;\n\n in vec2 vCoord;\n"; - - // Manually performs linear filtering if the extension OES_texture_float_linear is not supported - var textureLinear = "\nvec4 textureLinear(sampler2D map, vec2 uv) {\n #ifdef OES_texture_float_linear\n return texture(map, uv);\n #else\n vec2 size = vec2(textureSize(map, 0));\n vec2 texelSize = 1.0 / size;\n\n uv = uv * size - 0.5;\n vec2 f = fract(uv);\n uv = floor(uv) + 0.5;\n\n vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize);\n vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize);\n vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize);\n vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize);\n\n return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y);\n #endif\n}\n"; - - var intersect = "\n\nuniform highp isampler2D indices;\nuniform sampler2D positions;\nuniform sampler2D normals;\nuniform sampler2D uvs;\nuniform sampler2D bvh;\n\nuniform Materials {\n vec4 colorAndMaterialType[NUM_MATERIALS];\n vec4 roughnessMetalnessNormalScale[NUM_MATERIALS];\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS];\n #endif\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS)\n vec4 diffuseNormalMapSize[NUM_DIFFUSE_NORMAL_MAPS];\n #endif\n\n #if defined(NUM_PBR_MAPS)\n vec2 pbrMapSize[NUM_PBR_MAPS];\n #endif\n} materials;\n\n#ifdef NUM_DIFFUSE_MAPS\n uniform mediump sampler2DArray diffuseMap;\n#endif\n\n#ifdef NUM_NORMAL_MAPS\n 
uniform mediump sampler2DArray normalMap;\n#endif\n\n#ifdef NUM_PBR_MAPS\n uniform mediump sampler2DArray pbrMap;\n#endif\n\nstruct Triangle {\n vec3 p0;\n vec3 p1;\n vec3 p2;\n};\n\nvoid surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) {\n si.hit = true;\n si.faceNormal = faceNormal;\n si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2;\n ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS);\n ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS);\n ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS);\n\n vec3 n0 = texelFetch(normals, i0, 0).xyz;\n vec3 n1 = texelFetch(normals, i1, 0).xyz;\n vec3 n2 = texelFetch(normals, i2, 0).xyz;\n si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2);\n\n si.color = materials.colorAndMaterialType[materialIndex].xyz;\n si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x;\n si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y;\n\n si.materialType = int(materials.colorAndMaterialType[materialIndex].w);\n\n // TODO: meshId should be the actual mesh id instead of the material id, which can be shared amoung meshes.\n // This will involve storing the mesh id AND the material id in the BVH texture\n si.meshId = materialIndex + 1; // +1 so that the mesh id is never 0\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n vec2 uv0 = texelFetch(uvs, i0, 0).xy;\n vec2 uv1 = texelFetch(uvs, i1, 0).xy;\n vec2 uv2 = texelFetch(uvs, i2, 0).xy;\n vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2);\n #endif\n\n #ifdef NUM_DIFFUSE_MAPS\n int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x;\n if (diffuseMapIndex >= 0) {\n si.color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb;\n }\n #endif\n\n #ifdef NUM_NORMAL_MAPS\n int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y;\n if (normalMapIndex >= 0) {\n vec2 duv02 = uv0 - uv2;\n vec2 duv12 = uv1 - uv2;\n vec3 dp02 = tri.p0 - tri.p2;\n vec3 dp12 = tri.p1 - tri.p2;\n\n // Method One\n // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0\n // Compute tangent vectors relative to the face normal. 
These vectors won't necessarily be orthogonal to the smoothed normal\n // This means the TBN matrix won't be orthogonal which is technically incorrect.\n // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js)\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale);\n\n // Method Two\n // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal\n // This might inadvertently flip coordinate space orientation\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process\n // vec3 dpdv = cross(si.normal, dpdu) * scale;\n\n // Method Three\n // http://www.thetenthplanet.de/archives/1180\n // Compute co-tangent and co-bitangent vectors\n // These vectors are orthongal and maintain a consistent coordinate space\n // --------------\n vec3 dp12perp = cross(dp12, si.normal);\n vec3 dp02perp = cross(si.normal, dp02);\n vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x;\n vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y;\n float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv)));\n dpdu *= invmax;\n dpdv *= invmax;\n\n vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0;\n n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw;\n\n mat3 tbn = mat3(dpdu, dpdv, si.normal);\n\n si.normal = normalize(tbn * n);\n }\n #endif\n\n #ifdef NUM_PBR_MAPS\n int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z;\n int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w;\n if (roughnessMapIndex >= 0) {\n si.roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g;\n }\n if (metalnessMapIndex >= 0) {\n si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b;\n }\n #endif\n}\n\nstruct TriangleIntersect {\n float t;\n vec3 barycentric;\n};\n\n// Triangle-ray intersection\n// Faster than the classic M\xF6ller\u2013Trumbore intersection algorithm\n// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection\nTriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) {\n TriangleIntersect ti;\n vec3 d = r.d;\n\n // translate vertices based on ray origin\n vec3 p0t = tri.p0 - r.o;\n vec3 p1t = tri.p1 - r.o;\n vec3 p2t = tri.p2 - r.o;\n\n // permute components of triangle vertices\n if (maxDim == 0) {\n p0t = p0t.yzx;\n p1t = p1t.yzx;\n p2t = p2t.yzx;\n } else if (maxDim == 1) {\n p0t = p0t.zxy;\n p1t = p1t.zxy;\n p2t = p2t.zxy;\n }\n\n // apply shear transformation to translated vertex positions\n p0t.xy += shear.xy * p0t.z;\n p1t.xy += shear.xy * p1t.z;\n p2t.xy += shear.xy * p2t.z;\n\n // compute edge function coefficients\n vec3 e = vec3(\n p1t.x * p2t.y - p1t.y * p2t.x,\n p2t.x * p0t.y - p2t.y * p0t.x,\n p0t.x * p1t.y - p0t.y * p1t.x\n );\n\n // check if intersection is inside triangle\n if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) {\n return ti;\n }\n\n float det = e.x + e.y + e.z;\n\n // not needed?\n // if (det == 
0.) {\n // return ti;\n // }\n\n p0t.z *= shear.z;\n p1t.z *= shear.z;\n p2t.z *= shear.z;\n float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z);\n\n // not needed?\n // if (sign(det) != sign(tScaled)) {\n // return ti;\n // }\n\n // check if closer intersection already exists\n if (abs(tScaled) > abs(r.tMax * det)) {\n return ti;\n }\n\n float invDet = 1. / det;\n ti.t = tScaled * invDet;\n ti.barycentric = e * invDet;\n\n return ti;\n}\n\nstruct Box {\n vec3 min;\n vec3 max;\n};\n\n// Branchless ray/box intersection\n// https://tavianator.com/fast-branchless-raybounding-box-intersections/\nfloat intersectBox(Ray r, Box b) {\n vec3 tBot = (b.min - r.o) * r.invD;\n vec3 tTop = (b.max - r.o) * r.invD;\n vec3 tNear = min(tBot, tTop);\n vec3 tFar = max(tBot, tTop);\n float t0 = max(tNear.x, max(tNear.y, tNear.z));\n float t1 = min(tFar.x, min(tFar.y, tFar.z));\n\n return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);\n}\n\nint maxDimension(vec3 v) {\n return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 1 : 2);\n}\n\n// Traverse BVH, find closest triangle intersection, and return surface information\nSurfaceInteraction intersectScene(inout Ray ray) {\n SurfaceInteraction si;\n\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n // Intersection is a bounding box. Test for box intersection and keep traversing BVH\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n // traverse near node to ray first, and far node to ray last\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear);\n\n if (hit.t > 0.0) {\n ray.tMax = hit.t;\n int materialIndex = floatBitsToInt(r2.w);\n vec3 faceNormal = r2.xyz;\n surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex);\n }\n }\n }\n\n // Values must be clamped outside of intersection loop. 
Clamping inside the loop produces incorrect numbers on some devices.\n si.roughness = clamp(si.roughness, 0.03, 1.0);\n si.metalness = clamp(si.metalness, 0.0, 1.0);\n\n return si;\n}\n\nbool intersectSceneShadow(inout Ray ray) {\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n\n if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) {\n return true;\n }\n }\n }\n\n return false;\n}\n\n"; - - var random = "\n\n// Noise texture used to generate a different random number for each pixel.\n// We use blue noise in particular, but any type of noise will work.\nuniform sampler2D noise;\n\nuniform float stratifiedSamples[SAMPLING_DIMENSIONS];\nuniform float strataSize;\n\n// Every time we call randomSample() in the shader, and for every call to render,\n// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples\n// This allows us to use stratified sampling for each random variable in our path tracing\nint sampleIndex = 0;\n\nconst highp float maxUint = 1.0 / 4294967295.0;\n\nfloat pixelSeed;\n\nvoid initRandom() {\n vec2 noiseSize = vec2(textureSize(noise, 0));\n\n // tile the small noise texture across the entire screen\n pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r;\n}\n\nfloat randomSample() {\n float stratifiedSample = stratifiedSamples[sampleIndex++];\n\n float random = fract((stratifiedSample + pixelSeed) * strataSize); // blue noise + stratified samples\n\n // transform random number between [0, 1] to (0, 1)\n return EPS + (1.0 - 2.0 * EPS) * random;\n}\n\nvec2 randomSampleVec2() {\n return vec2(randomSample(), randomSample());\n}\n"; - - // Sample the environment map using a cumulative distribution function as described in - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights - var envmap = "\n\nuniform sampler2D envmap;\nuniform sampler2D envmapDistribution;\nuniform sampler2D backgroundMap;\n\nvec2 cartesianToEquirect(vec3 pointOnSphere) {\n float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI);\n float theta = acos(pointOnSphere.y);\n return vec2(phi * 0.5 * INVPI, theta * INVPI);\n}\n\nfloat getEnvmapV(float u, out int vOffset, out float 
pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.y + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n vOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy;\n\n pdf = s0.y;\n\n return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y);\n}\n\nfloat getEnvmapU(float u, int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.x + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n int uOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy;\n\n pdf = s0.y;\n\n return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x);\n}\n\n// Perform two binary searches to find light direction.\nvec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) {\n vec2 partialPdf;\n int vOffset;\n\n uv.y = getEnvmapV(random.x, vOffset, partialPdf.y);\n uv.x = getEnvmapU(random.y, vOffset, partialPdf.x);\n\n float phi = uv.x * TWOPI;\n float theta = uv.y * PI;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n float cosPhi = cos(phi);\n float sinPhi = sin(phi);\n\n vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi);\n\n pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta);\n\n return dir;\n}\n\nfloat envmapPdf(vec2 uv) {\n vec2 size = vec2(textureSize(envmap, 0));\n\n float sinTheta = sin(uv.y * PI);\n\n uv *= size;\n\n float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y;\n float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y;\n\n return partialX * partialY * INVPI2 / (2.0 * sinTheta);\n}\n\nvec3 sampleEnvmapFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(envmap, uv).rgb;\n}\n\nvec3 sampleBackgroundFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(backgroundMap, uv).rgb;\n}\n\n"; - - var bsdf = "\n\n// Computes the exact value of the Fresnel factor\n// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/\nfloat fresnel(float cosTheta, float eta, float invEta) {\n eta = cosTheta > 0.0 ? 
eta : invEta;\n cosTheta = abs(cosTheta);\n\n float gSquared = eta * eta + cosTheta * cosTheta - 1.0;\n\n if (gSquared < 0.0) {\n return 1.0;\n }\n\n float g = sqrt(gSquared);\n\n float a = (g - cosTheta) / (g + cosTheta);\n float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0);\n\n return 0.5 * a * a * (1.0 + b * b);\n}\n\nfloat fresnelSchlickWeight(float cosTheta) {\n float w = 1.0 - cosTheta;\n return (w * w) * (w * w) * w;\n}\n\n// Computes Schlick's approximation of the Fresnel factor\n// Assumes ray is moving from a less dense to a more dense medium\nfloat fresnelSchlick(float cosTheta, float r0) {\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\n// Computes Schlick's approximation of Fresnel factor\n// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium\nfloat fresnelSchlickTIR(float cosTheta, float r0, float ni) {\n\n // moving from a more dense to a less dense medium\n if (cosTheta < 0.0) {\n float inv_eta = ni;\n float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta);\n if (SinT2 > 1.0) {\n return 1.0; // total internal reflection\n }\n cosTheta = sqrt(1.0f - SinT2);\n }\n\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\nfloat trowbridgeReitzD(float cosTheta, float alpha2) {\n float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0;\n return alpha2 / (PI * e * e);\n}\n\nfloat trowbridgeReitzLambda(float cosTheta, float alpha2) {\n float cos2Theta = cosTheta * cosTheta;\n float tan2Theta = (1.0 - cos2Theta) / cos2Theta;\n return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta));\n}\n\n// An implementation of Disney's principled BRDF\n// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf\nvec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) {\n vec3 halfVector = normalize(viewDir + lightDir);\n\n cosThetaL = abs(cosThetaL);\n float cosThetaV = abs(dot(si.normal, viewDir));\n float cosThetaH = abs(dot(si.normal, halfVector));\n float cosThetaD = abs(dot(lightDir, halfVector));\n\n float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness);\n\n float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness));\n float D = trowbridgeReitzD(cosThetaH, alpha2);\n\n float roughnessRemapped = 0.5 + 0.5 * si.roughness;\n float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped);\n\n float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped));\n\n float specular = F * D * G / (4.0 * cosThetaV * cosThetaL);\n float specularPdf = D * cosThetaH / (4.0 * cosThetaD);\n\n float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness;\n float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV));\n float diffusePdf = cosThetaL * INVPI;\n\n pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness);\n\n return mix(si.color * diffuse + specular, si.color * specular, si.metalness);\n}\n\n"; - - var sample = "\n\n// https://graphics.pixar.com/library/OrthonormalB/paper.pdf\nmat3 orthonormalBasis(vec3 n) {\n float zsign = n.z >= 0.0 ? 
1.0 : -1.0;\n float a = -1.0 / (zsign + n.z);\n float b = n.x * n.y * a;\n vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x);\n vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y);\n return mat3(s, t, n);\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk\nvec2 sampleCircle(vec2 p) {\n p = 2.0 * p - 1.0;\n\n bool greater = abs(p.x) > abs(p.y);\n\n float r = greater ? p.x : p.y;\n float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y);\n\n return r * vec2(cos(theta), sin(theta));\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling\nvec3 cosineSampleHemisphere(vec2 p) {\n vec2 h = sampleCircle(p);\n float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y));\n return vec3(h, z);\n}\n\n\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs\n// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper\nvec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) {\n float phi = TWOPI * random.y;\n float alpha = roughness * roughness;\n float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x));\n float sinTheta = sqrt(1.0 - cosTheta * cosTheta);\n\n vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta);\n\n vec3 lightDir = reflect(-viewDir, halfVector);\n\n return lightDir;\n}\n\nvec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) {\n return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random);\n}\n\nfloat powerHeuristic(float f, float g) {\n return (f * f) / (f * f + g * g);\n}\n\n"; - - // Estimate the direct lighting integral using multiple importance sampling - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral - var sampleMaterial = "\n\nvec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) {\n vec3 li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec3 irr = textureLinear(envmap, uv).xyz;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n li = brdf * irr * abs(cosThetaL) * weight / lightPdf;\n\n return li;\n}\n\nvec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) {\n vec3 li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf 
= envmapPdf(uv);\n\n vec3 irr = textureLinear(envmap, uv).rgb;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf;\n\n return li;\n}\n\nvoid sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n\n vec2 diffuseOrSpecular = randomSampleVec2();\n\n vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n bool lastBounce = bounce == BOUNCES;\n\n // Add path contribution\n path.li += path.beta * (\n importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) +\n importanceSampleMaterial(si, viewDir, lastBounce, lightDir)\n );\n\n // Get new path direction\n\n lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf);\n\n path.beta *= abs(cosThetaL) * brdf / scatteringPdf;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n}\n\n"; - - var sampleShadowCatcher = "\n\n#ifdef USE_SHADOW_CATCHER\n\nfloat importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) {\n float li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nfloat importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) {\n float li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf;\n\n alpha += lightEq;\n li 
+= occluded * lightEq;\n\n return li;\n}\n\nvoid sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir);\n\n vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float alphaBounce = 0.0;\n\n vec3 li = path.beta * color * (\n importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) +\n importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce)\n );\n\n // alphaBounce contains the lighting of the shadow catcher *without* shadows\n alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce;\n\n // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher\n path.alpha *= alphaBounce;\n\n // we only want the alpha division to affect the shadow catcher\n // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution\n path.li *= alphaBounce;\n\n // add path contribution\n path.li += li;\n\n // Get new path direction\n\n lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n // lambertian brdf with terms cancelled\n path.beta *= color;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n\n // advance dimension index by unused stratified samples\n const int usedSamples = 6;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n}\n\n#endif\n\n"; - - var sampleGlass = "\n\n#ifdef USE_GLASS\n\nvoid sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) {\n vec3 viewDir = -path.ray.d;\n float cosTheta = dot(si.normal, viewDir);\n\n float F = si.materialType == THIN_GLASS ?\n fresnelSchlick(abs(cosTheta), R0) : // thin glass\n fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass\n\n vec3 lightDir;\n\n float reflectionOrRefraction = randomSample();\n\n if (reflectionOrRefraction < F) {\n lightDir = reflect(-viewDir, si.normal);\n } else {\n lightDir = si.materialType == THIN_GLASS ?\n refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass\n refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass\n path.beta *= si.color;\n }\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // advance sample index by unused stratified samples\n const int usedSamples = 1;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n\n path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0);\n}\n\n#endif\n\n"; + function boxOffset(box3, dim, v) { + var offset = v[dim] - box3.min[dim]; - var fragment = { - includes: [core, textureLinear, intersect, random, envmap, bsdf, sample, sampleMaterial, sampleGlass, sampleShadowCatcher], - outputs: ['light', 'position'], - source: function source(defines) { - return "\n void bounce(inout Path path, int i, inout SurfaceInteraction si) {\n if (path.abort) {\n return;\n }\n\n si = intersectScene(path.ray);\n\n if (!si.hit) {\n if (path.specularBounce) {\n path.li += path.beta * sampleBackgroundFromDirection(path.ray.d);\n }\n\n path.abort = true;\n } else {\n #ifdef USE_GLASS\n if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) {\n sampleGlassSpecular(si, i, path);\n }\n #endif\n #ifdef USE_SHADOW_CATCHER\n if (si.materialType == SHADOW_CATCHER) {\n sampleShadowCatcher(si, i, path);\n }\n #endif\n if (si.materialType == STANDARD) {\n sampleMaterial(si, i, path);\n }\n\n // Russian Roulette sampling\n if (i >= 2) {\n float q = 1.0 - dot(path.beta, luminance);\n if (randomSample() < q) {\n path.abort = true;\n }\n path.beta /= 1.0 - q;\n }\n }\n }\n\n // Path tracing integrator as described in\n // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html#\n vec4 integrator(inout Ray ray, inout SurfaceInteraction si) {\n Path path;\n path.ray = ray;\n path.li = vec3(0);\n path.alpha = 1.0;\n path.beta = vec3(1.0);\n path.specularBounce = true;\n path.abort = false;\n\n bounce(path, 1, si);\n\n SurfaceInteraction indirectSi;\n\n // Manually unroll for loop.\n // Some hardware fails to interate over a GLSL loop, so we provide this workaround\n // for (int i = 1; i < defines.bounces + 1, i += 1)\n // equivelant to\n ".concat(unrollLoop('i', 2, defines.BOUNCES + 1, 1, "\n bounce(path, i, indirectSi);\n "), "\n\n return vec4(path.li, path.alpha);\n }\n\n void main() {\n initRandom();\n\n vec2 vCoordAntiAlias = vCoord + jitter;\n\n vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov));\n\n // Thin lens model with depth-of-field\n // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField\n // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2());\n // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane\n\n // vec3 origin = vec3(lensPoint, 0.0);\n // direction = normalize(focusPoint - origin);\n\n // origin = vec3(camera.transform * vec4(origin, 1.0));\n // direction = mat3(camera.transform) * direction;\n\n vec3 origin = camera.transform[3].xyz;\n direction = mat3(camera.transform) * direction;\n\n Ray cam;\n initRay(cam, origin, direction);\n\n SurfaceInteraction si;\n\n vec4 liAndAlpha = integrator(cam, si);\n\n if (dot(si.position, si.position) == 0.0) {\n si.position = origin + direction * RAY_MAX_DISTANCE;\n }\n\n if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) {\n liAndAlpha = vec4(0, 0, 0, 1);\n }\n\n out_light = liAndAlpha;\n out_position = vec4(si.position, si.meshId);\n\n // Stratified Sampling Sample Count Test\n // ---------------\n // Uncomment the following code\n // Then observe the colors of the image\n // If:\n // * The resulting image is pure black\n // Extra samples are being passed to the shader that aren't being used.\n // * The resulting image contains red\n // Not enough samples are being passed to the shader\n // * The resulting image contains only 
white with some black\n // All samples are used by the shader. Correct result!\n\n // fragColor = vec4(0, 0, 0, 1);\n // if (sampleIndex == SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 1, 1, 1);\n // } else if (sampleIndex > SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 0, 0, 1);\n // }\n}\n"); + if (box3.max[dim] > box3.min[dim]) { + offset /= box3.max[dim] - box3.min[dim]; } - }; - function mergeMeshesToGeometry(meshes) { - var vertexCount = 0; - var indexCount = 0; - var geometryAndMaterialIndex = []; - var materialIndexMap = new Map(); - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; + return offset; + } - try { - for (var _iterator = meshes[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var mesh = _step.value; + function surfaceArea(box3) { + box3.getSize(size); + return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); + } + + // Convert image data from the RGBE format to a 32-bit floating point format + // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format + // Optional multiplier argument for performance optimization + function rgbeToFloat(buffer) { + var intensity = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1; + var texels = buffer.length / 4; + var floatBuffer = new Float32Array(texels * 3); + var expTable = []; - var _geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); + for (var i = 0; i < 255; i++) { + expTable[i] = intensity * Math.pow(2, i - 128) / 255; + } - var index = _geometry.getIndex(); + for (var _i = 0; _i < texels; _i++) { + var r = buffer[4 * _i]; + var g = buffer[4 * _i + 1]; + var b = buffer[4 * _i + 2]; + var a = buffer[4 * _i + 3]; + var e = expTable[a]; + floatBuffer[3 * _i] = r * e; + floatBuffer[3 * _i + 1] = g * e; + floatBuffer[3 * _i + 2] = b * e; + } - if (!index) { - addFlatGeometryIndices(_geometry); - } + return floatBuffer; + } - _geometry.applyMatrix(mesh.matrixWorld); + // Convert image data from the RGBE format to a 32-bit floating point format + var DEFAULT_MAP_RESOLUTION = { + width: 2048, + height: 1024 + }; // Tools for generating and modify env maps for lighting from scene component data - if (!_geometry.getAttribute('normal')) { - _geometry.computeVertexNormals(); - } else { - _geometry.normalizeNormals(); - } + function generateBackgroundMapFromSceneBackground(background) { + var backgroundImage; - vertexCount += _geometry.getAttribute('position').count; - indexCount += _geometry.getIndex().count; - var material = mesh.material; - var materialIndex = materialIndexMap.get(material); + if (background.isColor) { + backgroundImage = generateSolidMap(1, 1, background); + } else if (background.encoding === THREE$1.RGBEEncoding) { + backgroundImage = { + width: background.image.width, + height: background.image.height, + data: background.image.data + }; + backgroundImage.data = rgbeToFloat(backgroundImage.data); + } - if (materialIndex === undefined) { - materialIndex = materialIndexMap.size; - materialIndexMap.set(material, materialIndex); - } + return backgroundImage; + } + function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { + var envImage = initializeEnvMap(environmentLights); + ambientLights.forEach(function (light) { + addAmbientLightToEnvMap(light, envImage); + }); + directionalLights.forEach(function (light) { + envImage.data = 
addDirectionalLightToEnvMap(light, envImage); + }); + return envImage; + } + function initializeEnvMap(environmentLights) { + var envImage; // Initialize map from environment light if present - geometryAndMaterialIndex.push({ - geometry: _geometry, - materialIndex: materialIndex - }); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } + if (environmentLights.length > 0) { + // TODO: support multiple environment lights (what if they have different resolutions?) + var environmentLight = environmentLights[0]; + envImage = { + width: environmentLight.map.image.width, + height: environmentLight.map.image.height, + data: environmentLight.map.image.data + }; + envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); + } else { + // initialize blank map + envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); } - var _mergeGeometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount), - geometry = _mergeGeometry.geometry, - materialIndices = _mergeGeometry.materialIndices; + return envImage; + } + function generateSolidMap(width, height, color, intensity) { + var texels = width * height; + var floatBuffer = new Float32Array(texels * 3); + + if (color && color.isColor) { + setBufferToColor(floatBuffer, color, intensity); + } return { - geometry: geometry, - materialIndices: materialIndices, - materials: Array.from(materialIndexMap.keys()) + width: width, + height: height, + data: floatBuffer }; } - function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { - var position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - var normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - var uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); - var index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); - var materialIndices = []; - var bg = new THREE$1.BufferGeometry(); - bg.addAttribute('position', position); - bg.addAttribute('normal', normal); - bg.addAttribute('uv', uv); - bg.setIndex(index); - var currentVertex = 0; - var currentIndex = 0; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; + function setBufferToColor(buffer, color) { + var intensity = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : 1; + buffer.forEach(function (part, index) { + var component = index % 3; - try { - for (var _iterator2 = geometryAndMaterialIndex[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var _step2$value = _step2.value, - geometry = _step2$value.geometry, - materialIndex = _step2$value.materialIndex; - var _vertexCount = geometry.getAttribute('position').count; - bg.merge(geometry, currentVertex); - var meshIndex = geometry.getIndex(); + if (component === 0) { + buffer[index] = color.r * intensity; + } else if (component === 1) { + buffer[index] = color.g * intensity; + } else if (component === 2) { + buffer[index] = color.b * intensity; + } + }); + return buffer; + } - for (var i = 0; i < meshIndex.count; i++) { - index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); - } + function addAmbientLightToEnvMap(light, image) { + var color = light.color; + image.data.forEach(function (part, index) { + var component = index % 3; - var triangleCount = meshIndex.count / 3; + if (component === 0) { + image.data[index] += color.r * light.intensity; + } else if (component === 1) { + image.data[index] += color.g * light.intensity; + } else if (component === 2) { + image.data[index] += color.b * light.intensity; + } + }); + } + function addDirectionalLightToEnvMap(light, image) { + var sphericalCoords = new THREE$1.Spherical(); + var lightDirection = light.position.clone().sub(light.target.position); + sphericalCoords.setFromVector3(lightDirection); + sphericalCoords.theta = Math.PI * 3 / 2 - sphericalCoords.theta; + sphericalCoords.makeSafe(); + return addLightAtCoordinates(light, image, sphericalCoords); + } // Perform modifications on env map to match input scene - for (var _i = 0; _i < triangleCount; _i++) { - materialIndices.push(materialIndex); - } + function addLightAtCoordinates(light, image, originCoords) { + var floatBuffer = image.data; + var width = image.width; + var height = image.height; + var xTexels = floatBuffer.length / (3 * height); + var yTexels = floatBuffer.length / (3 * width); // default softness for standard directional lights is 0.01, i.e. a hard shadow - currentVertex += _vertexCount; - currentIndex += meshIndex.count; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } + var softness = light.softness || 0.01; // angle from center of light at which no more contributions are projected - return { - geometry: bg, - materialIndices: materialIndices - }; - } // Similar to buffergeometry.clone(), except we only copy - // specific attributes instead of everything + var threshold = findThreshold(softness); // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it + var useThreshold = threshold < Math.PI / 5; // functional trick to keep the conditional check out of the main loop - function cloneBufferGeometry(bufferGeometry, attributes) { - var newGeometry = new THREE$1.BufferGeometry(); - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; + var intensityFromAngleFunction = useThreshold ? 
getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; + var begunAddingContributions = false; + var currentCoords = new THREE$1.Spherical(); // Iterates over each row from top to bottom - try { - for (var _iterator3 = attributes[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var name = _step3.value; - var attrib = bufferGeometry.getAttribute(name); + for (var i = 0; i < xTexels; i++) { + var encounteredInThisRow = false; // Iterates over each texel in row - if (attrib) { - newGeometry.addAttribute(name, attrib.clone()); - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { - _iterator3["return"](); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; + for (var j = 0; j < yTexels; j++) { + var bufferIndex = j * width + i; + currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); + var falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + + if (falloff > 0) { + encounteredInThisRow = true; + begunAddingContributions = true; } - } - } - var index = bufferGeometry.getIndex(); + var intensity = light.intensity * falloff; + floatBuffer[bufferIndex * 3] += intensity * light.color.r; + floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; + floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; + } // First row to not add a contribution since adding began + // This means the entire light has been added and we can exit early - if (index) { - newGeometry.setIndex(index); + + if (!encounteredInThisRow && begunAddingContributions) { + return floatBuffer; + } } - return newGeometry; + return floatBuffer; } - function addFlatGeometryIndices(geometry) { - var position = geometry.getAttribute('position'); + function findThreshold(softness) { + var step = Math.PI / 128; + var maxSteps = 2.0 * Math.PI / step; - if (!position) { - console.warn('No position attribute'); - return; + for (var i = 0; i < maxSteps; i++) { + var angle = i * step; + var falloff = getFalloffAtAngle(angle, softness); + + if (falloff <= 0.0001) { + return angle; + } } + } - var index = new Uint32Array(position.count); + function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { + var deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); + var deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); - for (var i = 0; i < index.length; i++) { - index[i] = i; + if (deltaTheta > threshold && deltaPhi > threshold) { + return 0; } - geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); - return geometry; + var angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); } - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { + var angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); + } - Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, - instead of being evenly spaced across the domain. - This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. 
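    // A minimal usage sketch of the directional-light softness helpers added in this hunk,
    // assuming findThreshold, getAngleDelta, angleBetweenSphericals, getFalloffAtAngle and
    // equirectangularToSpherical (defined nearby) are in scope, and that `light` carries the
    // renderer's optional `softness` extension (defaulting to a hard 0.01, as above).
    // The function name falloffAtTexel is illustrative only; it returns the light's
    // contribution factor for a single equirectangular texel (x, y).
    function falloffAtTexel(light, originCoords, x, y, width, height) {
      var softness = light.softness || 0.01;
      var threshold = findThreshold(softness); // angle beyond which contributions are negligible
      var texelCoords = equirectangularToSpherical(x, y, width, height, new THREE$1.Spherical());

      // Cheap rejection used by the thresholded path: if both angular deltas exceed the
      // threshold, the texel is far enough from the light direction to be skipped.
      if (getAngleDelta(originCoords.phi, texelCoords.phi) > threshold &&
          getAngleDelta(originCoords.theta, texelCoords.theta) > threshold) {
        return 0;
      }

      var angle = angleBetweenSphericals(originCoords, texelCoords);
      return light.intensity * getFalloffAtAngle(angle, softness);
    }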
+ function getAngleDelta(angleA, angleB) { + var diff = Math.abs(angleA - angleB) % (2 * Math.PI); + return diff > Math.PI ? 2 * Math.PI - diff : diff; + } - We can reduce the amount of clustering of random numbers by using stratified sampling. - Stratification divides the [0, 1) range into partitions, or stratum, of equal size. - Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. - When every stratum has been sampled once, this sequence is shuffled again and the process repeats. + var angleBetweenSphericals = function () { + var originVector = new THREE$1.Vector3(); + var currentVector = new THREE$1.Vector3(); + return function (originCoords, currentCoords) { + originVector.setFromSpherical(originCoords); + currentVector.setFromSpherical(currentCoords); + return originVector.angleTo(currentVector); + }; + }(); // TODO: possibly clean this up and optimize it + // + // This function was arrived at through experimentation, it provides good + // looking results with percieved softness that scale relatively linearly with + // the softness value in the 0 - 1 range + // + // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) + // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times - The returned sample ranges between [0, numberOfStratum). - The integer part ideintifies the stratum (the first stratum being 0). - The fractional part is the random number. - To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. - */ - function makeStratifiedSampler(strataCount, dimensions) { - var strata = []; - var l = Math.pow(strataCount, dimensions); + function getFalloffAtAngle(angle, softness) { + var softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); + var falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); + return falloff; + } - for (var i = 0; i < l; i++) { - strata[i] = i; - } + function equirectangularToSpherical(x, y, width, height, target) { + target.phi = Math.PI * y / height; + target.theta = 2.0 * Math.PI * x / width; + return target; + } - var index = strata.length; - var sample = []; + // Create a piecewise 2D cumulative distribution function of light intensity from an envmap + // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions + function envmapDistribution(image) { + var data = image.data; + var cdfImage = { + width: image.width + 2, + height: image.height + 1 + }; + var cdf = makeTextureArray$1(cdfImage.width, cdfImage.height, 2); - function restart() { - index = 0; - } + for (var y = 0; y < image.height; y++) { + var sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); - function next() { - if (index >= strata.length) { - shuffle(strata); - restart(); + for (var x = 0; x < image.width; x++) { + var i = 3 * (y * image.width + x); + var r = data[i]; + var g = data[i + 1]; + var b = data[i + 2]; + var luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; + luminance *= sinTheta; + cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); + cdf.set(x + 1, y, 1, luminance); } - var stratum = strata[index++]; + var rowIntegral = cdf.get(cdfImage.width - 1, y, 0); - for (var _i = 0; _i < dimensions; _i++) { - sample[_i] = stratum % strataCount + Math.random(); - 
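// Quick checks for the helpers above (illustrative values only).
// getAngleDelta measures the shorter way around the circle, so two angles on either side
// of the 2*PI seam are close together rather than a full turn apart.
var exampleDelta = getAngleDelta(0.1, 2 * Math.PI - 0.1); // ~0.2, not ~6.08
// equirectangularToSpherical maps a texel coordinate to spherical angles; the target only
// needs phi/theta properties, so a plain object works as well as THREE$1.Spherical here.
var exampleCoords = equirectangularToSpherical(16, 16, 64, 32, { phi: 0, theta: 0 });
// exampleCoords.phi === Math.PI / 2 (equator), exampleCoords.theta === Math.PI / 2 (quarter turn)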
stratum = Math.floor(stratum / strataCount); + for (var _x = 1; _x < cdf.width; _x++) { + cdf.set(_x, y, 0, cdf.get(_x, y, 0) / rowIntegral); + cdf.set(_x, y, 1, cdf.get(_x, y, 1) / rowIntegral); } - return sample; + cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); + cdf.set(0, y, 1, rowIntegral); + } + + var integral = cdf.get(0, cdf.height - 1, 0); + + for (var _y = 0; _y < cdf.height; _y++) { + cdf.set(0, _y, 0, cdf.get(0, _y, 0) / integral); + cdf.set(0, _y, 1, cdf.get(0, _y, 1) / integral); } + cdfImage.data = cdf.array; + return cdfImage; + } + + function makeTextureArray$1(width, height, channels) { + var array = new Float32Array(channels * width * height); return { - next: next, - restart: restart, - strataCount: strataCount + set: function set(x, y, channel, val) { + array[channels * (y * width + x) + channel] = val; + }, + get: function get(x, y, channel) { + return array[channels * (y * width + x) + channel]; + }, + width: width, + height: height, + channels: channels, + array: array }; } - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - - It is computationally unfeasible to compute stratified sampling for large dimensions (>2) - Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension - e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. - This reaps many benefits of stratification while still allowing for small strata sizes. - */ - function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { - var strataObjs = []; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; + function unrollLoop(indexName, start, limit, step, code) { + var unrolled = "int ".concat(indexName, ";\n"); - try { - for (var _iterator = listOfDimensions[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var dim = _step.value; - strataObjs.push(makeStratifiedSampler(strataCount, dim)); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } + for (var i = start; step > 0 && i < limit || step < 0 && i > limit; i += step) { + unrolled += "".concat(indexName, " = ").concat(i, ";\n"); + unrolled += code; } - var combined = []; + return unrolled; + } - function next() { - var i = 0; + var rayTraceCore = "\n #define STANDARD 0\n #define THIN_GLASS 1\n #define THICK_GLASS 2\n #define SHADOW_CATCHER 3\n\n #define SAMPLES_PER_MATERIAL 8\n\n const float IOR = 1.5;\n const float INV_IOR = 1.0 / IOR;\n\n const float IOR_THIN = 1.015;\n const float INV_IOR_THIN = 1.0 / IOR_THIN;\n\n const float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR));\n\n // https://www.w3.org/WAI/GL/wiki/Relative_luminance\n const vec3 luminance = vec3(0.2126, 0.7152, 0.0722);\n\n #define RAY_MAX_DISTANCE 9999.0\n\n struct Ray {\n vec3 o;\n vec3 d;\n vec3 invD;\n float tMax;\n };\n\n struct SurfaceInteraction {\n bool hit;\n vec3 position;\n vec3 normal; // smoothed normal from the three triangle vertices\n vec3 faceNormal; // normal of the triangle\n vec3 color;\n float roughness;\n float metalness;\n int materialType;\n };\n\n struct Camera {\n mat4 transform;\n float aspect;\n float fov;\n float focus;\n float aperture;\n };\n\n void 
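// Illustration of unrollLoop above: it emits the loop body once per index as a flat GLSL
// string, for hardware that fails to iterate dynamic loops (see the integrator below).
var exampleUnrolled = unrollLoop('i', 0, 3, 1, 'sum += data[i];\n');
// exampleUnrolled === 'int i;\n' +
//                     'i = 0;\nsum += data[i];\n' +
//                     'i = 1;\nsum += data[i];\n' +
//                     'i = 2;\nsum += data[i];\n'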
initRay(inout Ray ray, vec3 origin, vec3 direction) {\n ray.o = origin;\n ray.d = direction;\n ray.invD = 1.0 / ray.d;\n ray.tMax = RAY_MAX_DISTANCE;\n }\n\n // given the index from a 1D array, retrieve corresponding position from packed 2D texture\n ivec2 unpackTexel(int i, int columnsLog2) {\n ivec2 u;\n u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2)\n u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2)\n return u;\n }\n\n vec4 fetchData(sampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n }\n\n ivec4 fetchData(isampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n }\n\n struct Path {\n Ray ray;\n vec3 li;\n vec3 albedo;\n float alpha;\n vec3 beta;\n bool specularBounce;\n bool abort;\n };\n\n uniform Camera camera;\n uniform vec2 pixelSize; // 1 / screenResolution\n uniform vec2 jitter;\n\n in vec2 vCoord;\n"; - for (var _i = 0, _strataObjs = strataObjs; _i < _strataObjs.length; _i++) { - var strata = _strataObjs[_i]; - var nums = strata.next(); - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; + // Manually performs linear filtering if the extension OES_texture_float_linear is not supported + var textureLinear = "\nvec4 textureLinear(sampler2D map, vec2 uv) {\n #ifdef OES_texture_float_linear\n return texture(map, uv);\n #else\n vec2 size = vec2(textureSize(map, 0));\n vec2 texelSize = 1.0 / size;\n\n uv = uv * size - 0.5;\n vec2 f = fract(uv);\n uv = floor(uv) + 0.5;\n\n vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize);\n vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize);\n vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize);\n vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize);\n\n return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y);\n #endif\n}\n"; - try { - for (var _iterator2 = nums[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var num = _step2.value; - combined[i++] = num; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - } + var intersect = "\n\nuniform sampler2D positions;\nuniform sampler2D normals;\nuniform sampler2D uvs;\nuniform sampler2D bvh;\n\nstruct Triangle {\n vec3 p0;\n vec3 p1;\n vec3 p2;\n};\n\nvoid surfaceInteractionFromBVH(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) {\n si.hit = true;\n si.faceNormal = faceNormal;\n si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2;\n ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS);\n ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS);\n ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS);\n\n vec3 n0 = texelFetch(normals, i0, 0).xyz;\n vec3 n1 = texelFetch(normals, i1, 0).xyz;\n vec3 n2 = texelFetch(normals, i2, 0).xyz;\n vec3 normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2);\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n vec2 uv0 = texelFetch(uvs, i0, 0).xy;\n vec2 uv1 = texelFetch(uvs, i1, 0).xy;\n vec2 uv2 = texelFetch(uvs, i2, 0).xy;\n vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2);\n #else\n vec2 uv = vec2();\n #endif\n\n 
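// JS mirror of the GLSL unpackTexel above, to make the packed-texture addressing concrete.
// With a texture 2^columnsLog2 texels wide, a flat buffer index maps to (column, row) by
// bit shifts instead of division and modulo.
function unpackTexelJs(i, columnsLog2) {
  var y = i >> columnsLog2;       // i / 2^columnsLog2
  var x = i - (y << columnsLog2); // i % 2^columnsLog2
  return { x: x, y: y };
}
// unpackTexelJs(37, 4) -> { x: 5, y: 2 }  (37 = 2 * 16 + 5 with 16 columns)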
si.materialType = int(getMatType(materialIndex));\n si.color = getMatColor(materialIndex, uv);\n si.roughness = getMatRoughness(materialIndex, uv);\n si.metalness = getMatMetalness(materialIndex, uv);\n\n #ifdef NUM_NORMAL_MAPS\n vec3 dp1 = tri.p0 - tri.p2;\n vec3 dp2 = tri.p1 - tri.p2;\n vec2 duv1 = uv0 - uv2;\n vec2 duv2 = uv1 - uv2;\n si.normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2);\n #else\n si.normal = normal;\n #endif\n}\n\nstruct TriangleIntersect {\n float t;\n vec3 barycentric;\n};\n\n// Triangle-ray intersection\n// Faster than the classic M\xF6ller\u2013Trumbore intersection algorithm\n// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection\nTriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) {\n TriangleIntersect ti;\n vec3 d = r.d;\n\n // translate vertices based on ray origin\n vec3 p0t = tri.p0 - r.o;\n vec3 p1t = tri.p1 - r.o;\n vec3 p2t = tri.p2 - r.o;\n\n // permute components of triangle vertices\n if (maxDim == 0) {\n p0t = p0t.yzx;\n p1t = p1t.yzx;\n p2t = p2t.yzx;\n } else if (maxDim == 1) {\n p0t = p0t.zxy;\n p1t = p1t.zxy;\n p2t = p2t.zxy;\n }\n\n // apply shear transformation to translated vertex positions\n p0t.xy += shear.xy * p0t.z;\n p1t.xy += shear.xy * p1t.z;\n p2t.xy += shear.xy * p2t.z;\n\n // compute edge function coefficients\n vec3 e = vec3(\n p1t.x * p2t.y - p1t.y * p2t.x,\n p2t.x * p0t.y - p2t.y * p0t.x,\n p0t.x * p1t.y - p0t.y * p1t.x\n );\n\n // check if intersection is inside triangle\n if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) {\n return ti;\n }\n\n float det = e.x + e.y + e.z;\n\n // not needed?\n // if (det == 0.) {\n // return ti;\n // }\n\n p0t.z *= shear.z;\n p1t.z *= shear.z;\n p2t.z *= shear.z;\n float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z);\n\n // not needed?\n // if (sign(det) != sign(tScaled)) {\n // return ti;\n // }\n\n // check if closer intersection already exists\n if (abs(tScaled) > abs(r.tMax * det)) {\n return ti;\n }\n\n float invDet = 1. / det;\n ti.t = tScaled * invDet;\n ti.barycentric = e * invDet;\n\n return ti;\n}\n\nstruct Box {\n vec3 min;\n vec3 max;\n};\n\n// Branchless ray/box intersection\n// https://tavianator.com/fast-branchless-raybounding-box-intersections/\nfloat intersectBox(Ray r, Box b) {\n vec3 tBot = (b.min - r.o) * r.invD;\n vec3 tTop = (b.max - r.o) * r.invD;\n vec3 tNear = min(tBot, tTop);\n vec3 tFar = max(tBot, tTop);\n float t0 = max(tNear.x, max(tNear.y, tNear.z));\n float t1 = min(tFar.x, min(tFar.y, tFar.z));\n\n return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);\n}\n\nint maxDimension(vec3 v) {\n return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 
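// Scalar JS mirror of the slab test in intersectBox above, for reference. Vectors are plain
// [x, y, z] arrays here; the GLSL version performs the same per-component min/max without
// any per-axis branching.
function intersectBoxJs(rayOrigin, rayInvDir, boxMin, boxMax, tMax) {
  var t0 = -Infinity;
  var t1 = Infinity;
  for (var axis = 0; axis < 3; axis++) {
    var tBot = (boxMin[axis] - rayOrigin[axis]) * rayInvDir[axis];
    var tTop = (boxMax[axis] - rayOrigin[axis]) * rayInvDir[axis];
    t0 = Math.max(t0, Math.min(tBot, tTop)); // latest entry across the three slabs
    t1 = Math.min(t1, Math.max(tBot, tTop)); // earliest exit across the three slabs
  }
  return (t0 > t1 || t0 > tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);
}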
1 : 2);\n}\n\n// Traverse BVH, find closest triangle intersection, and return surface information\nvoid intersectScene(inout Ray ray, inout SurfaceInteraction si) {\n si.hit = false;\n\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n // Intersection is a bounding box. Test for box intersection and keep traversing BVH\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n // traverse near node to ray first, and far node to ray last\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear);\n\n if (hit.t > 0.0) {\n ray.tMax = hit.t;\n int materialIndex = floatBitsToInt(r2.w);\n vec3 faceNormal = r2.xyz;\n surfaceInteractionFromBVH(si, tri, hit.barycentric, index, faceNormal, materialIndex);\n }\n }\n }\n\n // Values must be clamped outside of intersection loop. 
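// Sketch of the traversal order used by intersectScene above: the node stack is LIFO, so the
// child pushed last is popped first. The shader pushes the near child last, where "near" is
// decided by the sign of the ray direction along the node's split axis. Node layout follows
// the shader: the left child starts at i + 2, the right child's index is stored in r2.w.
function pushChildrenNearFirst(nodesToVisit, i, rightChildIndex, rayDirAlongSplitAxis) {
  if (rayDirAlongSplitAxis > 0.0) {
    nodesToVisit.push(rightChildIndex); // far child
    nodesToVisit.push(i + 2);           // near child, visited first
  } else {
    nodesToVisit.push(i + 2);           // far child
    nodesToVisit.push(rightChildIndex); // near child, visited first
  }
}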
Clamping inside the loop produces incorrect numbers on some devices.\n si.roughness = clamp(si.roughness, ROUGHNESS_MIN, 1.0);\n si.metalness = clamp(si.metalness, 0.0, 1.0);\n}\n\nbool intersectSceneShadow(inout Ray ray) {\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n\n if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) {\n return true;\n }\n }\n }\n\n return false;\n}\n\n"; - return combined; - } + var surfaceInteractionDirect = "\n\n uniform sampler2D gPosition;\n uniform sampler2D gNormal;\n uniform sampler2D gFaceNormal;\n uniform sampler2D gColor;\n uniform sampler2D gMatProps;\n\n void surfaceInteractionDirect(vec2 coord, inout SurfaceInteraction si) {\n si.position = texture(gPosition, coord).xyz;\n\n vec4 normalMaterialType = texture(gNormal, coord);\n\n si.normal = normalize(normalMaterialType.xyz);\n si.materialType = int(normalMaterialType.w);\n\n si.faceNormal = normalize(texture(gFaceNormal, coord).xyz);\n\n si.color = texture(gColor, coord).rgb;\n\n vec4 matProps = texture(gMatProps, coord);\n si.roughness = matProps.x;\n si.metalness = matProps.y;\n\n si.hit = dot(si.normal, si.normal) > 0.0 ? 
true : false;\n }\n"; - function restart() { - for (var _i2 = 0, _strataObjs2 = strataObjs; _i2 < _strataObjs2.length; _i2++) { - var strata = _strataObjs2[_i2]; - strata.restart(); - } - } + var random = "\n\n// Noise texture used to generate a different random number for each pixel.\n// We use blue noise in particular, but any type of noise will work.\nuniform sampler2D noise;\n\nuniform float stratifiedSamples[SAMPLING_DIMENSIONS];\nuniform float strataSize;\n\n// Every time we call randomSample() in the shader, and for every call to render,\n// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples\n// This allows us to use stratified sampling for each random variable in our path tracing\nint sampleIndex = 0;\n\nfloat pixelSeed;\n\nvoid initRandom() {\n vec2 noiseSize = vec2(textureSize(noise, 0));\n\n // tile the small noise texture across the entire screen\n pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r;\n}\n\nfloat randomSample() {\n float stratifiedSample = stratifiedSamples[sampleIndex++];\n\n float random = fract((stratifiedSample + pixelSeed) * strataSize); // blue noise + stratified samples\n\n // transform random number between [0, 1] to (0, 1)\n return EPS + (1.0 - 2.0 * EPS) * random;\n}\n\nvec2 randomSampleVec2() {\n return vec2(randomSample(), randomSample());\n}\n"; - return { - next: next, - restart: restart, - strataCount: strataCount - }; - } + // Sample the environment map using a cumulative distribution function as described in + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights + var envmap = "\n\nuniform sampler2D envmap;\nuniform sampler2D envmapDistribution;\nuniform sampler2D backgroundMap;\n\nvec2 cartesianToEquirect(vec3 pointOnSphere) {\n float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI);\n float theta = acos(pointOnSphere.y);\n return vec2(phi * 0.5 * INVPI, theta * INVPI);\n}\n\nfloat getEnvmapV(float u, out int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.y + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n vOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy;\n\n pdf = s0.y;\n\n return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y);\n}\n\nfloat getEnvmapU(float u, int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.x + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n int uOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy;\n\n pdf = s0.y;\n\n return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / 
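// JS mirror of cartesianToEquirect above. GLSL's mod() never returns a negative value, so
// the double modulo is needed to reproduce it with JavaScript's % operator.
function cartesianToEquirectJs(d) {
  var twoPi = 2 * Math.PI;
  var phi = ((Math.atan2(-d.z, -d.x) % twoPi) + twoPi) % twoPi;
  var theta = Math.acos(d.y);
  return { u: phi / twoPi, v: theta / Math.PI };
}
// cartesianToEquirectJs({ x: -1, y: 0, z: 0 }) -> { u: 0, v: 0.5 }  (left edge of the map, on the equator)
// cartesianToEquirectJs({ x: 0, y: 1, z: 0 })  -> v === 0           (straight up maps to the top row)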
float(size.x);\n}\n\n// Perform two binary searches to find light direction.\nvec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) {\n vec2 partialPdf;\n int vOffset;\n\n uv.y = getEnvmapV(random.x, vOffset, partialPdf.y);\n uv.x = getEnvmapU(random.y, vOffset, partialPdf.x);\n\n float phi = uv.x * TWOPI;\n float theta = uv.y * PI;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n float cosPhi = cos(phi);\n float sinPhi = sin(phi);\n\n vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi);\n\n pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta);\n\n return dir;\n}\n\nfloat envmapPdf(vec2 uv) {\n vec2 size = vec2(textureSize(envmap, 0));\n\n float sinTheta = sin(uv.y * PI);\n\n uv *= size;\n\n float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y;\n float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y;\n\n return partialX * partialY * INVPI2 / (2.0 * sinTheta);\n}\n\nvec3 sampleEnvmapFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(envmap, uv).rgb;\n}\n\nvec3 sampleBackgroundFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(backgroundMap, uv).rgb;\n}\n\n"; - function makeTexture(gl, params) { - var _params$width = params.width, - width = _params$width === void 0 ? null : _params$width, - _params$height = params.height, - height = _params$height === void 0 ? null : _params$height, - _params$data = params.data, - data = _params$data === void 0 ? null : _params$data, - _params$length = params.length, - length = _params$length === void 0 ? 1 : _params$length, - _params$channels = params.channels, - channels = _params$channels === void 0 ? null : _params$channels, - _params$storage = params.storage, - storage = _params$storage === void 0 ? null : _params$storage, - _params$flipY = params.flipY, - flipY = _params$flipY === void 0 ? false : _params$flipY, - _params$gammaCorrecti = params.gammaCorrection, - gammaCorrection = _params$gammaCorrecti === void 0 ? false : _params$gammaCorrecti, - _params$wrapS = params.wrapS, - wrapS = _params$wrapS === void 0 ? gl.REPEAT : _params$wrapS, - _params$wrapT = params.wrapT, - wrapT = _params$wrapT === void 0 ? gl.REPEAT : _params$wrapT, - _params$minFilter = params.minFilter, - minFilter = _params$minFilter === void 0 ? gl.LINEAR : _params$minFilter, - _params$magFilter = params.magFilter, - magFilter = _params$magFilter === void 0 ? gl.LINEAR : _params$magFilter; - width = width || data.width || 0; - height = height || data.height || 0; - var texture = gl.createTexture(); - var target; - var dataArray; // if data is a JS array but not a TypedArray, assume data is an array of images and create a GL Array Texture + var bsdf = "\n\n// Computes the exact value of the Fresnel factor\n// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/\nfloat fresnel(float cosTheta, float eta, float invEta) {\n eta = cosTheta > 0.0 ? 
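// Sketch of the CDF inversion performed by getEnvmapV/getEnvmapU above, in plain JS.
// cdf is assumed to be an ascending array with cdf[0] = 0 and cdf[cdf.length - 1] = 1,
// one entry longer than the number of rows/columns, matching the distribution texture.
function invertCdf(cdf, u) {
  var left = 0;
  var right = cdf.length;
  while (left < right) {            // binary search: first entry strictly greater than u
    var mid = (left + right) >> 1;
    if (cdf[mid] <= u) {
      left = mid + 1;
    } else {
      right = mid;
    }
  }
  var offset = left - 1;
  // interpolate inside the selected bucket, then normalize to [0, 1)
  var t = (u - cdf[offset]) / (cdf[offset + 1] - cdf[offset]);
  return (offset + t) / (cdf.length - 1);
}
// invertCdf([0, 0.25, 0.75, 1], 0.5) === 0.5  (the bright middle bucket absorbs half of all samples)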
eta : invEta;\n cosTheta = abs(cosTheta);\n\n float gSquared = eta * eta + cosTheta * cosTheta - 1.0;\n\n if (gSquared < 0.0) {\n return 1.0;\n }\n\n float g = sqrt(gSquared);\n\n float a = (g - cosTheta) / (g + cosTheta);\n float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0);\n\n return 0.5 * a * a * (1.0 + b * b);\n}\n\nfloat fresnelSchlickWeight(float cosTheta) {\n float w = 1.0 - cosTheta;\n return (w * w) * (w * w) * w;\n}\n\n// Computes Schlick's approximation of the Fresnel factor\n// Assumes ray is moving from a less dense to a more dense medium\nfloat fresnelSchlick(float cosTheta, float r0) {\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\n// Computes Schlick's approximation of Fresnel factor\n// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium\nfloat fresnelSchlickTIR(float cosTheta, float r0, float ni) {\n\n // moving from a more dense to a less dense medium\n if (cosTheta < 0.0) {\n float inv_eta = ni;\n float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta);\n if (SinT2 > 1.0) {\n return 1.0; // total internal reflection\n }\n cosTheta = sqrt(1.0f - SinT2);\n }\n\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\nfloat trowbridgeReitzD(float cosTheta, float alpha2) {\n float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0;\n return alpha2 / (PI * e * e);\n}\n\nfloat trowbridgeReitzLambda(float cosTheta, float alpha2) {\n float cos2Theta = cosTheta * cosTheta;\n float tan2Theta = (1.0 - cos2Theta) / cos2Theta;\n return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta));\n}\n\n// An implementation of Disney's principled BRDF\n// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf\nvec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) {\n vec3 halfVector = normalize(viewDir + lightDir);\n\n cosThetaL = abs(cosThetaL);\n float cosThetaV = abs(dot(si.normal, viewDir));\n float cosThetaH = abs(dot(si.normal, halfVector));\n float cosThetaD = abs(dot(lightDir, halfVector));\n\n float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness);\n\n float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness));\n float D = trowbridgeReitzD(cosThetaH, alpha2);\n\n float roughnessRemapped = 0.5 + 0.5 * si.roughness;\n float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped);\n\n float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped));\n\n float specular = F * D * G / (4.0 * cosThetaV * cosThetaL);\n float specularPdf = D * cosThetaH / (4.0 * cosThetaD);\n\n float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness;\n float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV));\n float diffusePdf = cosThetaL * INVPI;\n\n pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness);\n\n return mix(si.color * diffuse + specular, si.color * specular, si.metalness);\n}\n\n"; + + var sample = "\n\n// https://graphics.pixar.com/library/OrthonormalB/paper.pdf\nmat3 orthonormalBasis(vec3 n) {\n float zsign = n.z >= 0.0 ? 
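// JS mirror of the Schlick terms above. R0 is the normal-incidence reflectance for IOR 1.5,
// matching the constant defined in rayTraceCore.
var IOR_JS = 1.5;
var R0_JS = (1.0 - IOR_JS) * (1.0 - IOR_JS) / ((1.0 + IOR_JS) * (1.0 + IOR_JS)); // 0.04

function fresnelSchlickWeightJs(cosTheta) {
  var w = 1.0 - cosTheta;
  return (w * w) * (w * w) * w; // (1 - cosTheta)^5
}

// mix(weight, 1.0, r0) in the shader equals weight * (1 - r0) + r0,
// i.e. the familiar r0 + (1 - r0) * (1 - cosTheta)^5
function fresnelSchlickJs(cosTheta, r0) {
  return fresnelSchlickWeightJs(cosTheta) * (1.0 - r0) + r0;
}
// fresnelSchlickJs(1.0, R0_JS) === 0.04 (head-on), fresnelSchlickJs(0.0, R0_JS) === 1.0 (grazing)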
1.0 : -1.0;\n float a = -1.0 / (zsign + n.z);\n float b = n.x * n.y * a;\n vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x);\n vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y);\n return mat3(s, t, n);\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk\nvec2 sampleCircle(vec2 p) {\n p = 2.0 * p - 1.0;\n\n bool greater = abs(p.x) > abs(p.y);\n\n float r = greater ? p.x : p.y;\n float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y);\n\n return r * vec2(cos(theta), sin(theta));\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling\nvec3 cosineSampleHemisphere(vec2 p) {\n vec2 h = sampleCircle(p);\n float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y));\n return vec3(h, z);\n}\n\n\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs\n// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper\nvec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) {\n float phi = TWOPI * random.y;\n float alpha = roughness * roughness;\n float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x));\n float sinTheta = sqrt(1.0 - cosTheta * cosTheta);\n\n vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta);\n\n vec3 lightDir = reflect(-viewDir, halfVector);\n\n return lightDir;\n}\n\nvec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) {\n return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random);\n}\n\nfloat powerHeuristic(float f, float g) {\n return (f * f) / (f * f + g * g);\n}\n\n"; + + // Estimate the direct lighting integral using multiple importance sampling + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral + var sampleMaterial = "\n\nvec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) {\n vec3 li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec3 irr = textureLinear(envmap, uv).xyz;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n li = brdf * irr * abs(cosThetaL) * weight / lightPdf;\n\n return li;\n}\n\nvec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) {\n vec3 li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf 
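// JS mirror of powerHeuristic above, with two sanity checks. The heuristic down-weights
// whichever sampling strategy would have been unlikely to generate the sample, which is what
// keeps the combined estimator in sampleMaterial low-variance.
function powerHeuristicJs(f, g) {
  return (f * f) / (f * f + g * g);
}
// powerHeuristicJs(p, p) === 0.5 for any p  (both strategies equally likely: split the weight)
// powerHeuristicJs(1.0, 0.01) ~= 0.9999     (the strategy that produced the sample dominates)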
= envmapPdf(uv);\n\n vec3 irr = textureLinear(envmap, uv).rgb;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf;\n\n return li;\n}\n\nvoid sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n\n vec2 diffuseOrSpecular = randomSampleVec2();\n\n vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n bool lastBounce = bounce == BOUNCES;\n\n // Add path contribution\n path.li += path.beta * (\n importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) +\n importanceSampleMaterial(si, viewDir, lastBounce, lightDir)\n );\n\n // Get new path direction\n\n if (lastBounce) {\n return;\n }\n\n lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf);\n\n path.beta *= abs(cosThetaL) * brdf / scatteringPdf;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n}\n\n"; + + var sampleShadowCatcher = "\n\n#ifdef USE_SHADOW_CATCHER\n\nfloat importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) {\n float li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nfloat importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) {\n float li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / 
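// Sketch of how sampleMaterial above combines its two estimates. Each strategy (light sampling
// and BRDF sampling) contributes brdf * radiance * |cosTheta| / pdf, weighted by the power
// heuristic with its own pdf listed first. Scalars are used for brevity; the sample objects
// are placeholders for illustration, not renderer API.
function estimateDirect(lightSample, brdfSample) {
  var lightTerm = lightSample.brdf * lightSample.radiance * lightSample.cosTheta *
    powerHeuristicJs(lightSample.lightPdf, lightSample.scatteringPdf) / lightSample.lightPdf;
  var brdfTerm = brdfSample.brdf * brdfSample.radiance * brdfSample.cosTheta *
    powerHeuristicJs(brdfSample.scatteringPdf, brdfSample.lightPdf) / brdfSample.scatteringPdf;
  return lightTerm + brdfTerm;
}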
scatteringPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nvoid sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir);\n\n vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float alphaBounce = 0.0;\n\n vec3 li = path.beta * color * (\n importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) +\n importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce)\n );\n\n // alphaBounce contains the lighting of the shadow catcher *without* shadows\n alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce;\n\n // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher\n path.alpha *= alphaBounce;\n\n // we only want the alpha division to affect the shadow catcher\n // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution\n path.li *= alphaBounce;\n\n // add path contribution\n path.li += li;\n\n // Get new path direction\n\n lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n // lambertian brdf with terms cancelled\n path.beta *= color;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n\n // advance dimension index by unused stratified samples\n const int usedSamples = 6;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n}\n\n#endif\n\n"; - if (Array.isArray(data)) { - dataArray = data; - data = dataArray[0]; + var sampleGlass = "\n\n#ifdef USE_GLASS\n\nvoid sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) {\n vec3 viewDir = -path.ray.d;\n float cosTheta = dot(si.normal, viewDir);\n\n float F = si.materialType == THIN_GLASS ?\n fresnelSchlick(abs(cosTheta), R0) : // thin glass\n fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass\n\n vec3 lightDir;\n\n float reflectionOrRefraction = randomSample();\n\n if (reflectionOrRefraction < F) {\n lightDir = reflect(-viewDir, si.normal);\n } else {\n lightDir = si.materialType == THIN_GLASS ?\n refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass\n refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass\n path.beta *= si.color;\n }\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // advance sample index by unused stratified samples\n const int usedSamples = 1;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n\n path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0);\n}\n\n#endif\n\n"; + + var fragment$1 = { + includes: [constants$1, rayTraceCore, textureLinear, materialBuffer, intersect, surfaceInteractionDirect, random, envmap, bsdf, sample, sampleMaterial, sampleGlass, sampleShadowCatcher], + outputs: ['light'], + source: function source(defines) { + return "\n void bounce(inout Path path, int i, inout SurfaceInteraction si) {\n if (!si.hit) {\n if (path.specularBounce) {\n path.li += path.beta * sampleBackgroundFromDirection(path.ray.d);\n }\n\n path.abort = true;\n } else {\n #ifdef USE_GLASS\n if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) {\n sampleGlassSpecular(si, i, path);\n }\n #endif\n #ifdef USE_SHADOW_CATCHER\n if (si.materialType == SHADOW_CATCHER) {\n sampleShadowCatcher(si, i, path);\n }\n #endif\n if (si.materialType == STANDARD) {\n sampleMaterial(si, i, path);\n }\n\n // Russian Roulette sampling\n if (i >= 2) {\n float q = 1.0 - dot(path.beta, luminance);\n if (randomSample() < q) {\n path.abort = true;\n }\n path.beta /= 1.0 - q;\n }\n }\n }\n\n // Path tracing integrator as described in\n // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html#\n vec4 integrator(inout Ray ray) {\n Path path;\n path.ray = ray;\n path.li = vec3(0);\n path.alpha = 1.0;\n path.beta = vec3(1.0);\n path.specularBounce = true;\n path.abort = false;\n\n SurfaceInteraction si;\n\n // first surface interaction from g-buffer\n surfaceInteractionDirect(vCoord, si);\n\n // first surface interaction from ray interesction\n // intersectScene(path.ray, si);\n\n bounce(path, 1, si);\n\n // Manually unroll for loop.\n // Some hardware fails to iterate over a GLSL loop, so we provide this workaround\n // for (int i = 1; i < defines.bounces + 1, i += 1)\n // equivelant to\n ".concat(unrollLoop('i', 2, defines.BOUNCES + 1, 1, "\n if (!path.abort) {\n intersectScene(path.ray, si);\n bounce(path, i, si);\n }\n "), "\n\n return vec4(path.li, path.alpha);\n }\n\n void main() {\n initRandom();\n\n vec2 vCoordAntiAlias = vCoord + jitter;\n\n vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov));\n\n // Thin lens model with depth-of-field\n // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField\n // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2());\n // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane\n\n // vec3 origin = vec3(lensPoint, 0.0);\n // direction = normalize(focusPoint - origin);\n\n // origin = vec3(camera.transform * vec4(origin, 1.0));\n // direction = mat3(camera.transform) * direction;\n\n vec3 origin = camera.transform[3].xyz;\n direction = mat3(camera.transform) * direction;\n\n Ray cam;\n initRay(cam, origin, direction);\n\n vec4 liAndAlpha = integrator(cam);\n\n if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) {\n liAndAlpha = vec4(0, 0, 0, 1);\n }\n\n out_light = liAndAlpha;\n\n // Stratified Sampling Sample Count Test\n // ---------------\n // Uncomment the following code\n // Then observe the colors of the image\n // If:\n // * The resulting image is pure black\n // Extra samples are being passed to the shader that aren't being used.\n // * The resulting image contains red\n // Not enough samples are being passed to the shader\n // * The resulting image contains only white with some black\n // All samples are used by the shader. 
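// JS sketch of the Russian-roulette step inside bounce() above: from the second bounce on, a
// path is terminated with probability q = 1 - luminance(beta), and survivors are re-weighted
// by 1 / (1 - q) so the estimate stays unbiased. beta is an [r, g, b] array here, and the
// luminance weights match the constants defined in rayTraceCore.
function russianRoulette(beta, bounce, randomValue) {
  if (bounce < 2) {
    return beta;
  }
  var q = 1.0 - (0.2126 * beta[0] + 0.7152 * beta[1] + 0.0722 * beta[2]);
  if (randomValue < q) {
    return null; // path terminated
  }
  return beta.map(function (b) { return b / (1.0 - q); });
}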
Correct result!\n\n // fragColor = vec4(0, 0, 0, 1);\n // if (sampleIndex == SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 1, 1, 1);\n // } else if (sampleIndex > SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 0, 0, 1);\n // }\n}\n"); } + }; - target = dataArray || length > 1 ? gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; - gl.activeTexture(gl.TEXTURE0); - gl.bindTexture(target, texture); - gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); - gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); - gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); - gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - if (!channels) { - if (data && data.length) { - channels = data.length / (width * height); // infer number of channels from data size - } else { - channels = 4; - } - } + Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, + instead of being evenly spaced across the domain. + This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. - channels = clamp(channels, 1, 4); - var format = [gl.RED, gl.RG, gl.RGB, gl.RGBA][channels - 1]; - var isByteArray = storage === 'byte' || data instanceof Uint8Array || data instanceof HTMLImageElement || data instanceof HTMLCanvasElement || data instanceof ImageData; - var isFloatArray = storage === 'float' || data instanceof Float32Array; - var type; - var internalFormat; + We can reduce the amount of clustering of random numbers by using stratified sampling. + Stratification divides the [0, 1) range into partitions, or stratum, of equal size. + Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. + When every stratum has been sampled once, this sequence is shuffled again and the process repeats. - if (isByteArray) { - type = gl.UNSIGNED_BYTE; - internalFormat = [gl.R8, gl.RG8, gammaCorrection ? gl.SRGB8 : gl.RGB8, gammaCorrection ? gl.SRGB8_ALPHA8 : gl.RGBA8][channels - 1]; - } else if (isFloatArray) { - type = gl.FLOAT; - internalFormat = [gl.R32F, gl.RG32F, gl.RGB32F, gl.RGBA32F][channels - 1]; - } else { - console.error('Texture of unknown type:', storage || data); + The returned sample ranges between [0, numberOfStratum). + The integer part ideintifies the stratum (the first stratum being 0). + The fractional part is the random number. + + To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. + */ + function makeStratifiedSampler(strataCount, dimensions) { + var strata = []; + var l = Math.pow(strataCount, dimensions); + + for (var i = 0; i < l; i++) { + strata[i] = i; } - if (dataArray) { - gl.texStorage3D(target, 1, internalFormat, width, height, dataArray.length); + var index = strata.length; + var sample = []; - for (var i = 0; i < dataArray.length; i++) { - // if layer is an HTMLImageElement, use the .width and .height properties of each layer - // otherwise use the max size of the array texture - var layerWidth = dataArray[i].width || width; - var layerHeight = dataArray[i].height || height; - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? 
flipY[i] : flipY); - gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); + function restart() { + index = 0; + } + + function next() { + if (index >= strata.length) { + shuffle(strata); + restart(); } - } else if (length > 1) { - // create empty array texture - gl.texStorage3D(target, 1, internalFormat, width, height, length); - } else { - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); - gl.texStorage2D(target, 1, internalFormat, width, height); - if (data) { - gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); + var stratum = strata[index++]; + + for (var _i = 0; _i < dimensions; _i++) { + sample[_i] = stratum % strataCount + Math.random(); + stratum = Math.floor(stratum / strataCount); } - } // return state to default + return sample; + } - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false); return { - target: target, - texture: texture + next: next, + restart: restart, + strataCount: strataCount }; } - // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property - function getTexturesFromMaterials(meshes, textureNames) { - var textureMap = {}; + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + + It is computationally unfeasible to compute stratified sampling for large dimensions (>2) + Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension + e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. + This reaps many benefits of stratification while still allowing for small strata sizes. + */ + function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { + var strataObjs = []; var _iteratorNormalCompletion = true; var _didIteratorError = false; var _iteratorError = undefined; try { - for (var _iterator = textureNames[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var name = _step.value; - var textures = []; - textureMap[name] = { - indices: texturesFromMaterials(meshes, name, textures), - textures: textures - }; + for (var _iterator = listOfDimensions[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var dim = _step.value; + strataObjs.push(makeStratifiedSampler(strataCount, dim)); } } catch (err) { _didIteratorError = true; @@ -1958,195 +2494,63 @@ } } - return textureMap; - } // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties - - function mergeTexturesFromMaterials(meshes, textureNames) { - var textureMap = { - textures: [], - indices: {} - }; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = textureNames[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var name = _step2.value; - textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - return textureMap; - } - - function texturesFromMaterials(materials, textureName, textures) { - var indices = []; - var 
_iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; + var combined = []; - try { - for (var _iterator3 = materials[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var material = _step3.value; + function next() { + var i = 0; - if (!material[textureName]) { - indices.push(-1); - } else { - var index = textures.length; + for (var _i = 0, _strataObjs = strataObjs; _i < _strataObjs.length; _i++) { + var strata = _strataObjs[_i]; + var nums = strata.next(); + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; - for (var i = 0; i < textures.length; i++) { - if (textures[i] === material[textureName]) { - // Reuse existing duplicate texture. - index = i; - break; - } + try { + for (var _iterator2 = nums[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var num = _step2.value; + combined[i++] = num; } - - if (index === textures.length) { - // New texture. Add texture to list. - textures.push(material[textureName]); + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } } - - indices.push(index); - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { - _iterator3["return"](); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; } } - } - - return indices; - } - - // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout - - function uploadBuffers(gl, program, bufferData) { - var _ref, _ref2; - - var materialBuffer = makeUniformBuffer(gl, program, 'Materials'); - var _bufferData$color = bufferData.color, - color = _bufferData$color === void 0 ? [] : _bufferData$color, - _bufferData$roughness = bufferData.roughness, - roughness = _bufferData$roughness === void 0 ? [] : _bufferData$roughness, - _bufferData$metalness = bufferData.metalness, - metalness = _bufferData$metalness === void 0 ? [] : _bufferData$metalness, - _bufferData$normalSca = bufferData.normalScale, - normalScale = _bufferData$normalSca === void 0 ? [] : _bufferData$normalSca, - _bufferData$type = bufferData.type, - type = _bufferData$type === void 0 ? [] : _bufferData$type, - _bufferData$diffuseMa = bufferData.diffuseMapIndex, - diffuseMapIndex = _bufferData$diffuseMa === void 0 ? [] : _bufferData$diffuseMa, - _bufferData$diffuseMa2 = bufferData.diffuseMapSize, - diffuseMapSize = _bufferData$diffuseMa2 === void 0 ? [] : _bufferData$diffuseMa2, - _bufferData$normalMap = bufferData.normalMapIndex, - normalMapIndex = _bufferData$normalMap === void 0 ? [] : _bufferData$normalMap, - _bufferData$normalMap2 = bufferData.normalMapSize, - normalMapSize = _bufferData$normalMap2 === void 0 ? [] : _bufferData$normalMap2, - _bufferData$roughness2 = bufferData.roughnessMapIndex, - roughnessMapIndex = _bufferData$roughness2 === void 0 ? [] : _bufferData$roughness2, - _bufferData$metalness2 = bufferData.metalnessMapIndex, - metalnessMapIndex = _bufferData$metalness2 === void 0 ? [] : _bufferData$metalness2, - _bufferData$pbrMapSiz = bufferData.pbrMapSize, - pbrMapSize = _bufferData$pbrMapSiz === void 0 ? 
[] : _bufferData$pbrMapSiz; - materialBuffer.set('Materials.colorAndMaterialType[0]', interleave({ - data: (_ref = []).concat.apply(_ref, _toConsumableArray(color.map(function (d) { - return d.toArray(); - }))), - channels: 3 - }, { - data: type, - channels: 1 - })); - materialBuffer.set('Materials.roughnessMetalnessNormalScale[0]', interleave({ - data: roughness, - channels: 1 - }, { - data: metalness, - channels: 1 - }, { - data: (_ref2 = []).concat.apply(_ref2, _toConsumableArray(normalScale.map(function (d) { - return d.toArray(); - }))), - channels: 2 - })); - materialBuffer.set('Materials.diffuseNormalRoughnessMetalnessMapIndex[0]', interleave({ - data: diffuseMapIndex, - channels: 1 - }, { - data: normalMapIndex, - channels: 1 - }, { - data: roughnessMapIndex, - channels: 1 - }, { - data: metalnessMapIndex, - channels: 1 - })); - materialBuffer.set('Materials.diffuseNormalMapSize[0]', interleave({ - data: diffuseMapSize, - channels: 2 - }, { - data: normalMapSize, - channels: 2 - })); - materialBuffer.set('Materials.pbrMapSize[0]', pbrMapSize); - materialBuffer.bind(0); - } - function interleave() { - for (var _len = arguments.length, arrays = new Array(_len), _key = 0; _key < _len; _key++) { - arrays[_key] = arguments[_key]; + return combined; } - var maxLength = arrays.reduce(function (m, a) { - return Math.max(m, a.data.length / a.channels); - }, 0); - var interleaved = []; - - for (var i = 0; i < maxLength; i++) { - for (var j = 0; j < arrays.length; j++) { - var _arrays$j = arrays[j], - data = _arrays$j.data, - channels = _arrays$j.channels; - - for (var c = 0; c < channels; c++) { - interleaved.push(data[i * channels + c]); - } + function restart() { + for (var _i2 = 0, _strataObjs2 = strataObjs; _i2 < _strataObjs2.length; _i2++) { + var strata = _strataObjs2[_i2]; + strata.restart(); } } - return interleaved; + return { + next: next, + restart: restart, + strataCount: strataCount + }; } function makeRayTracePass(gl, _ref) { var bounces = _ref.bounces, + decomposedScene = _ref.decomposedScene, fullscreenQuad = _ref.fullscreenQuad, - optionalExtensions = _ref.optionalExtensions, - scene = _ref.scene; + materialBuffer = _ref.materialBuffer, + mergedMesh = _ref.mergedMesh, + optionalExtensions = _ref.optionalExtensions; bounces = clamp(bounces, 1, 6); var samplingDimensions = []; @@ -2164,11 +2568,13 @@ var samples; var renderPass = makeRenderPassFromScene({ bounces: bounces, + decomposedScene: decomposedScene, fullscreenQuad: fullscreenQuad, gl: gl, + materialBuffer: materialBuffer, + mergedMesh: mergedMesh, optionalExtensions: optionalExtensions, - samplingDimensions: samplingDimensions, - scene: scene + samplingDimensions: samplingDimensions }); function setSize(width, height) { @@ -2179,9 +2585,9 @@ function setNoise(noiseImage) { renderPass.setTexture('noise', makeTexture(gl, { data: noiseImage, - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, - storage: 'float' + wrapS: gl.REPEAT, + wrapT: gl.REPEAT, + storage: 'halfFloat' })); } @@ -2195,6 +2601,19 @@ renderPass.setUniform('jitter', x, y); } + function setGBuffers(_ref2) { + var position = _ref2.position, + normal = _ref2.normal, + faceNormal = _ref2.faceNormal, + color = _ref2.color, + matProps = _ref2.matProps; + renderPass.setTexture('gPosition', position); + renderPass.setTexture('gNormal', normal); + renderPass.setTexture('gFaceNormal', faceNormal); + renderPass.setTexture('gColor', color); + renderPass.setTexture('gMatProps', matProps); + } + function nextSeed() { renderPass.setUniform('stratifiedSamples[0]', 
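// Usage sketch for the samplers above: a 2D + 2D combined sampler with 4 strata per dimension.
// next() returns one value per dimension in [0, strataCount); dividing by strataCount, as the
// comment block describes, yields stratified samples in [0, 1).
var combinedSampler = makeStratifiedSamplerCombined(4, [2, 2]);
var rawSamples = combinedSampler.next(); // e.g. [2.37, 0.91, 3.02, 1.54]
var normalizedSamples = rawSamples.map(function (x) { return x / combinedSampler.strataCount; }); // each in [0, 1)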
samples.next()); } @@ -2230,128 +2649,58 @@ outputLocs: renderPass.outputLocs, setCamera: setCamera, setJitter: setJitter, + setGBuffers: setGBuffers, setNoise: setNoise, setSize: setSize, setStrataCount: setStrataCount }; } - function makeRenderPassFromScene(_ref2) { - var bounces = _ref2.bounces, - fullscreenQuad = _ref2.fullscreenQuad, - gl = _ref2.gl, - optionalExtensions = _ref2.optionalExtensions, - samplingDimensions = _ref2.samplingDimensions, - scene = _ref2.scene; + function makeRenderPassFromScene(_ref3) { + var bounces = _ref3.bounces, + decomposedScene = _ref3.decomposedScene, + fullscreenQuad = _ref3.fullscreenQuad, + gl = _ref3.gl, + materialBuffer = _ref3.materialBuffer, + mergedMesh = _ref3.mergedMesh, + optionalExtensions = _ref3.optionalExtensions, + samplingDimensions = _ref3.samplingDimensions; var OES_texture_float_linear = optionalExtensions.OES_texture_float_linear; - - var _decomposeScene = decomposeScene(scene), - meshes = _decomposeScene.meshes, - directionalLights = _decomposeScene.directionalLights, - ambientLights = _decomposeScene.ambientLights, - environmentLights = _decomposeScene.environmentLights; - - if (meshes.length === 0) { - throw 'RayTracingRenderer: Scene contains no renderable meshes.'; - } // merge meshes in scene to a single, static geometry - - - var _mergeMeshesToGeometr = mergeMeshesToGeometry(meshes), - geometry = _mergeMeshesToGeometr.geometry, - materials = _mergeMeshesToGeometr.materials, - materialIndices = _mergeMeshesToGeometr.materialIndices; // extract textures shared by meshes in scene - - - var maps = getTexturesFromMaterials(materials, ['map', 'normalMap']); - var pbrMap = mergeTexturesFromMaterials(materials, ['roughnessMap', 'metalnessMap']); // create bounding volume hierarchy from a static scene - - var bvh = bvhAccel(geometry, materialIndices); + var background = decomposedScene.background, + directionalLights = decomposedScene.directionalLights, + ambientLights = decomposedScene.ambientLights, + environmentLights = decomposedScene.environmentLights; + var geometry = mergedMesh.geometry, + materials = mergedMesh.materials, + materialIndices = mergedMesh.materialIndices; // create bounding volume hierarchy from a static scene + + var bvh = bvhAccel(geometry); var flattenedBvh = flattenBvh(bvh); var numTris = geometry.index.count / 3; - var useGlass = materials.some(function (m) { - return m.transparent; - }); - var useShadowCatcher = materials.some(function (m) { - return m.shadowCatcher; - }); var renderPass = makeRenderPass(gl, { - defines: { + defines: _objectSpread2({ OES_texture_float_linear: OES_texture_float_linear, BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, STACK_SIZE: flattenedBvh.maxDepth, - NUM_TRIS: numTris, - NUM_MATERIALS: materials.length, - NUM_DIFFUSE_MAPS: maps.map.textures.length, - NUM_NORMAL_MAPS: maps.normalMap.textures.length, - NUM_DIFFUSE_NORMAL_MAPS: Math.max(maps.map.textures.length, maps.normalMap.textures.length), - NUM_PBR_MAPS: pbrMap.textures.length, BOUNCES: bounces, - USE_GLASS: useGlass, - USE_SHADOW_CATCHER: useShadowCatcher, + USE_GLASS: materials.some(function (m) { + return m.transparent; + }), + USE_SHADOW_CATCHER: materials.some(function (m) { + return m.shadowCatcher; + }), SAMPLING_DIMENSIONS: samplingDimensions.reduce(function (a, b) { return a + b; }) - }, - fragment: fragment, + }, 
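// Worked numbers for the *_COLUMNS defines above, assuming textureDimensionsFromArray keeps
// its current behaviour (columnsLog = round(log2(sqrt(count)))). For a scene with 10,000
// triangles the index data is laid out as:
var exampleCount = 10000;
var exampleColumnsLog = Math.round(Math.log2(Math.sqrt(exampleCount))); // 7  -> INDEX_COLUMNS
var exampleColumns = Math.pow(2, exampleColumnsLog);                    // 128 texels per row
var exampleRows = Math.ceil(exampleCount / exampleColumns);             // 79 rows
// so the shader decodes a flat index i as row = i >> 7, column = i - (row << 7)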
materialBuffer.defines), + fragment: fragment$1, vertex: fullscreenQuad.vertexShader }); - var bufferData = {}; - bufferData.color = materials.map(function (m) { - return m.color; - }); - bufferData.roughness = materials.map(function (m) { - return m.roughness; - }); - bufferData.metalness = materials.map(function (m) { - return m.metalness; - }); - bufferData.normalScale = materials.map(function (m) { - return m.normalScale; - }); - bufferData.type = materials.map(function (m) { - if (m.shadowCatcher) { - return ShadowCatcherMaterial; - } - - if (m.transparent) { - return m.solid ? ThickMaterial : ThinMaterial; - } - }); - - if (maps.map.textures.length > 0) { - var _makeTextureArray = makeTextureArray$1(gl, maps.map.textures, true), - relativeSizes = _makeTextureArray.relativeSizes, - texture = _makeTextureArray.texture; - - renderPass.setTexture('diffuseMap', texture); - bufferData.diffuseMapSize = relativeSizes; - bufferData.diffuseMapIndex = maps.map.indices; - } - - if (maps.normalMap.textures.length > 0) { - var _makeTextureArray2 = makeTextureArray$1(gl, maps.normalMap.textures, false), - _relativeSizes = _makeTextureArray2.relativeSizes, - _texture = _makeTextureArray2.texture; - - renderPass.setTexture('normalMap', _texture); - bufferData.normalMapSize = _relativeSizes; - bufferData.normalMapIndex = maps.normalMap.indices; - } - - if (pbrMap.textures.length > 0) { - var _makeTextureArray3 = makeTextureArray$1(gl, pbrMap.textures, false), - _relativeSizes2 = _makeTextureArray3.relativeSizes, - _texture2 = _makeTextureArray3.texture; - - renderPass.setTexture('pbrMap', _texture2); - bufferData.pbrMapSize = _relativeSizes2; - bufferData.roughnessMapIndex = pbrMap.indices.roughnessMap; - bufferData.metalnessMapIndex = pbrMap.indices.metalnessMap; - } - - uploadBuffers(gl, renderPass.program, bufferData); + renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); + renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); + renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); renderPass.setTexture('positions', makeDataTexture(gl, geometry.getAttribute('position').array, 3)); renderPass.setTexture('normals', makeDataTexture(gl, geometry.getAttribute('normal').array, 3)); renderPass.setTexture('uvs', makeDataTexture(gl, geometry.getAttribute('uv').array, 2)); @@ -2359,195 +2708,59 @@ var envImage = generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights); var envImageTextureObject = makeTexture(gl, { data: envImage.data, + storage: 'halfFloat', minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, - magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, - width: envImage.width, - height: envImage.height - }); - renderPass.setTexture('envmap', envImageTextureObject); - var backgroundImageTextureObject; - - if (scene.background) { - var backgroundImage = generateBackgroundMapFromSceneBackground(scene.background); - backgroundImageTextureObject = makeTexture(gl, { - data: backgroundImage.data, - minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, - magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, - width: backgroundImage.width, - height: backgroundImage.height - }); - } else { - backgroundImageTextureObject = envImageTextureObject; - } - - renderPass.setTexture('backgroundMap', backgroundImageTextureObject); - var distribution = envmapDistribution(envImage); - renderPass.setTexture('envmapDistribution', makeTexture(gl, { - data: distribution.data, - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, - width: distribution.width, - height: distribution.height - })); - return renderPass; - } - - function decomposeScene(scene) { - var meshes = []; - var directionalLights = []; - var ambientLights = []; - var environmentLights = []; - scene.traverse(function (child) { - if (child.isMesh) { - if (!child.geometry || !child.geometry.getAttribute('position')) { - console.warn(child, 'must have a geometry property with a position attribute'); - } else if (!child.material.isMeshStandardMaterial) { - console.warn(child, 'must use MeshStandardMaterial in order to be rendered.'); - } else { - meshes.push(child); - } - } - - if (child.isDirectionalLight) { - directionalLights.push(child); - } - - if (child.isAmbientLight) { - ambientLights.push(child); - } - - if (child.isEnvironmentLight) { - if (environmentLights.length > 1) { - console.warn(environmentLights, 'only one environment light can be used per scene'); - } // Valid lights have HDR texture map in RGBEEncoding - - - if (isHDRTexture(child)) { - environmentLights.push(child); - } else { - console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); - } - } - }); - return { - meshes: meshes, - directionalLights: directionalLights, - ambientLights: ambientLights, - environmentLights: environmentLights - }; - } - - function textureDimensionsFromArray(count) { - var columnsLog = Math.round(Math.log2(Math.sqrt(count))); - var columns = Math.pow(2, columnsLog); - var rows = Math.ceil(count / columns); - return { - columnsLog: columnsLog, - columns: columns, - rows: rows, - size: rows * columns - }; - } - - function makeDataTexture(gl, dataArray, channels) { - var textureDim = textureDimensionsFromArray(dataArray.length / channels); - return makeTexture(gl, { - data: padArray(dataArray, channels * textureDim.size), - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, - width: textureDim.columns, - height: textureDim.rows - }); - } - - function makeTextureArray$1(gl, textures) { - var gammaCorrection = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : false; - var images = textures.map(function (t) { - return t.image; - }); - var flipY = textures.map(function (t) { - return t.flipY; - }); - - var _maxImageSize = maxImageSize(images), - maxSize = _maxImageSize.maxSize, - relativeSizes = _maxImageSize.relativeSizes; // create GL Array Texture from individual textures - - - var texture = makeTexture(gl, { - width: maxSize.width, - height: maxSize.height, - gammaCorrection: gammaCorrection, - data: images, - flipY: flipY, - channels: 3 - }); - return { - texture: texture, - relativeSizes: relativeSizes - }; - } - - function maxImageSize(images) { - var maxSize = { - width: 0, - height: 0 - }; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = images[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var image = _step.value; - maxSize.width = Math.max(maxSize.width, image.width); - maxSize.height = Math.max(maxSize.height, image.height); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - - var relativeSizes = []; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = images[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var _image = _step2.value; - relativeSizes.push(_image.width / maxSize.width); - relativeSizes.push(_image.height / maxSize.height); - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } + magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, + width: envImage.width, + height: envImage.height + }); + renderPass.setTexture('envmap', envImageTextureObject); + var backgroundImageTextureObject; + + if (background) { + var backgroundImage = generateBackgroundMapFromSceneBackground(background); + backgroundImageTextureObject = makeTexture(gl, { + data: backgroundImage.data, + storage: 'halfFloat', + minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, + magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, + width: backgroundImage.width, + height: backgroundImage.height + }); + } else { + backgroundImageTextureObject = envImageTextureObject; } + renderPass.setTexture('backgroundMap', backgroundImageTextureObject); + var distribution = envmapDistribution(envImage); + renderPass.setTexture('envmapDistribution', makeTexture(gl, { + data: distribution.data, + storage: 'halfFloat', + width: distribution.width, + height: distribution.height + })); + return renderPass; + } + + function textureDimensionsFromArray(count) { + var columnsLog = Math.round(Math.log2(Math.sqrt(count))); + var columns = Math.pow(2, columnsLog); + var rows = Math.ceil(count / columns); return { - maxSize: maxSize, - relativeSizes: relativeSizes + columnsLog: columnsLog, + columns: columns, + rows: rows, + size: rows * columns }; + } + + function makeDataTexture(gl, dataArray, channels) { + var textureDim = textureDimensionsFromArray(dataArray.length / channels); + return makeTexture(gl, { + data: padArray(dataArray, channels * textureDim.size), + width: textureDim.columns, + height: textureDim.rows + }); } // expand array to the given length @@ -2557,89 +2770,100 @@ return newArray; } - function isHDRTexture(texture) { - return texture.map && texture.map.image && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); - } - - var fragment$1 = { + var fragment$2 = { + outputs: ['light'], includes: [textureLinear], - outputs: ['color'], - source: "\n in vec2 vCoord;\n\n uniform mediump sampler2D light;\n\n uniform vec2 textureScale;\n\n // Tonemapping functions from THREE.js\n\n vec3 linear(vec3 color) {\n return color;\n }\n // https://www.cs.utah.edu/~reinhard/cdrom/\n vec3 reinhard(vec3 color) {\n return clamp(color / (vec3(1.0) + color), vec3(0.0), vec3(1.0));\n }\n // http://filmicworlds.com/blog/filmic-tonemapping-operators/\n #define uncharted2Helper(x) max(((x * (0.15 * x + 0.10 * 0.50) + 0.20 * 0.02) / (x * (0.15 * x + 0.50) + 0.20 * 0.30)) - 0.02 / 0.30, vec3(0.0))\n const vec3 uncharted2WhitePoint = 1.0 / uncharted2Helper(vec3(WHITE_POINT));\n vec3 uncharted2( vec3 color ) {\n // John Hable's filmic operator from Uncharted 2 video game\n return clamp(uncharted2Helper(color) * uncharted2WhitePoint, vec3(0.0), vec3(1.0));\n }\n // http://filmicworlds.com/blog/filmic-tonemapping-operators/\n vec3 cineon( vec3 color ) {\n // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson\n color = max(vec3( 0.0 ), color - 0.004);\n return pow((color * (6.2 * color + 0.5)) / (color * (6.2 * color + 1.7) + 0.06), vec3(2.2));\n }\n // https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/\n vec3 acesFilmic( vec3 color ) {\n return clamp((color * (2.51 * color + 0.03)) / (color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0));\n }\n\n void main() {\n vec4 tex = texture(light, textureScale * vCoord);\n\n // alpha channel stores the number of samples progressively rendered\n // divide the sum of light by alpha to obtain average contribution of light\n\n // in addition, alpha contains a scale factor for the shadow catcher material\n // dividing by alpha normalizes the brightness of the shadow catcher to match the background envmap.\n vec3 light = tex.rgb / tex.a;\n\n light *= EXPOSURE;\n\n light = TONE_MAPPING(light);\n\n light = pow(light, vec3(1.0 / 2.2)); // gamma correction\n\n out_color = vec4(light, 1.0);\n }\n" + source: "\n in vec2 vCoord;\n\n uniform mediump sampler2D light;\n uniform mediump sampler2D position;\n uniform 
vec2 lightScale;\n uniform vec2 previousLightScale;\n\n uniform mediump sampler2D previousLight;\n uniform mediump sampler2D previousPosition;\n\n uniform mat4 historyCamera;\n uniform float blendAmount;\n uniform vec2 jitter;\n\n vec2 reproject(vec3 position) {\n vec4 historyCoord = historyCamera * vec4(position, 1.0);\n return 0.5 * historyCoord.xy / historyCoord.w + 0.5;\n }\n\n float getMeshId(sampler2D meshIdTex, vec2 vCoord) {\n return floor(texture(meshIdTex, vCoord).w);\n }\n\n void main() {\n vec3 currentPosition = textureLinear(position, vCoord).xyz;\n float currentMeshId = getMeshId(position, vCoord);\n\n vec4 currentLight = texture(light, lightScale * vCoord);\n\n if (currentMeshId == 0.0) {\n out_light = currentLight;\n return;\n }\n\n vec2 hCoord = reproject(currentPosition) - jitter;\n\n vec2 hSizef = previousLightScale * vec2(textureSize(previousLight, 0));\n vec2 hSizeInv = 1.0 / hSizef;\n ivec2 hSize = ivec2(hSizef);\n\n vec2 hTexelf = hCoord * hSizef - 0.5;\n ivec2 hTexel = ivec2(hTexelf);\n vec2 f = fract(hTexelf);\n\n ivec2 texel[] = ivec2[](\n hTexel + ivec2(0, 0),\n hTexel + ivec2(1, 0),\n hTexel + ivec2(0, 1),\n hTexel + ivec2(1, 1)\n );\n\n float weights[] = float[](\n (1.0 - f.x) * (1.0 - f.y),\n f.x * (1.0 - f.y),\n (1.0 - f.x) * f.y,\n f.x * f.y\n );\n\n vec4 history;\n float sum;\n\n // bilinear sampling, rejecting samples that don't have a matching mesh id\n for (int i = 0; i < 4; i++) {\n vec2 gCoord = (vec2(texel[i]) + 0.5) * hSizeInv;\n\n float histMeshId = getMeshId(previousPosition, gCoord);\n\n float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel[i], hSize)) ? 0.0 : 1.0;\n\n float weight = isValid * weights[i];\n history += weight * texelFetch(previousLight, texel[i], 0);\n sum += weight;\n }\n\n if (sum > 0.0) {\n history /= sum;\n } else {\n // If all samples of bilinear fail, try a 3x3 box filter\n hTexel = ivec2(hTexelf + 0.5);\n\n for (int x = -1; x <= 1; x++) {\n for (int y = -1; y <= 1; y++) {\n ivec2 texel = hTexel + ivec2(x, y);\n vec2 gCoord = (vec2(texel) + 0.5) * hSizeInv;\n\n float histMeshId = getMeshId(previousPosition, gCoord);\n\n float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel, hSize)) ? 0.0 : 1.0;\n\n float weight = isValid;\n vec4 h = texelFetch(previousLight, texel, 0);\n history += weight * h;\n sum += weight;\n }\n }\n history = sum > 0.0 ? 
history / sum : history;\n }\n\n if (history.w > MAX_SAMPLES) {\n history.xyz *= MAX_SAMPLES / history.w;\n history.w = MAX_SAMPLES;\n }\n\n out_light = blendAmount * history + currentLight;\n }\n" }; - var _toneMapFunctions; - var toneMapFunctions = (_toneMapFunctions = {}, _defineProperty(_toneMapFunctions, THREE$1.LinearToneMapping, 'linear'), _defineProperty(_toneMapFunctions, THREE$1.ReinhardToneMapping, 'reinhard'), _defineProperty(_toneMapFunctions, THREE$1.Uncharted2ToneMapping, 'uncharted2'), _defineProperty(_toneMapFunctions, THREE$1.CineonToneMapping, 'cineon'), _defineProperty(_toneMapFunctions, THREE$1.ACESFilmicToneMapping, 'acesFilmic'), _toneMapFunctions); - function makeToneMapPass(gl, params) { + function makeReprojectPass(gl, params) { var fullscreenQuad = params.fullscreenQuad, - toneMappingParams = params.toneMappingParams; // const { OES_texture_float_linear } = optionalExtensions; - - var toneMapping = toneMappingParams.toneMapping, - whitePoint = toneMappingParams.whitePoint, - exposure = toneMappingParams.exposure; + maxReprojectedSamples = params.maxReprojectedSamples; var renderPass = makeRenderPass(gl, { - gl: gl, defines: { - // OES_texture_float_linear, - TONE_MAPPING: toneMapFunctions[toneMapping] || 'linear', - WHITE_POINT: whitePoint.toExponential(), - // toExponential allows integers to be represented as GLSL floats - EXPOSURE: exposure.toExponential() + MAX_SAMPLES: maxReprojectedSamples.toFixed(1) }, vertex: fullscreenQuad.vertexShader, - fragment: fragment$1 + fragment: fragment$2 }); + var historyCamera = new THREE$1.Matrix4(); + + function setPreviousCamera(camera) { + historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); + renderPass.setUniform('historyCamera', historyCamera.elements); + } + + function setJitter(x, y) { + renderPass.setUniform('jitter', x, y); + } function draw(params) { - var light = params.light, - textureScale = params.textureScale; - renderPass.setUniform('textureScale', textureScale.x, textureScale.y); + var blendAmount = params.blendAmount, + light = params.light, + lightScale = params.lightScale, + position = params.position, + previousLight = params.previousLight, + previousLightScale = params.previousLightScale, + previousPosition = params.previousPosition; + renderPass.setUniform('blendAmount', blendAmount); + renderPass.setUniform('lightScale', lightScale.x, lightScale.y); + renderPass.setUniform('previousLightScale', previousLightScale.x, previousLightScale.y); renderPass.setTexture('light', light); + renderPass.setTexture('position', position); + renderPass.setTexture('previousLight', previousLight); + renderPass.setTexture('previousPosition', previousPosition); renderPass.useProgram(); fullscreenQuad.draw(); } return { - draw: draw + draw: draw, + setJitter: setJitter, + setPreviousCamera: setPreviousCamera }; } - function makeFramebuffer(gl, _ref) { - var attachments = _ref.attachments; - var framebuffer = gl.createFramebuffer(); - - function bind() { - gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); - } - - function unbind() { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - - function init() { - bind(); - var drawBuffers = []; - - for (var location in attachments) { - location = Number(location); - - if (location === undefined) { - console.error('invalid location'); - } + var fragment$3 = { + includes: [textureLinear], + outputs: ['color'], + source: "\n in vec2 vCoord;\n\n uniform sampler2D light;\n uniform sampler2D position;\n\n uniform vec2 lightScale;\n\n // Tonemapping functions 
from THREE.js\n\n vec3 linear(vec3 color) {\n return color;\n }\n // https://www.cs.utah.edu/~reinhard/cdrom/\n vec3 reinhard(vec3 color) {\n return clamp(color / (vec3(1.0) + color), vec3(0.0), vec3(1.0));\n }\n // http://filmicworlds.com/blog/filmic-tonemapping-operators/\n #define uncharted2Helper(x) max(((x * (0.15 * x + 0.10 * 0.50) + 0.20 * 0.02) / (x * (0.15 * x + 0.50) + 0.20 * 0.30)) - 0.02 / 0.30, vec3(0.0))\n const vec3 uncharted2WhitePoint = 1.0 / uncharted2Helper(vec3(WHITE_POINT));\n vec3 uncharted2( vec3 color ) {\n // John Hable's filmic operator from Uncharted 2 video game\n return clamp(uncharted2Helper(color) * uncharted2WhitePoint, vec3(0.0), vec3(1.0));\n }\n // http://filmicworlds.com/blog/filmic-tonemapping-operators/\n vec3 cineon( vec3 color ) {\n // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson\n color = max(vec3( 0.0 ), color - 0.004);\n return pow((color * (6.2 * color + 0.5)) / (color * (6.2 * color + 1.7) + 0.06), vec3(2.2));\n }\n // https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/\n vec3 acesFilmic( vec3 color ) {\n return clamp((color * (2.51 * color + 0.03)) / (color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0));\n }\n\n #ifdef EDGE_PRESERVING_UPSCALE\n vec4 getUpscaledLight(vec2 coord) {\n float meshId = texture(position, coord).w;\n\n vec2 sizef = lightScale * vec2(textureSize(position, 0));\n vec2 texelf = coord * sizef - 0.5;\n ivec2 texel = ivec2(texelf);\n vec2 f = fract(texelf);\n\n ivec2 texels[] = ivec2[](\n texel + ivec2(0, 0),\n texel + ivec2(1, 0),\n texel + ivec2(0, 1),\n texel + ivec2(1, 1)\n );\n\n float weights[] = float[](\n (1.0 - f.x) * (1.0 - f.y),\n f.x * (1.0 - f.y),\n (1.0 - f.x) * f.y,\n f.x * f.y\n );\n\n vec4 upscaledLight;\n float sum;\n for (int i = 0; i < 4; i++) {\n vec2 pCoord = (vec2(texels[i]) + 0.5) / sizef;\n float isValid = texture(position, pCoord).w == meshId ? 
1.0 : 0.0;\n float weight = isValid * weights[i];\n upscaledLight += weight * texelFetch(light, texels[i], 0);\n sum += weight;\n }\n\n if (sum > 0.0) {\n upscaledLight /= sum;\n } else {\n upscaledLight = texture(light, lightScale * coord);\n }\n\n return upscaledLight;\n }\n #endif\n\n void main() {\n #ifdef EDGE_PRESERVING_UPSCALE\n vec4 upscaledLight = getUpscaledLight(vCoord);\n #else\n vec4 upscaledLight = texture(light, lightScale * vCoord);\n #endif\n\n // alpha channel stores the number of samples progressively rendered\n // divide the sum of light by alpha to obtain average contribution of light\n\n // in addition, alpha contains a scale factor for the shadow catcher material\n // dividing by alpha normalizes the brightness of the shadow catcher to match the background envmap.\n vec3 light = upscaledLight.rgb / upscaledLight.a;\n\n light *= EXPOSURE;\n\n light = TONE_MAPPING(light);\n\n light = pow(light, vec3(1.0 / 2.2)); // gamma correction\n\n out_color = vec4(light, 1.0);\n }\n" + }; - var tex = attachments[location]; - gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + location, tex.target, tex.texture, 0); - drawBuffers.push(gl.COLOR_ATTACHMENT0 + location); - } + var _toneMapFunctions; + var toneMapFunctions = (_toneMapFunctions = {}, _defineProperty(_toneMapFunctions, THREE$1.LinearToneMapping, 'linear'), _defineProperty(_toneMapFunctions, THREE$1.ReinhardToneMapping, 'reinhard'), _defineProperty(_toneMapFunctions, THREE$1.Uncharted2ToneMapping, 'uncharted2'), _defineProperty(_toneMapFunctions, THREE$1.CineonToneMapping, 'cineon'), _defineProperty(_toneMapFunctions, THREE$1.ACESFilmicToneMapping, 'acesFilmic'), _toneMapFunctions); + function makeToneMapPass(gl, params) { + var fullscreenQuad = params.fullscreenQuad, + toneMappingParams = params.toneMappingParams; + var renderPassConfig = { + gl: gl, + defines: { + TONE_MAPPING: toneMapFunctions[toneMappingParams.toneMapping] || 'linear', + WHITE_POINT: toneMappingParams.whitePoint.toExponential(), + // toExponential allows integers to be represented as GLSL floats + EXPOSURE: toneMappingParams.exposure.toExponential() + }, + vertex: fullscreenQuad.vertexShader, + fragment: fragment$3 + }; + renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = true; + var renderPassUpscale = makeRenderPass(gl, renderPassConfig); + renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = false; + var renderPassNative = makeRenderPass(gl, renderPassConfig); - gl.drawBuffers(drawBuffers); - unbind(); + function draw(params) { + var light = params.light, + lightScale = params.lightScale, + position = params.position; + var renderPass = lightScale.x !== 1 && lightScale.y !== 1 ? 
renderPassUpscale : renderPassNative; + renderPass.setUniform('lightScale', lightScale.x, lightScale.y); + renderPass.setTexture('light', light); + renderPass.setTexture('position', position); + renderPass.useProgram(); + fullscreenQuad.draw(); } - init(); return { - attachments: attachments, - bind: bind, - unbind: unbind + draw: draw }; } @@ -2763,58 +2987,6 @@ } } - var fragment$2 = { - outputs: ['light'], - source: "\n in vec2 vCoord;\n\n uniform mediump sampler2D light;\n uniform mediump sampler2D position;\n uniform vec2 textureScale;\n\n uniform mediump sampler2D previousLight;\n uniform mediump sampler2D previousPosition;\n uniform vec2 previousTextureScale;\n\n uniform mat4 historyCamera;\n uniform float blendAmount;\n uniform vec2 jitter;\n\n vec2 reproject(vec3 position) {\n vec4 historyCoord = historyCamera * vec4(position, 1.0);\n return 0.5 * historyCoord.xy / historyCoord.w + 0.5;\n }\n\n void main() {\n vec2 scaledCoord = textureScale * vCoord;\n\n vec4 positionTex = texture(position, scaledCoord);\n vec4 lightTex = texture(light, scaledCoord);\n\n vec3 currentPosition = positionTex.xyz;\n float currentMeshId = positionTex.w;\n\n vec2 hCoord = reproject(currentPosition) - jitter;\n\n vec2 hSizef = previousTextureScale * vec2(textureSize(previousPosition, 0));\n ivec2 hSize = ivec2(hSizef);\n\n vec2 hTexelf = hCoord * hSizef - 0.5;\n ivec2 hTexel = ivec2(hTexelf);\n vec2 f = fract(hTexelf);\n\n ivec2 texel[] = ivec2[](\n hTexel + ivec2(0, 0),\n hTexel + ivec2(1, 0),\n hTexel + ivec2(0, 1),\n hTexel + ivec2(1, 1)\n );\n\n float weights[] = float[](\n (1.0 - f.x) * (1.0 - f.y),\n f.x * (1.0 - f.y),\n (1.0 - f.x) * f.y,\n f.x * f.y\n );\n\n vec4 history;\n float sum;\n\n // bilinear sampling, rejecting samples that don't have a matching mesh id\n for (int i = 0; i < 4; i++) {\n float histMeshId = texelFetch(previousPosition, texel[i], 0).w;\n\n float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel[i], hSize)) ? 0.0 : 1.0;\n // float isValid = 0.0;\n\n float weight = isValid * weights[i];\n history += weight * texelFetch(previousLight, texel[i], 0);\n sum += weight;\n }\n\n if (sum > 0.0) {\n history /= sum;\n } else {\n // If all samples of bilinear fail, try a 3x3 box filter\n hTexel = ivec2(hTexelf + 0.5);\n\n for (int x = -1; x <= 1; x++) {\n for (int y = -1; y <= 1; y++) {\n ivec2 texel = hTexel + ivec2(x, y);\n\n float histMeshId = texelFetch(previousPosition, texel, 0).w;\n\n float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel, hSize)) ? 0.0 : 1.0;\n\n float weight = isValid;\n vec4 h = texelFetch(previousLight, texel, 0);\n history += weight * h;\n sum += weight;\n }\n }\n history = sum > 0.0 ? 
history / sum : history;\n }\n\n if (history.w > MAX_SAMPLES) {\n history.xyz *= MAX_SAMPLES / history.w;\n history.w = MAX_SAMPLES;\n }\n\n out_light = blendAmount * history + lightTex;\n\n }\n" - }; - - function makeReprojectPass(gl, params) { - var fullscreenQuad = params.fullscreenQuad, - maxReprojectedSamples = params.maxReprojectedSamples; - var renderPass = makeRenderPass(gl, { - defines: { - MAX_SAMPLES: maxReprojectedSamples.toFixed(1) - }, - vertex: fullscreenQuad.vertexShader, - fragment: fragment$2 - }); - var historyCamera = new THREE$1.Matrix4(); - - function setPreviousCamera(camera) { - historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); - renderPass.setUniform('historyCamera', historyCamera.elements); - } - - function setJitter(x, y) { - renderPass.setUniform('jitter', x, y); - } - - function draw(params) { - var blendAmount = params.blendAmount, - light = params.light, - position = params.position, - previousLight = params.previousLight, - previousPosition = params.previousPosition, - textureScale = params.textureScale, - previousTextureScale = params.previousTextureScale; - renderPass.setUniform('blendAmount', blendAmount); - renderPass.setUniform('textureScale', textureScale.x, textureScale.y); - renderPass.setUniform('previousTextureScale', previousTextureScale.x, previousTextureScale.y); - renderPass.setTexture('light', light); - renderPass.setTexture('position', position); - renderPass.setTexture('previousLight', previousLight); - renderPass.setTexture('previousPosition', previousPosition); - renderPass.useProgram(); - fullscreenQuad.draw(); - } - - return { - draw: draw, - setJitter: setJitter, - setPreviousCamera: setPreviousCamera - }; - } - var noiseBase64 = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABAEAAAAADfkvJBAAAbsklEQVR4nA3UhQIIvBoA0E830810M91MN9PNdDPd/ulmupluppvpZrqZbqabe89DHCiDv5GzaossZGYBp2PFIFqKdmMXIKW85edCB/RT11SD3JMQidRlL7n2ufRH1jVkFUNVc3NaZ7DP0T7/112kM1Qc3RDG0K/4uN7CPC7OmtFRZK3Jy3fhSSySKIZXopTsnIhN69JjLHJYYnfpZu44hnV+UkhG/lPd/D+fIVwWtdhhupVPJmtsLFIhjHA7UUqY4fPIQ2qdKxviqH2sugJ2nC+1ZdV0vEF3RGNcMd4KdvIXaJnujdPrKj4ifkeX2f04avjEbqO0ogI/rD7zhmy6GKG/2w32IetIX5vE9DbrS+CNy4sbmgXoiaug48lV4bVKZgluwPujd+Ioa+KjuntypepEEvl/YYCYTq6w4aaReGMShwLkC4nvq7jFKJmLpoepHJTag/h2aMklShou+tyip5wm67P2/CnvH7K6zuq+KGvy2rkkrR4mc4dpUNTEFHDId9TXQiST3RxHO0lHNgNFIA/Ub1kC0pOlNBf77EtyZ0ejxvikzySL8C8hNWyyc1GvcBCusv/otvBO3YSj+KvvRlKgoNaF/GEB64prsx8qFRwVJcRmMk8l5E5swfHMPuhlr9DmtrLeqs7KOrCMQSpeGW/zH5F2dc0AXZhcp9IthLZyuxpHrkNnp0JfnsY+55XkAtgSOvsWzps8uoJ5GtpAXRWZ5TK9cEM1WVRWC81ZUstPZHHkC7GDjZfl7BJ+VcXkI8RfVIMW0Jq95oxE0R+MDQnMX97DPhYjEXzHM0LvUNyODhdDCvJdNmXlfFp0RsbBNclTj8hpXofsCgVYsAnwPRTNTiTLxZkQW43BmK6wHk7Y0iSdXIfyK8/aQULdx1/hJc0JkRE/UgNDc/dGZWanTCs2WQ0W6Xh7PZGuDMXEaLtIRMZcZAM4ieOwO661Qf4xVyhLOOA2mLe0JyvIDrBhUA42ioUiMmrHJ9te6jwtbQ6xWrKf/ED3qKJ0qvzO2of57KkcyMBvNZndbLTX/iWNaWTezm9E8cleKOSEXK1B3LDfeGk4yx/b7L5+uAvp6UVC/UYAhvPLvSwTWm+qqO5saYjh79LadBJaAR90ct9S/GGZ7Q1zhKyTOUJ9MzT85IldVjLLduUOqovEaASJbXeZ37oFv0w/sOGhvMzpVrL/2MeQx8+ldfQU/QBXIqn8NtHAHjCzaTJk+CDS0e6Wk8N7GEDgoR4rG5M/Zig/LD6hEr6VHmxzmijoKu/oZ+p84oEeiwegquE7pBZPYXEoyLeQ66wRicLXmOzWoib6mq6KUoWxuriq62OQh647TUmn0RuuIjtPfuEkcMQtwJ/IaJabRRe9fRX2Q8Z1L2UNlMclpfMFdKYr+XkVEeb6vChZuOBfhNl+l/hly9L0/mzYIxPhBq4oimlnB273mkgwnr+S7Vnp8Fff8/3VC7IJCtqZ9AxZRnujo3wjmQ9n7WtayxwgvUhUNtJ0UjlEU9vPFhePxDLfkl6z43hhdQSW+xbyKooJEEwqTOkL1VHWc1vReFaVxbcnTGM2Uq1XNXRPos0bdtI8VBKXcZdCV1dNpLcL3DE7Cqfmi2w5JGhGFqATTUhzy7sG2+a0II4ZtupikC488mt9abdTvpYXVALXBU6wNzYLXUTPQwTxH/nNttjKDA7pQT47mopOQmxzW/f3GVhXWoguEUl5EHcUoKm8LdpiMoZV9JONpzZa7w
a7hG4XzxvquHj2s5lsIrFbtrbew3+SKbiK6Ry+whAyXrTBC0kgDfwZHNOMNRnwOjHVVICdOGVo6LuFsn6GTKN6u4IeZqtN7B6vzlegD7ioW8i/u430kbtO2pABrgTPwb+xchSZ7jK/V6KxPEWK+K+oBXFmeuikt+HzrIU66KQsI9bRaGqQfKqSkMNumbnN4/ljkFsPxqnDElSF32L17D8UhxbUI8xnuwk/0znwXXcGGmD4QpPo5n6kTod70Zb2oI8Y6pFJKiuLoab7bXBEj+CXFTOH4A4kV/1JNjNRLrexaEX5Ht0xQ1RRskzmhCd+rmnFi9hLeqHe7svy7Lq+/+Mq6am+A/X8e+iptvqcbIjzqCOfbW6SpKQ22gPt8HgTFUMPd9kWgKd2O45Pr0EuOlK8waXFfriga7sXrLlKZZbrgeaPnmsrurd+n2H8hugjc+i1OCpJj2vYPyQ27+lT6/f4JM0c6sJIHwm/8AJS4tXuuo6g9qOCjvOZIrI9ZpaaauQAjwb9eTG0RMYPr2y5AHv8YhZLHvZl+DdQqrI5Z1L4QawT/FOLoQCOLR+EyTIrjcqb6YtiA4mg0/L27reYYg7JpvSVOM7G+p2uIb1iJ0hE+/DvvLW+qqfL034nLU5GQh02j8aHi/aDLS2b4ncYk/OcE+V+hhNqmF2rs1j4a1qziXYgaaDWQRetSbOwC60J8VhFSIf62k2osy7FXqpdrDAdZbuQxf5ZOCGLy6Reago9xBydmN9HBdUqX9VtUYdIKZOGbGAFxEDXjLxDmeVXsd5WIOmlhN0kqe2r84o1upy+z9KLRjY/ui5qGkhNiqoL5iXN6hPbeyGa+ckKwRM6l51Ao+EG/yKruXNsrWvHkuDPKKctS4bYRnq7eIQX+at4s8lD2ovy+D/xlXUWuf2jsNiNQx9xDRwjLAgJUSd5AvfTD80U0Qk91fP8DTkBfaXx1Qhv7FMXifZRMw0MlxtxVFVNzoOTrnjoK9ObCZy5HOwjbWgTib1kFo3BJa9t7oojdJK5RpGcifO66LQ2xuIHBvxcnMcLdEoUWc0QjVhs0k3f4dnoXvREODRB5KWJ2UFTX60WcXERxFQ7uo9mDz1YVbzQddDBHQ3QxD0MPfBnsdX+p9+xg+Sybmtum4hKoJW+CG0NGSQxP/TC0AulZ1tozfATr9Ld/QfURp1kg2FqaOQ2QBZ9JNyCoeQfO0eS+SOCa0lLshW6hnulWqHi/qrMTj6Z03gzB/LMzuaXmZXJSUm7nSKACjQDVzafbiNTqUayYpjDNpqhqIzf4SfRU/KF6S+vo0MhAS/v36BoolU4JbKQO3S3nmAL88puH0GoN6tF3vg2rCzscLVcUbmKzHS/dFroBdGk8bP4Hx8DRotKtJdMa4YZKhvR2OgbnULv+lzYUfjhFusD6KaLR8aHFSSPjYmT2MP6tU1L76u4uqJYrqawEqqpW+Onm4G6KIw2CU0Z29/EIc9gKVwjH3wxNV5v8fmxVunIGB94PxYBV+I3RRM4IO8x7Ab6ZXi3aoEeoUXmtzqHVrGCsrUYpOvIFXSMgX4YQp1Qmp6xf/Ae8gR1U19NUzEdSOjApK9nPuoItqt5HE7TXPIm3sff2fm+SbioN9GcPLltyTLKeeGBjGr668sYsfuymdjM8uHjYqL5BLn4SFqRdjbnZJKgyFHIA51lEjEebtEMfqN7LlORlgreiM3B26G2g82iqssbZBQq6k+rGn5J+MMvsVRus95vMpFR9K9K4errLmJFSMO/iepoBu6CfptR4QzqxpOYH6ERP4xmqS4uKzz3V2RS0SnMNwnYKvdW5Bd16FdS0kWlDeQ2VIMEJtgeVJ7GZIdDYQldWQ6UVK2mM1l000/MRyn5GpGZDkRbQ1RUCs/HLcMDV4hV1/OkEZFpRX+f5zfSHGQR7W2obdeiMnK3qQarTK7wEiq5vTqWXayqhyF4By5l6+HDPKK4AZtVRnoHjVBv8Syd1VocyY2UP9g8c15PpXBNVIET8MnVd8/oNlaGcnZJBZoQ7uAe4SjJAWNdX3AkNrQTQ+ClmMxO23i4nXseStC+4agkPDYeChdcOzLRJ2f/2S+ukJqsW/tvKoN4bP5/sOpHxuN5qC3p5VbaizIefWBKkKWkCc+DO5paPAHAP7wQj+VFRVp/zhPy3Ufw+8I4VsE1QVPtS1ZLf6eJ5Qr3Se3GxfURld71EhvEHJXVbLdJzUL/2nk6nX1mGcxdXUpvIg2gt7rADrkoYq0ogKbYXyK1pOwljuEO0rykAh5k2pMp6hR7rVO7h3IY2Y6gOYpsBqhWfp/sQcbbZa6m7uge0dx8pUgjd9GY5CyUldNEXX3L5JRLaHP2G5UhDtfnn8Qk3sak8Y1dUR5BatyTnyTR2PWwnCVCZe09NdwLG8tpvl3nJCd8dfzPNFMp1Wb4YuuihKIPWkP2k5I0o4OVJB96wDby2Oy2TAwv9VAxh8dFJ9EvU1S390Pdekx8d0jrxgik35GaLDoeZR7ZhH4IqyzO+/WiNzkkGNrOm8MvN4dmom9kbtuCzgy14K097SrhJuoeDEMJ7CI5Tjwn+3AmfjkUQpXUTR+DzdDPKVRgh23w1c0MUoI1EYchky6st4hefmS4bhZhr5vJ9/QYfUpbywukv9iib4S8msMqOE6iqH86px6L3oubJike6fJBB1ODDTZb6V+fAvapLL6DTGQ+2hm2k1svL8litoeKxZaRIXq2/U3HsDb6ghQBJqP4OB29iP4Lv/FaVZlctV9QM5tC1UGRbCWRBSfQs/UOFAGtlhX8VJJMLTD7VQY6HRU23ehdXAYlJHN5FlkRvXQHdDzx2I8Lx1A3sxTd8MXdOjVKH4BCOp2pIx6zrHwar6qO6uYB3FaXXdYNycNXCUNlY9TFLwq5SFuemg60UdhieVa8hml4v/2sHOsDNV1JGM5zmx/U2qKhk/lq+7jXaCuuYxaTPba1OuMHhY16GiuJVonzKBUtjEDVtwPxJP+cXUaRfD/1w5zS0Ulr9DXcQPnIK39Xdgkn+WJahGzGkI1cda/xFhfNn6KP1R7c2Y4JZSBnWK26kkJhs51E/tGk8m5oInvSjOI5risjuorqlI8X0oZh+JmKQeuhn7KLjKmvmd6iCVnIKtMH5KOM6zGu5nP5hmixMLo8Ge0P6jWyD0ukR7F0lqIPEMc/gv0OIsqZvCSug8eZ964gnYXr+LsqPmojHrG0apiIzg6TtkyHc7BHIDzTXuL/yQ38Dhsnm5OPfCorYK/LFTKPOU4xr+m/6WzydVCmPWwM5+UuN9e1Ce/8TRbfdJVzbCrWQJTUO+R8V5Ouh6m6T2jpqllYDfew5Ylcb1teraRxUFb8xxp6zFWH+eqtbIhzomc+DRunqvv3doVoKfOEJGoRKilzmAt4B69k+0FyN0m2ED5ss6NkNLTbn1LDAmHU/QDBj5oU8j9cxLxi2dUd+z5E8RfNT9NUHvApzRU/Bv1R0MEPlER9Nzuhpb/lhmsLxUJfP8EkYWdUCbyW3QzlbTco4AfhKEDNUfeY7pLt8U/a063mUaGD+4wtofwtmo0L2WWql
SxHErH0aDltYsbwqHqNq2CnuJ3qdKjJh/hlYYrsKLKwwTy2eOnzyrIMB1A0rmhiNc3Iz9tkvJt44ZqhJQ70F+jhW8CIgNQuO49/Q8bcJ5NxWlaVj6Yx/VVIZWeY2uK+zuw3hSEhIu2hE5NLfiC9p//I7vq6i6+fioJwF2Uyf2lzHoGt521FPlUJrH+AioQzvJtcJnaGEwHewSXxGFExyX7y81hVsQGng6shr9lG74TM5KdX/LyLIevpKyin6sz/Qj/0MjTQh2g594Yct6NVPL5QNUC3QlX/RR3hOXE9th5Nhf2hBswWfdVZVJsvMQNoGnOVfvNx6Qudgo9Ra/hMVJV8wdF1XQwFSYqwzgxjkVQ9kS+cZjHEhzAK6qMKYlZIjg+ZGqIvykCWBy4T0dlkBykCq33WsIAOAoJaQjH/V5w1uekes5plQOPRfBuTFmGvWRueVX9VW2V7GcccoE90CTSW7cXzaU+9hdflUeUTkk001/PDCAnbTRXb2h4jPeCZ2O0Gh1JuOu2M97PnZjBd6QrJDuqBL60+kuH4BK+Fo8uzLjmaoO4Z4DvsCpZM9DJtlWKvUEnVmTVVj/SOUFmOxBHCZV7CJJETIKA8rIuZKavxzKaxvQSlxD/exg9g130ifoH20pBJPKAz2F+bwyVUq2Qrd98mshdVNhVTtjJXSFx4wzegSfhAKECfcY1u4Wamu3pPqogO+Fu4bifDU1MZRfepxAh8EeLYn0i4Ey6NWwYD4Yhp6hfK8uiGimFPubcsYXiI/nO58QmN5V4+zm1kpdl3AtoeFLF0MT0Wbqk5KJ37rmqFTWYR+4vLsGN4BM3uGoYUJgLv5irINGiw+upKhA3qOIxkiQjVGfR+uo7dRAv4B1WLbqApcD472903Hz2T6/0jmR6G0xWmEWz2g3U7uYZF1FNgKX7PK5p85lXoGMBAMzzA17Kb+EnZmFfk/eghNI4W9r1pGjGZ14YvbIHcHQbYy/Cbb0FTcW61x83ySGRGjc0SOC/qqKE+p28MfV0hfJhNV0P4VdGQdICcYrKPz/Lb306IfSKl+66z83LiKPokGeuq4pI5oqFMzY6FSQC50RXxgifnnckXEUfkZS9kFNJCn0b38Q4aWXRRt2Rl/pLMkll4fdwuPNaRXW11xT1lBdE2KfBblwAdDz/dNhIJtSZZzFtdWq+BqHZPKB8ukbZwCkf0Ne19X1hMFAvsLZIWFyPGnTe36TC9Ej8U5Tkk8J/0Ai9JpnCJ7iLz+VWzFqqEdyaXGqSWk8I4vYovWonifKW2Iok7p8boFaozGsinis86MpknWoeJoazD4OW5UEXvcxNoUvdDdDdP5Ag7V2xypbHy/eGcjY56yF2qGQwUz1xSaE2jit++h9mpYZpqYwuYyrAGT+QlXDsjVSrUXcwiiaCxfsYOm2lmszyrh4tY/LbrY9+GQqK8+SdSyYO2qsmqbvEi+old7nrCaL1Ed7Gx8B05gJ82C1FGFds3FM9tDvUJa9E4vNJVZTLzy89i2dg4sLQmFMGZ8TkH61lUf4Q94D1xRPTYMZst/IK9vjhskJdJeTdKfXNMdOfvVR5eDS3STUlGczIYHEvdhxZ2LR1ud/NYpqYIMqEs7P6yTbIpz8eru61QjH4mg1AybF17mgESqAN4PRnl8uvTsBpT9SlsJ4tgBKtjIZXua36TRmirSIo+iqX8FIol7pKx5CNEox1EdpGC3WWR5C4/Qf+wm3Rc9Z+fhdraPGi8KsWdT0Y7idMylzVwldSXGf1MeGZSiFGe+1tin67kr6ixag26TYYaSi771i5ueEjr+U4+neqPY6H37KaEFzBGFqfpuZIXUEsyIJST01xd2walDwvtGd0Xr7al/ALSXKbRNHSh1/xe9cHVDs+1hv7ul6xPX5ppZAjlZm446vuIsuiiW+rf8Yhmil+Bc0N3Ej3UxAXcTzWdZxEhaN3HRJaX5VMyyR3jLXxZDTnkbrsM3cA1eD52UGL2imx3xA7FB2wN+c9Opo3UG3rZDeIn9Wz2kCfTRVwEesH2oCn0MRHFzZWZcHm4y8GmVp/4BBzd7pXZbBd+3Kehjfw/N0duh2e4hTmuouCuvjrbo4uZaX5DqOyT+PxsJXTBMIOfstFd2/BF/8fnyximG1rFk/Bb6AWOywqHHSYhPhjy0zjuOWSndcUAMwVVtGtDZrFT1FCF+Bboxaz+wYujXVBNPSRt3TBel3xHhVk/9xASyFLqjEhr+/FFxMh7YiKktkftn5CDNDW7xTd7kcU1MJRWMm9Vb55YbVIl5D36BxqFk6osFmqjl8GTjLp7qCnHWMPa24NoufkdWuo7+j/zxUx0N+hbaBqQW6VGia52kcsnkb1p1/I5vgo26CIertrZgMfT8jqxrkeJfAMtwmAWX95Uo/g814vXll5BStHMzzG50EN8RE4g1WgWNNwtUpG10jl8S1zZvvfT7Urzi5eCKOEtweoMJWKejoFKoTY0TliqpCCU+WsqI7ywhpzipVFyeKKikfE+o63t11qguWAP/Wau6OEQE52l5dkq3BGeqwimFMnktyn4J4uoS3aNakAj8XbqStjpC/nXpL354q/zo3SxATjjuEtpr7H5uiodjVHoivbLhvoxnCDdMdZn/RMz0x/k0UIz3lv/EdN0K3pYdrO72VeeH24La2aqJ7wjWeFLhjlus/jC89FaKC05oN6biWqpgGjYshGQTpdTP8ggEQ9mkuTmgqglsFkrE4UBUNreIbnEMHcE9xRN8P2wlZTjr0xKv1HOEvn531ApJFLt1WdXRk/UKSyjmdxIkke903Ftc7EEC1PVDiaNfToRT/c2j0km6I6mKqcW44GqobuOOyp4goU26hWewpfxE/QZaoo2+L50vx5N8rmG/IefiDeJeuqDiAUFwjqeWX3VU11fdoFn04N9PVhNJoSdZoDMztbZ42YhfaMvueW4Irkmp+sS+hlJLmL5y6aI2KYvhGr6kG1kopid1vuiNlY4aXO5KhJmmTo8AWmF8/qUugcq5rLxb7gCiunu2jnQhZ2C2CGD6gw71CMzw13kQ0xEVogsZdVtHHjLD4j7LiIvxpxswLwYRguoCG6H7isSi/qwwQ0Rp8U4/IeuNq/oSDsDfto8dJx9ExJJyVqwX3S9Hi2TazjLCsNtu1984NXMdnbPLbaTdCv1Xpf02+UTqMZe8QWquBlDKoeEtp3e6+qTa7gV+SnG+VIhOeWop/0g56o0EFf+QC1wOdwRPyJH1U/AvgPJYffZMqEtzo4jhfoiKdOyrT7uqqA1NIvricqK3ei1gBW8DwE5zM8Jl3CCUC8MRpH0EbscEoihOptLBntDP+/CH5RWLkfvQhn1TCahR/w201XcYEvUGZbJbnajXRWyh/Xgt/TqkIBOcEXkPBsZHtiaaKlMbWbDSdGf7ab3aSl51fe3qf3nMM3e9vF5W5/BwQT/21ZQ611W2YGPtb8hHbuuiBP+nG6Op6HVqJUlEMUexs1YH5qbTBILRCY2nORVUeh0V1X/hwrwJuy5u2KWupx0Bj1NXtBsuKkezra58+Ez9NGN1R3x0VRindg7mRGZ
MA8XNOd4jXCIL+IfXYMAN3RSbVUT+oTFdmfMOl1R72SvPQtpwl95zZUxn+g9MtnVMOvDbXVcRnOd+Hr6iDcWH0g6/xRvD99FYtwJR/YlbD05AmFUneyl71x3W17k8xNRMrnJR1djaUGxlsThY6ARjgBPUSc7kkeH/GQIKilgG+8KRCv8mVLcW+Z300I7NBzNJ0XZZhSR1OPSLmHdMOJF8Wf5HzD9K5zFFXG/sFIewu1RPFSOrULH1JTwUR1UMdUvNQAv5jHwTb3KxuWt8StXkuz3mfklNIcc0z3DPyhn9opkrClsVI/xqRBbwytYQq7gQTYNXi4bmGPyjk+CYuiHfj8fp3vDMZ+QZSRvzW6Yq7OilGQHFMfx3GyZXBa2DMa7S2YeuWeHyMy6p3lo29LNtDR3rq5Ljf+RI2guPkcHy9rkF2mJEvvqNI+4jRUs50FfgWy+u5uDaynIAq15dF4tPIB9KIp8L7PDUv1NVoWWJht6iQrIdfgcLu05vsbHBkGc5mECeyC2spv8F4rG++C80ICkoNXwOlIwXEOJzSyX23UIU0h/mklVoY9lfNdVL/E36VD20u4QbVxm6GeKyfGkEvrFUqPR/H9s/XjiBWp1EAAAAABJRU5ErkJggg=='; function makeRenderingPipeline(_ref) { @@ -2829,10 +3001,16 @@ // higher number results in faster convergence over time, but with lower quality initial samples var strataCount = 6; + var decomposedScene = decomposeScene(scene); + var mergedMesh = mergeMeshesToGeometry(decomposedScene.meshes); + var materialBuffer = makeMaterialBuffer(gl, mergedMesh.materials); var fullscreenQuad = makeFullscreenQuad(gl); var rayTracePass = makeRayTracePass(gl, { bounces: bounces, + decomposedScene: decomposedScene, fullscreenQuad: fullscreenQuad, + materialBuffer: materialBuffer, + mergedMesh: mergedMesh, optionalExtensions: optionalExtensions, scene: scene }); @@ -2842,12 +3020,14 @@ }); var toneMapPass = makeToneMapPass(gl, { fullscreenQuad: fullscreenQuad, - optionalExtensions: optionalExtensions, toneMappingParams: toneMappingParams + }); + var gBufferPass = makeGBufferPass(gl, { + materialBuffer: materialBuffer, + mergedMesh: mergedMesh }); // used to sample only a portion of the scene to the HDR Buffer to prevent the GPU from locking up from excessive computation var tileRender = makeTileRender(gl); - var clearToBlack = new Float32Array([0, 0, 0, 0]); var ready = false; var noiseImage = new Image(); noiseImage.src = noiseBase64; @@ -2857,6 +3037,13 @@ ready = true; }; + var sampleCount = 0; + + var sampleRenderedCallback = function sampleRenderedCallback() {}; + + var lastCamera = new THREE$1.PerspectiveCamera(); + lastCamera.position.set(1, 1, 1); + lastCamera.updateMatrixWorld(); var screenWidth = 0; var screenHeight = 0; var previewWidth = 0; @@ -2867,36 +3054,36 @@ var hdrBackBuffer; var reprojectBuffer; var reprojectBackBuffer; - var lastToneMappedScale; + var gBuffer; + var gBufferBack; var lastToneMappedTexture; - var lastCamera = new THREE$1.PerspectiveCamera(); - lastCamera.position.set(1, 1, 1); - lastCamera.updateMatrixWorld(); - var sampleCount = 0; - - var sampleRenderedCallback = function sampleRenderedCallback() {}; + var lastToneMappedScale; function initFrameBuffers(width, height) { - var floatTex = function floatTex() { - return makeTexture(gl, { - width: width, - height: height, - storage: 'float' - }); - }; - var makeHdrBuffer = function makeHdrBuffer() { - var _attachments; - return makeFramebuffer(gl, { - attachments: (_attachments = {}, _defineProperty(_attachments, rayTracePass.outputLocs.light, floatTex()), _defineProperty(_attachments, rayTracePass.outputLocs.position, floatTex()), _attachments) + color: { + 0: makeTexture(gl, { + width: width, + height: height, + storage: 'float', + magFilter: gl.LINEAR, + minFilter: gl.LINEAR + }) + } }); }; var makeReprojectBuffer = function makeReprojectBuffer() { return makeFramebuffer(gl, { - attachments: { - 0: floatTex() + color: { + 0: makeTexture(gl, { + width: width, + height: height, + storage: 'float', + magFilter: gl.LINEAR, + minFilter: gl.LINEAR + }) } }); }; @@ -2905,8 +3092,47 @@ hdrBackBuffer = 
makeHdrBuffer(); reprojectBuffer = makeReprojectBuffer(); reprojectBackBuffer = makeReprojectBuffer(); + var normalBuffer = makeTexture(gl, { + width: width, + height: height, + storage: 'halfFloat' + }); + var faceNormalBuffer = makeTexture(gl, { + width: width, + height: height, + storage: 'halfFloat' + }); + var colorBuffer = makeTexture(gl, { + width: width, + height: height, + storage: 'byte', + channels: 3 + }); + var matProps = makeTexture(gl, { + width: width, + height: height, + storage: 'byte', + channels: 2 + }); + var depthTarget = makeDepthTarget(gl, width, height); + + var makeGBuffer = function makeGBuffer() { + var _color; + + return makeFramebuffer(gl, { + color: (_color = {}, _defineProperty(_color, gBufferPass.outputLocs.position, makeTexture(gl, { + width: width, + height: height, + storage: 'float' + })), _defineProperty(_color, gBufferPass.outputLocs.normal, normalBuffer), _defineProperty(_color, gBufferPass.outputLocs.faceNormal, faceNormalBuffer), _defineProperty(_color, gBufferPass.outputLocs.color, colorBuffer), _defineProperty(_color, gBufferPass.outputLocs.matProps, matProps), _color), + depth: depthTarget + }); + }; + + gBuffer = makeGBuffer(); + gBufferBack = makeGBuffer(); + lastToneMappedTexture = hdrBuffer.color[rayTracePass.outputLocs.light]; lastToneMappedScale = fullscreenScale; - lastToneMappedTexture = hdrBuffer.attachments[rayTracePass.outputLocs.light]; } function swapReprojectBuffer() { @@ -2915,6 +3141,12 @@ reprojectBackBuffer = temp; } + function swapGBuffer() { + var temp = gBuffer; + gBuffer = gBufferBack; + gBufferBack = temp; + } + function swapHdrBuffer() { var temp = hdrBuffer; hdrBuffer = hdrBackBuffer; @@ -2924,8 +3156,9 @@ function swapBuffers() { - swapHdrBuffer(); swapReprojectBuffer(); + swapGBuffer(); + swapHdrBuffer(); } function setSize(w, h) { @@ -2948,6 +3181,24 @@ return numberArraysEqual(cam1.matrixWorld.elements, cam2.matrixWorld.elements) && cam1.aspect === cam2.aspect && cam1.fov === cam2.fov && cam1.focus === cam2.focus; } + function updateSeed(width, height) { + var useJitter = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true; + rayTracePass.setSize(width, height); + var jitterX = useJitter ? (Math.random() - 0.5) / width : 0; + var jitterY = useJitter ? 
(Math.random() - 0.5) / height : 0; + gBufferPass.setJitter(jitterX, jitterY); + rayTracePass.setJitter(jitterX, jitterY); + reprojectPass.setJitter(jitterX, jitterY); + + if (sampleCount === 0) { + rayTracePass.setStrataCount(1); + } else if (sampleCount === numUniformSamples) { + rayTracePass.setStrataCount(strataCount); + } else { + rayTracePass.nextSeed(); + } + } + function clearBuffer(buffer) { buffer.bind(); gl.clear(gl.COLOR_BUFFER_BIT); @@ -2959,7 +3210,6 @@ gl.blendEquation(gl.FUNC_ADD); gl.blendFunc(gl.ONE, gl.ONE); gl.enable(gl.BLEND); - gl.clearBufferfv(gl.COLOR, rayTracePass.outputLocs.position, clearToBlack); gl.viewport(0, 0, width, height); rayTracePass.draw(); gl.disable(gl.BLEND); @@ -2973,14 +3223,30 @@ buffer.unbind(); } - function toneMapToScreen(lightTexture, textureScale) { + function toneMapToScreen(lightTexture, lightScale) { gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); toneMapPass.draw({ light: lightTexture, - textureScale: textureScale + lightScale: lightScale, + position: gBuffer.color[gBufferPass.outputLocs.position] }); lastToneMappedTexture = lightTexture; - lastToneMappedScale = textureScale; + lastToneMappedScale = lightScale; + } + + function renderGBuffer() { + gBuffer.bind(); + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + gl.viewport(0, 0, screenWidth, screenHeight); + gBufferPass.draw(); + gBuffer.unbind(); + rayTracePass.setGBuffers({ + position: gBuffer.color[gBufferPass.outputLocs.position], + normal: gBuffer.color[gBufferPass.outputLocs.normal], + faceNormal: gBuffer.color[gBufferPass.outputLocs.faceNormal], + color: gBuffer.color[gBufferPass.outputLocs.color], + matProps: gBuffer.color[gBufferPass.outputLocs.matProps] + }); } function renderTile(buffer, x, y, width, height) { @@ -2990,24 +3256,6 @@ gl.disable(gl.SCISSOR_TEST); } - function updateSeed(width, height) { - rayTracePass.setSize(width, height); - var jitterX = (Math.random() - 0.5) / width; - var jitterY = (Math.random() - 0.5) / height; - rayTracePass.setJitter(jitterX, jitterY); - reprojectPass.setJitter(jitterX, jitterY); - - if (sampleCount === 0) { - rayTracePass.setStrataCount(1); - } else if (sampleCount === numUniformSamples) { - rayTracePass.setStrataCount(strataCount); - } else { - rayTracePass.nextSeed(); - } - - rayTracePass.bindTextures(); - } - function drawPreview(camera, lastCamera) { if (sampleCount > 0) { swapBuffers(); @@ -3016,24 +3264,27 @@ sampleCount = 0; tileRender.reset(); setPreviewBufferDimensions(); + updateSeed(previewWidth, previewHeight, false); rayTracePass.setCamera(camera); + gBufferPass.setCamera(camera); reprojectPass.setPreviousCamera(lastCamera); lastCamera.copy(camera); - updateSeed(previewWidth, previewHeight); + renderGBuffer(); + rayTracePass.bindTextures(); newSampleToBuffer(hdrBuffer, previewWidth, previewHeight); reprojectBuffer.bind(); gl.viewport(0, 0, previewWidth, previewHeight); reprojectPass.draw({ blendAmount: 1.0, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: previewScale, + light: hdrBuffer.color[0], + lightScale: previewScale, + position: gBuffer.color[gBufferPass.outputLocs.position], previousLight: lastToneMappedTexture, - previousPosition: hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - previousTextureScale: lastToneMappedScale + previousLightScale: lastToneMappedScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position] }); reprojectBuffer.unbind(); - 
toneMapToScreen(reprojectBuffer.attachments[0], previewScale); + toneMapToScreen(reprojectBuffer.color[0], previewScale); swapBuffers(); } @@ -3053,7 +3304,9 @@ reprojectPass.setPreviousCamera(lastCamera); } - updateSeed(screenWidth, screenHeight); + updateSeed(screenWidth, screenHeight, true); + renderGBuffer(); + rayTracePass.bindTextures(); } renderTile(hdrBuffer, x, y, tileWidth, tileHeight); @@ -3068,17 +3321,17 @@ gl.viewport(0, 0, screenWidth, screenHeight); reprojectPass.draw({ blendAmount: blendAmount, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: fullscreenScale, - previousLight: reprojectBackBuffer.attachments[0], - previousPosition: hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - previousTextureScale: previewScale + light: hdrBuffer.color[0], + lightScale: fullscreenScale, + position: gBuffer.color[gBufferPass.outputLocs.position], + previousLight: reprojectBackBuffer.color[0], + previousLightScale: previewScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position] }); reprojectBuffer.unbind(); - toneMapToScreen(reprojectBuffer.attachments[0], fullscreenScale); + toneMapToScreen(reprojectBuffer.color[0], fullscreenScale); } else { - toneMapToScreen(hdrBuffer.attachments[rayTracePass.outputLocs.light], fullscreenScale); + toneMapToScreen(hdrBuffer.color[0], fullscreenScale); } sampleRenderedCallback(sampleCount); @@ -3105,6 +3358,9 @@ return; } + swapGBuffer(); + swapReprojectBuffer(); + if (sampleCount === 0) { reprojectPass.setPreviousCamera(lastCamera); } @@ -3112,29 +3368,30 @@ if (!areCamerasEqual(camera, lastCamera)) { sampleCount = 0; rayTracePass.setCamera(camera); + gBufferPass.setCamera(camera); lastCamera.copy(camera); - swapHdrBuffer(); clearBuffer(hdrBuffer); } else { sampleCount++; } - updateSeed(screenWidth, screenHeight); + updateSeed(screenWidth, screenHeight, true); + renderGBuffer(); + rayTracePass.bindTextures(); addSampleToBuffer(hdrBuffer, screenWidth, screenHeight); reprojectBuffer.bind(); gl.viewport(0, 0, screenWidth, screenHeight); reprojectPass.draw({ blendAmount: 1.0, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - previousLight: reprojectBackBuffer.attachments[0], - previousPosition: hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: fullscreenScale, - previousTextureScale: fullscreenScale + light: hdrBuffer.color[0], + lightScale: fullscreenScale, + position: gBuffer.color[gBufferPass.outputLocs.position], + previousLight: lastToneMappedTexture, + previousLightScale: lastToneMappedScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position] }); reprojectBuffer.unbind(); - toneMapToScreen(reprojectBuffer.attachments[0], fullscreenScale); - swapReprojectBuffer(); + toneMapToScreen(reprojectBuffer.color[0], fullscreenScale); } return { @@ -3165,7 +3422,7 @@ var canvas = params.canvas || document.createElement('canvas'); var gl = canvas.getContext('webgl2', { alpha: false, - depth: false, + depth: true, stencil: false, antialias: false, powerPreference: 'high-performance', diff --git a/build/RayTracingRenderer.js b/build/RayTracingRenderer.js index ed57573..37a1b8f 100644 --- a/build/RayTracingRenderer.js +++ b/build/RayTracingRenderer.js @@ -113,112 +113,130 @@ } function getUniforms(gl, program) { - const uniforms = []; + const uniforms = {}; const count = gl.getProgramParameter(program, 
gl.ACTIVE_UNIFORMS); for (let i = 0; i < count; i++) { const { name, type } = gl.getActiveUniform(program, i); const location = gl.getUniformLocation(program, name); if (location) { - uniforms.push({ - name, type, location - }); + uniforms[name] = { + type, location + }; } } return uniforms; } - function makeUniformBuffer(gl, program, blockName) { - const blockIndex = gl.getUniformBlockIndex(program, blockName); - const blockSize = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_DATA_SIZE); + function getAttributes(gl, program) { + const attributes = {}; - const uniforms = getUniformBlockInfo(gl, program, blockIndex); + const count = gl.getProgramParameter(program, gl.ACTIVE_ATTRIBUTES); + for (let i = 0; i < count; i++) { + const { name } = gl.getActiveAttrib(program, i); + if (name) { + attributes[name] = gl.getAttribLocation(program, name); + } + } - const buffer = gl.createBuffer(); - gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); - gl.bufferData(gl.UNIFORM_BUFFER, blockSize, gl.STATIC_DRAW); + return attributes; + } - const data = new DataView(new ArrayBuffer(blockSize)); + function decomposeScene(scene) { + const meshes = []; + const directionalLights = []; + const ambientLights = []; + const environmentLights = []; - function set(name, value) { - if (!uniforms[name]) { - // console.warn('No uniform property with name ', name); - return; + scene.traverse(child => { + if (child.isMesh) { + if (!child.geometry || !child.geometry.getAttribute('position')) { + console.warn(child, 'must have a geometry property with a position attribute'); + } + else if (!(child.material.isMeshStandardMaterial)) { + console.warn(child, 'must use MeshStandardMaterial in order to be rendered.'); + } else { + meshes.push(child); + } } - - const { type, size, offset, stride } = uniforms[name]; - - switch(type) { - case gl.FLOAT: - setData(data, 'setFloat32', size, offset, stride, 1, value); - break; - case gl.FLOAT_VEC2: - setData(data, 'setFloat32', size, offset, stride, 2, value); - break; - case gl.FLOAT_VEC3: - setData(data, 'setFloat32', size, offset, stride, 3, value); - break; - case gl.FLOAT_VEC4: - setData(data, 'setFloat32', size, offset, stride, 4, value); - break; - case gl.INT: - setData(data, 'setInt32', size, offset, stride, 1, value); - break; - case gl.INT_VEC2: - setData(data, 'setInt32', size, offset, stride, 2, value); - break; - case gl.INT_VEC3: - setData(data, 'setInt32', size, offset, stride, 3, value); - break; - case gl.INT_VEC4: - setData(data, 'setInt32', size, offset, stride, 4, value); - break; - case gl.BOOL: - setData(data, 'setUint32', size, offset, stride, 1, value); - break; - default: - console.warn('UniformBuffer: Unsupported type'); + if (child.isDirectionalLight) { + directionalLights.push(child); } - } + if (child.isAmbientLight) { + ambientLights.push(child); + } + if (child.isEnvironmentLight) { + if (environmentLights.length > 1) { + console.warn(environmentLights, 'only one environment light can be used per scene'); + } + // Valid lights have HDR texture map in RGBEEncoding + if (isHDRTexture(child)) { + environmentLights.push(child); + } else { + console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); + } + } + }); - function bind(index) { - gl.bufferSubData(gl.UNIFORM_BUFFER, 0, data); - gl.bindBufferBase(gl.UNIFORM_BUFFER, index, buffer); - } + const background = scene.background; return { - set, - bind + background, meshes, directionalLights, ambientLights, environmentLights }; } - function 
getUniformBlockInfo(gl, program, blockIndex) { - const indices = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES); - const offset = gl.getActiveUniforms(program, indices, gl.UNIFORM_OFFSET); - const stride = gl.getActiveUniforms(program, indices, gl.UNIFORM_ARRAY_STRIDE); + function isHDRTexture(texture) { + return texture.map + && texture.map.image + && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); + } - const uniforms = {}; - for (let i = 0; i < indices.length; i++) { - const { name, type, size } = gl.getActiveUniform(program, indices[i]); - uniforms[name] = { - type, - size, - offset: offset[i], - stride: stride[i] - }; + function makeFramebuffer(gl, { color, depth }) { + + const framebuffer = gl.createFramebuffer(); + + function bind() { + gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); } - return uniforms; - } + function unbind() { + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + } - function setData(dataView, setter, size, offset, stride, components, value) { - const l = Math.min(value.length / components, size); - for (let i = 0; i < l; i++) { - for (let k = 0; k < components; k++) { - dataView[setter](offset + i * stride + k * 4, value[components * i + k], true); + function init() { + bind(); + + const drawBuffers = []; + + for (let location in color) { + location = Number(location); + + if (location === undefined) { + console.error('invalid location'); + } + + const tex = color[location]; + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + location, tex.target, tex.texture, 0); + drawBuffers.push(gl.COLOR_ATTACHMENT0 + location); + } + + gl.drawBuffers(drawBuffers); + + if (depth) { + gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, depth.target, depth.texture); } + + unbind(); } + + init(); + + return { + color, + bind, + unbind + }; } var vertex = { @@ -241,7 +259,9 @@ const uniforms = {}; const needsUpload = []; - for (let { name, type, location } of uniformInfo) { + for (let name in uniformInfo) { + const { type, location } = uniformInfo[name]; + const uniform = { type, location, @@ -254,18 +274,16 @@ uniforms[name] = uniform; } - const failedUnis = new Set(); - function setUniform(name, v0, v1, v2, v3) { // v0 - v4 are the values to be passed to the uniform // v0 can either be a number or an array, and v1-v3 are optional const uni = uniforms[name]; if (!uni) { - if (!failedUnis.has(name)) { - console.warn(`Uniform "${name}" does not exist in shader`); - failedUnis.add(name); - } + // if (!failedUnis.has(name)) { + // console.warn(`Uniform "${name}" does not exist in shader`); + // failedUnis.add(name); + // } return; } @@ -365,7 +383,6 @@ return makeShaderStage(gl, gl.FRAGMENT_SHADER, fragment, defines); } - function makeRenderPassFromProgram(gl, program) { const uniformSetter = makeUniformSetter(gl, program); @@ -375,19 +392,22 @@ let nextTexUnit = 1; function setTexture(name, texture) { - let cachedTex = textures[name]; + if (!texture) { + return; + } - if (!cachedTex) { + if (!textures[name]) { const unit = nextTexUnit++; uniformSetter.setUniform(name, unit); - cachedTex = { unit }; - - textures[name] = cachedTex; + textures[name] = { + unit, + tex: texture + }; + } else { + textures[name].tex = texture; } - - cachedTex.tex = texture; } function bindTextures() { @@ -407,6 +427,7 @@ } return { + attribLocs: getAttributes(gl, program), bindTextures, program, setTexture, @@ -423,7 +444,7 @@ str += addDefines(defines); } - if (type === 
gl.FRAGMENT_SHADER) { + if (type === gl.FRAGMENT_SHADER && shader.outputs) { str += addOutputs(shader.outputs); } @@ -494,7 +515,10 @@ } function makeFullscreenQuad(gl) { - // TODO: use VAOs + const vao = gl.createVertexArray(); + + gl.bindVertexArray(vao); + gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1]), gl.STATIC_DRAW); @@ -504,9 +528,12 @@ gl.enableVertexAttribArray(posLoc); gl.vertexAttribPointer(posLoc, 2, gl.FLOAT, false, 0, 0); + gl.bindVertexArray(null); + const vertexShader = makeVertexShader(gl, { vertex }); function draw() { + gl.bindVertexArray(vao); gl.drawArrays(gl.TRIANGLES, 0, 6); } @@ -516,336 +543,377 @@ }; } - // Reorders the elements in the range [first, last) in such a way that - // all elements for which the comparator c returns true - // precede the elements for which comparator c returns false. - function partition(array, compare, left = 0, right = array.length) { - while (left !== right) { - while (compare(array[left])) { - left++; - if (left === right) { - return left; - } - } - do { - right--; - if (left === right) { - return left; - } - } while (!compare(array[right])); + var vertex$1 = { - swap(array, left, right); - left++; - } + source: ` + in vec3 aPosition; + in vec3 aNormal; + in vec2 aUv; + in ivec2 aMaterialMeshIndex; - return left; - } + uniform mat4 projView; - // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: - // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. - // All of the elements before this new nth element compare to true with elements after the nth element - function nthElement(array, compare, left = 0, right = array.length, k = Math.floor((left + right) / 2)) { - for (let i = left; i <= k; i++) { - let minIndex = i; - let minValue = array[i]; - for (let j = i + 1; j < right; j++) { - if (!compare(minValue, array[j])) { - minIndex = j; - minValue = array[j]; - swap(array, i, minIndex); - } - } - } - } + out vec3 vPosition; + out vec3 vNormal; + out vec2 vUv; + flat out ivec2 vMaterialMeshIndex; - function swap(array, a, b) { - const x = array[b]; - array[b] = array[a]; - array[a] = x; + void main() { + vPosition = aPosition; + vNormal = aNormal; + vUv = aUv; + vMaterialMeshIndex = aMaterialMeshIndex; + gl_Position = projView * vec4(aPosition, 1); } +` + }; - // Create a bounding volume hierarchy of scene geometry + var constants$1 = ` + #define PI 3.14159265359 + #define TWOPI 6.28318530718 + #define INVPI 0.31830988618 + #define INVPI2 0.10132118364 + #define EPS 0.0005 + #define INF 1.0e999 - const size = new THREE$1.Vector3(); + #define ROUGHNESS_MIN 0.03 +`; - function bvhAccel(geometry, materialIndices) { - const primitiveInfo = makePrimitiveInfo(geometry, materialIndices); - const node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); + var materialBuffer = ` - return node; - } +uniform Materials { + vec4 colorAndMaterialType[NUM_MATERIALS]; + vec4 roughnessMetalnessNormalScale[NUM_MATERIALS]; - function flattenBvh(bvh) { - const flat = []; - const isBounds = []; + #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) + ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS]; + #endif - const splitAxisMap = { - x: 0, - y: 1, - z: 2 - }; + #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) + vec4 diffuseNormalMapSize[NUM_DIFFUSE_NORMAL_MAPS]; + #endif - let maxDepth = 1; - const 
traverse = (node, depth = 1) => { + #if defined(NUM_PBR_MAPS) + vec2 pbrMapSize[NUM_PBR_MAPS]; + #endif +} materials; - maxDepth = Math.max(depth, maxDepth); +#ifdef NUM_DIFFUSE_MAPS + uniform mediump sampler2DArray diffuseMap; +#endif - if (node.primitives) { - for (let i = 0; i < node.primitives.length; i++) { - const p = node.primitives[i]; - flat.push( - p.indices[0], p.indices[1], p.indices[2], node.primitives.length, - p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex - ); - isBounds.push(false); - } - } else { - const bounds = node.bounds; - - flat.push( - bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], - bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild - ); +#ifdef NUM_NORMAL_MAPS + uniform mediump sampler2DArray normalMap; +#endif - const i = flat.length - 1; - isBounds.push(true); +#ifdef NUM_PBR_MAPS + uniform mediump sampler2DArray pbrMap; +#endif - traverse(node.child0, depth + 1); - flat[i] = flat.length / 4; // pointer to second child - traverse(node.child1, depth + 1); - } - }; +float getMatType(int materialIndex) { + return materials.colorAndMaterialType[materialIndex].w; +} - traverse(bvh); +vec3 getMatColor(int materialIndex, vec2 uv) { + vec3 color = materials.colorAndMaterialType[materialIndex].rgb; - const buffer = new ArrayBuffer(4 * flat.length); - const floatView = new Float32Array(buffer); - const intView = new Int32Array(buffer); + #ifdef NUM_DIFFUSE_MAPS + int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x; + if (diffuseMapIndex >= 0) { + color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb; + } + #endif - for (let i = 0; i < isBounds.length; i++) { - let k = 8 * i; + return color; +} - if (isBounds[i]) { - floatView[k] = flat[k]; - floatView[k + 1] = flat[k + 1]; - floatView[k + 2] = flat[k + 2]; - intView[k + 3] = flat[k + 3]; - } else { - intView[k] = flat[k]; - intView[k + 1] = flat[k + 1]; - intView[k + 2] = flat[k + 2]; - intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle - } +float getMatRoughness(int materialIndex, vec2 uv) { + float roughness = materials.roughnessMetalnessNormalScale[materialIndex].x; - floatView[k + 4] = flat[k + 4]; - floatView[k + 5] = flat[k + 5]; - floatView[k + 6] = flat[k + 6]; - intView[k + 7] = flat[k + 7]; + #ifdef NUM_PBR_MAPS + int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z; + if (roughnessMapIndex >= 0) { + roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g; } + #endif - return { - maxDepth, - count: flat.length / 4, - buffer: floatView - }; - } + return roughness; +} - function makePrimitiveInfo(geometry, materialIndices) { - const primitiveInfo = []; - const indices = geometry.getIndex().array; - const position = geometry.getAttribute('position'); - const v0 = new THREE$1.Vector3(); - const v1 = new THREE$1.Vector3(); - const v2 = new THREE$1.Vector3(); - const e0 = new THREE$1.Vector3(); - const e1 = new THREE$1.Vector3(); +float getMatMetalness(int materialIndex, vec2 uv) { + float metalness = materials.roughnessMetalnessNormalScale[materialIndex].y; - for (let i = 0; i < indices.length; i += 3) { - const bounds = new THREE$1.Box3(); + #ifdef NUM_PBR_MAPS + int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w; + if (metalnessMapIndex >= 0) { + metalness *= texture(pbrMap, vec3(uv * 
materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b; + } + #endif - v0.fromBufferAttribute(position, indices[i]); - v1.fromBufferAttribute(position, indices[i + 1]); - v2.fromBufferAttribute(position, indices[i + 2]); - e0.subVectors(v2, v0); - e1.subVectors(v1, v0); + return metalness; +} - bounds.expandByPoint(v0); - bounds.expandByPoint(v1); - bounds.expandByPoint(v2); +#ifdef NUM_NORMAL_MAPS +vec3 getMatNormal(int materialIndex, vec2 uv, vec3 normal, vec3 dp1, vec3 dp2, vec2 duv1, vec2 duv2) { + int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y; + if (normalMapIndex >= 0) { + // http://www.thetenthplanet.de/archives/1180 + // Compute co-tangent and co-bitangent vectors + vec3 dp2perp = cross(dp2, normal); + vec3 dp1perp = cross(normal, dp1); + vec3 dpdu = dp2perp * duv1.x + dp1perp * duv2.x; + vec3 dpdv = dp2perp * duv1.y + dp1perp * duv2.y; + float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv))); + dpdu *= invmax; + dpdv *= invmax; + + vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0; + n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw; + + mat3 tbn = mat3(dpdu, dpdv, normal); + + return normalize(tbn * n); + } else { + return normal; + } +} +#endif +`; - const info = { - bounds: bounds, - center: bounds.getCenter(new THREE$1.Vector3()), - indices: [indices[i], indices[i + 1], indices[i + 2]], - faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), - materialIndex: materialIndices[i / 3] - }; + var fragment = { - primitiveInfo.push(info); - } + outputs: ['position', 'normal', 'faceNormal', 'color', 'matProps'], + includes: [ + constants$1, + materialBuffer, + ], + source: ` + in vec3 vPosition; + in vec3 vNormal; + in vec2 vUv; + flat in ivec2 vMaterialMeshIndex; + + vec3 faceNormals(vec3 pos) { + vec3 fdx = dFdx(pos); + vec3 fdy = dFdy(pos); + return cross(fdx, fdy); + } - return primitiveInfo; + void main() { + int materialIndex = vMaterialMeshIndex.x; + int meshIndex = vMaterialMeshIndex.y; + + vec2 uv = fract(vUv); + + vec3 color = getMatColor(materialIndex, uv); + float roughness = getMatRoughness(materialIndex, uv); + float metalness = getMatMetalness(materialIndex, uv); + float materialType = getMatType(materialIndex); + + roughness = clamp(roughness, ROUGHNESS_MIN, 1.0); + metalness = clamp(metalness, 0.0, 1.0); + + vec3 normal = vNormal; + vec3 faceNormal = faceNormals(vPosition); + normal *= sign(dot(normal, faceNormal)); + + #ifdef NUM_NORMAL_MAPS + vec3 dp1 = dFdx(vPosition); + vec3 dp2 = dFdy(vPosition); + vec2 duv1 = dFdx(vUv); + vec2 duv2 = dFdy(vUv); + normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2); + #endif + + out_position = vec4(vPosition, float(meshIndex) + EPS); + out_normal = vec4(normal, materialType); + out_faceNormal = vec4(faceNormal, 0); + out_color = vec4(color, 0); + out_matProps = vec4(roughness, metalness, 0, 0); } +` - function recursiveBuild(primitiveInfo, start, end) { - const bounds = new THREE$1.Box3(); - for (let i = start; i < end; i++) { - bounds.union(primitiveInfo[i].bounds); - } + }; - const nPrimitives = end - start; + function makeGBufferPass(gl, { materialBuffer, mergedMesh }) { + const renderPass = makeRenderPass(gl, { + defines: materialBuffer.defines, + vertex: vertex$1, + fragment + }); - if (nPrimitives === 1) { - return makeLeafNode(primitiveInfo.slice(start, end), bounds); - } else { - const centroidBounds = new THREE$1.Box3(); - for (let i = 
start; i < end; i++) { - centroidBounds.expandByPoint(primitiveInfo[i].center); - } - const dim = maximumExtent(centroidBounds); + renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); + renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); + renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); - let mid = Math.floor((start + end) / 2); + const geometry = mergedMesh.geometry; - // middle split method - // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; - // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); + const elementCount = geometry.getIndex().count; - // if (mid === start || mid === end) { - // mid = Math.floor((start + end) / 2); - // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); - // } + const vao = gl.createVertexArray(); - // surface area heuristic method - if (nPrimitives <= 4) { - nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); - } else { - const buckets = []; - for (let i = 0; i < 12; i++) { - buckets.push({ - bounds: new THREE$1.Box3(), - count: 0, - }); - } + gl.bindVertexArray(vao); + uploadAttributes(gl, renderPass, geometry); + gl.bindVertexArray(null); - for (let i = start; i < end; i++) { - let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[i].center)); - if (b === buckets.length) { - b = buckets.length - 1; - } - buckets[b].count++; - buckets[b].bounds.union(primitiveInfo[i].bounds); - } + let jitterX = 0; + let jitterY = 0; + function setJitter(x, y) { + jitterX = x; + jitterY = y; + } - const cost = []; + let currentCamera; + function setCamera(camera) { + currentCamera = camera; + } - for (let i = 0; i < buckets.length - 1; i++) { - const b0 = new THREE$1.Box3(); - const b1 = new THREE$1.Box3(); - let count0 = 0; - let count1 = 0; - for (let j = 0; j <= i; j++) { - b0.union(buckets[j].bounds); - count0 += buckets[j].count; - } - for (let j = i + 1; j < buckets.length; j++) { - b1.union(buckets[j].bounds); - count1 += buckets[j].count; - } - cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); - } + function calcCamera() { + projView.copy(currentCamera.projectionMatrix); - let minCost = cost[0]; - let minCostSplitBucket = 0; - for (let i = 1; i < cost.length; i++) { - if (cost[i] < minCost) { - minCost = cost[i]; - minCostSplitBucket = i; - } - } + projView.elements[8] += 2 * jitterX; + projView.elements[9] += 2 * jitterY; - mid = partition(primitiveInfo, p => { - let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); - if (b === buckets.length) { - b = buckets.length - 1; - } - return b <= minCostSplitBucket; - }, start, end); - } + projView.multiply(currentCamera.matrixWorldInverse); + renderPass.setUniform('projView', projView.elements); + } - return makeInteriorNode( - dim, - recursiveBuild(primitiveInfo, start, mid), - recursiveBuild(primitiveInfo, mid, end), - ); + let projView = new THREE$1.Matrix4(); + + function draw() { + calcCamera(); + gl.bindVertexArray(vao); + renderPass.useProgram(); + gl.enable(gl.DEPTH_TEST); + gl.drawElements(gl.TRIANGLES, elementCount, gl.UNSIGNED_INT, 0); + gl.disable(gl.DEPTH_TEST); } - } - function makeLeafNode(primitives, bounds) { return { - primitives, - bounds + draw, + outputLocs: renderPass.outputLocs, + setCamera, + setJitter }; } - function makeInteriorNode(splitAxis, child0, child1) { - return { - child0, - child1, - bounds: new 
THREE$1.Box3().union(child0.bounds).union(child1.bounds), - splitAxis, - }; - } + function uploadAttributes(gl, renderPass, geometry) { + setAttribute(gl, renderPass.attribLocs.aPosition, geometry.getAttribute('position')); + setAttribute(gl, renderPass.attribLocs.aNormal, geometry.getAttribute('normal')); + setAttribute(gl, renderPass.attribLocs.aUv, geometry.getAttribute('uv')); + setAttribute(gl, renderPass.attribLocs.aMaterialMeshIndex, geometry.getAttribute('materialMeshIndex')); - function maximumExtent(box3) { - box3.getSize(size); - if (size.x > size.z) { - return size.x > size.y ? 'x' : 'y'; - } else { - return size.z > size.y ? 'z' : 'y'; - } + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, gl.createBuffer()); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, geometry.getIndex().array, gl.STATIC_DRAW); } - function boxOffset(box3, dim, v) { - let offset = v[dim] - box3.min[dim]; + function setAttribute(gl, location, bufferAttribute) { + const { itemSize, array } = bufferAttribute; - if (box3.max[dim] > box3.min[dim]){ - offset /= box3.max[dim] - box3.min[dim]; - } + gl.enableVertexAttribArray(location); + gl.bindBuffer(gl.ARRAY_BUFFER, gl.createBuffer()); + gl.bufferData(gl.ARRAY_BUFFER, array, gl.STATIC_DRAW); - return offset; + if (array instanceof Float32Array) { + gl.vertexAttribPointer(location, itemSize, gl.FLOAT, false, 0, 0); + } else if (array instanceof Int32Array) { + gl.vertexAttribIPointer(location, itemSize, gl.INT, 0, 0); + } else { + throw 'Unsupported buffer type'; + } } - function surfaceArea(box3) { - box3.getSize(size); - return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); - } + function makeUniformBuffer(gl, program, blockName) { + const blockIndex = gl.getUniformBlockIndex(program, blockName); + const blockSize = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_DATA_SIZE); - // Convert image data from the RGBE format to a 32-bit floating point format - // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format - // Optional multiplier argument for performance optimization - function rgbeToFloat(buffer, intensity = 1) { - const texels = buffer.length / 4; - const floatBuffer = new Float32Array(texels * 3); + const uniforms = getUniformBlockInfo(gl, program, blockIndex); - const expTable = []; - for (let i = 0; i < 255; i++) { - expTable[i] = intensity * Math.pow(2, i - 128) / 255; - } + const buffer = gl.createBuffer(); + gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); + gl.bufferData(gl.UNIFORM_BUFFER, blockSize, gl.STATIC_DRAW); - for (let i = 0; i < texels; i++) { + const data = new DataView(new ArrayBuffer(blockSize)); - const r = buffer[4 * i]; - const g = buffer[4 * i + 1]; - const b = buffer[4 * i + 2]; - const a = buffer[4 * i + 3]; - const e = expTable[a]; + function set(name, value) { + if (!uniforms[name]) { + // console.warn('No uniform property with name ', name); + return; + } - floatBuffer[3 * i] = r * e; - floatBuffer[3 * i + 1] = g * e; - floatBuffer[3 * i + 2] = b * e; + const { type, size, offset, stride } = uniforms[name]; + + switch(type) { + case gl.FLOAT: + setData(data, 'setFloat32', size, offset, stride, 1, value); + break; + case gl.FLOAT_VEC2: + setData(data, 'setFloat32', size, offset, stride, 2, value); + break; + case gl.FLOAT_VEC3: + setData(data, 'setFloat32', size, offset, stride, 3, value); + break; + case gl.FLOAT_VEC4: + setData(data, 'setFloat32', size, offset, stride, 4, value); + break; + case gl.INT: + setData(data, 'setInt32', size, offset, stride, 1, 
value); + break; + case gl.INT_VEC2: + setData(data, 'setInt32', size, offset, stride, 2, value); + break; + case gl.INT_VEC3: + setData(data, 'setInt32', size, offset, stride, 3, value); + break; + case gl.INT_VEC4: + setData(data, 'setInt32', size, offset, stride, 4, value); + break; + case gl.BOOL: + setData(data, 'setUint32', size, offset, stride, 1, value); + break; + default: + console.warn('UniformBuffer: Unsupported type'); + } } + function bind(index) { + gl.bindBuffer(gl.UNIFORM_BUFFER, buffer); + gl.bufferSubData(gl.UNIFORM_BUFFER, 0, data); + gl.bindBufferBase(gl.UNIFORM_BUFFER, index, buffer); + } + + return { + set, + bind + }; + } + + function getUniformBlockInfo(gl, program, blockIndex) { + const indices = gl.getActiveUniformBlockParameter(program, blockIndex, gl.UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES); + const offset = gl.getActiveUniforms(program, indices, gl.UNIFORM_OFFSET); + const stride = gl.getActiveUniforms(program, indices, gl.UNIFORM_ARRAY_STRIDE); + + const uniforms = {}; + for (let i = 0; i < indices.length; i++) { + const { name, type, size } = gl.getActiveUniform(program, indices[i]); + uniforms[name] = { + type, + size, + offset: offset[i], + stride: stride[i] + }; + } + + return uniforms; + } + + function setData(dataView, setter, size, offset, stride, components, value) { + const l = Math.min(value.length / components, size); + for (let i = 0; i < l; i++) { + for (let k = 0; k < components; k++) { + dataView[setter](offset + i * stride + k * 4, value[components * i + k], true); + } + } } function clamp(x, min, max) { @@ -872,326 +940,1182 @@ return true; } - // Convert image data from the RGBE format to a 32-bit floating point format + function makeTexture(gl, params) { + let { + width = null, + height = null, - const DEFAULT_MAP_RESOLUTION = { - width: 2048, - height: 1024, - }; + // A single HTMLImageElement, ImageData, or TypedArray, + // Or an array of any of these objects. In this case an Array Texture will be created + data = null, - // Tools for generating and modify env maps for lighting from scene component data + // If greater than 1, create an Array Texture of this length + length = 1, - function generateBackgroundMapFromSceneBackground(background) { - let backgroundImage; + // Number of channels, [1-4]. If left blank, the function will decide the number of channels automatically from the data + channels = null, - if (background.isColor) { - backgroundImage = generateSolidMap(1, 1, background); - } else if (background.encoding === THREE$1.RGBEEncoding) { - backgroundImage = { - width: background.image.width, - height: background.image.height, - data: background.image.data, - }; - backgroundImage.data = rgbeToFloat(backgroundImage.data); - } - return backgroundImage; - } + // Either 'byte' or 'float' + // If left empty, the function will decide the format automatically from the data + storage = null, - function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { - let envImage = initializeEnvMap(environmentLights); - ambientLights.forEach( light => { addAmbientLightToEnvMap(light, envImage); }); - directionalLights.forEach( light => { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); + // Reverse the texture across the y-axis. 
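+      // (three.js textures created from images typically default to flipY = true, while DataTextures default to false)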
+ flipY = false, - return envImage; - } + // sampling properties + gammaCorrection = false, + wrapS = gl.CLAMP_TO_EDGE, + wrapT = gl.CLAMP_TO_EDGE, + minFilter = gl.NEAREST, + magFilter = gl.NEAREST, + } = params; - function initializeEnvMap(environmentLights) { - let envImage; + width = width || data.width || 0; + height = height || data.height || 0; - // Initialize map from environment light if present - if (environmentLights.length > 0) { - // TODO: support multiple environment lights (what if they have different resolutions?) - const environmentLight = environmentLights[0]; - envImage = { - width: environmentLight.map.image.width, - height: environmentLight.map.image.height, - data: environmentLight.map.image.data, - }; - envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); - } else { - // initialize blank map - envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); - } + const texture = gl.createTexture(); - return envImage; - } + let target; + let dataArray; - function generateSolidMap(width, height, color, intensity) { - const texels = width * height; - const floatBuffer = new Float32Array(texels * 3); - if (color && color.isColor) { - setBufferToColor(floatBuffer, color, intensity); + // if data is a JS array but not a TypedArray, assume data is an array of images and create a GL Array Texture + if (Array.isArray(data)) { + dataArray = data; + data = dataArray[0]; } - return { - width: width, - height: height, - data: floatBuffer, - }; - } - function setBufferToColor(buffer, color, intensity = 1) { - buffer.forEach(function(part, index) { - const component = index % 3; - if (component === 0) { - buffer[index] = color.r * intensity; - } - else if (component === 1) { - buffer[index] = color.g * intensity; - } - else if (component === 2) { - buffer[index] = color.b * intensity; - } - }); - return buffer; - } + target = dataArray || length > 1 ? 
gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; - function addAmbientLightToEnvMap(light, image) { - const color = light.color; - image.data.forEach(function(part, index) { - const component = index % 3; - if (component === 0) { - image.data[index] += color.r * light.intensity; - } - else if (component === 1) { - image.data[index] += color.g * light.intensity; - } - else if (component === 2) { - image.data[index] += color.b * light.intensity; - } - }); - } + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(target, texture); - function addDirectionalLightToEnvMap(light, image) { - const sphericalCoords = new THREE$1.Spherical(); - const lightDirection = light.position.clone().sub(light.target.position); + gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); + gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); + gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); - sphericalCoords.setFromVector3(lightDirection); - sphericalCoords.theta = (Math.PI * 3 / 2) - sphericalCoords.theta; - sphericalCoords.makeSafe(); + if (!channels) { + if (data && data.length) { + channels = data.length / (width * height); // infer number of channels from data size + } else { + channels = 4; + } + } - return addLightAtCoordinates(light, image, sphericalCoords); - } + channels = clamp(channels, 1, 4); - // Perform modifications on env map to match input scene - function addLightAtCoordinates(light, image, originCoords) { - const floatBuffer = image.data; - const width = image.width; - const height = image.height; - const xTexels = floatBuffer.length / (3 * height); - const yTexels = floatBuffer.length / (3 * width); + const { type, format, internalFormat } = getTextureFormat(gl, channels, storage, data, gammaCorrection); - // default softness for standard directional lights is 0.01, i.e. a hard shadow - const softness = light.softness || 0.01; + if (dataArray) { + gl.texStorage3D(target, 1, internalFormat, width, height, dataArray.length); + for (let i = 0; i < dataArray.length; i++) { + // if layer is an HTMLImageElement, use the .width and .height properties of each layer + // otherwise use the max size of the array texture + const layerWidth = dataArray[i].width || width; + const layerHeight = dataArray[i].height || height; - // angle from center of light at which no more contributions are projected - const threshold = findThreshold(softness); + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? flipY[i] : flipY); - // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it - const useThreshold = threshold < Math.PI / 5; + gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); + } + } else if (length > 1) { + // create empty array texture + gl.texStorage3D(target, 1, internalFormat, width, height, length); + } else { + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); + gl.texStorage2D(target, 1, internalFormat, width, height); + if (data) { + gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); + } + } - // functional trick to keep the conditional check out of the main loop - const intensityFromAngleFunction = useThreshold ? 
getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; + // return state to default + gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false); - let begunAddingContributions = false; - let currentCoords = new THREE$1.Spherical(); + return { + target, + texture + }; + } - // Iterates over each row from top to bottom - for (let i = 0; i < xTexels; i++) { + function makeDepthTarget(gl, width, height) { + const texture = gl.createRenderbuffer(); + const target = gl.RENDERBUFFER; - let encounteredInThisRow = false; + gl.bindRenderbuffer(target, texture); + gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT24, width, height); + gl.bindRenderbuffer(target, null); - // Iterates over each texel in row - for (let j = 0; j < yTexels; j++) { - const bufferIndex = j * width + i; - currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); - const falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + return { + target, + texture + }; + } - if(falloff > 0) { - encounteredInThisRow = true; - begunAddingContributions = true; - } + function getTextureFormat(gl, channels, storage, data, gammaCorrection) { + let type; + let internalFormat; - const intensity = light.intensity * falloff; + const isByteArray = + data instanceof Uint8Array || + data instanceof HTMLImageElement || + data instanceof HTMLCanvasElement || + data instanceof ImageData; - floatBuffer[bufferIndex * 3] += intensity * light.color.r; - floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; - floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; - } + const isFloatArray = data instanceof Float32Array; - // First row to not add a contribution since adding began - // This means the entire light has been added and we can exit early - if(!encounteredInThisRow && begunAddingContributions) { - return floatBuffer; - } - } + if (storage === 'byte' || (!storage && isByteArray)) { + internalFormat = { + 1: gl.R8, + 2: gl.RG8, + 3: gammaCorrection ? gl.SRGB8 : gl.RGB8, + 4: gammaCorrection ? 
gl.SRGB8_ALPHA8 : gl.RGBA8 + }[channels]; - return floatBuffer; - } + type = gl.UNSIGNED_BYTE; + } else if (storage === 'float' || (!storage && isFloatArray)) { + internalFormat = { + 1: gl.R32F, + 2: gl.RG32F, + 3: gl.RGB32F, + 4: gl.RGBA32F + }[channels]; - function findThreshold(softness) { - const step = Math.PI / 128; - const maxSteps = (2.0 * Math.PI) / step; + type = gl.FLOAT; + } else if (storage === 'halfFloat') { + internalFormat = { + 1: gl.R16F, + 2: gl.RG16F, + 3: gl.RGB16F, + 4: gl.RGBA16F + }[channels]; - for (let i = 0; i < maxSteps; i++) { - const angle = i * step; - const falloff = getFalloffAtAngle(angle, softness); - if (falloff <= 0.0001) { - return angle; - } + type = gl.FLOAT; + } else if (storage === 'snorm') { + internalFormat = { + 1: gl.R8_SNORM, + 2: gl.RG8_SNORM, + 3: gl.RGB8_SNORM, + 4: gl.RGBA8_SNORM, + }[channels]; + + type = gl.UNSIGNED_BYTE; } + + const format = { + 1: gl.RED, + 2: gl.RG, + 3: gl.RGB, + 4: gl.RGBA + }[channels]; + + return { + format, + internalFormat, + type + }; } - function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { - const deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); - const deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); + // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property + function getTexturesFromMaterials(meshes, textureNames) { + const textureMap = {}; - if(deltaTheta > threshold && deltaPhi > threshold) { - return 0; + for (const name of textureNames) { + const textures = []; + textureMap[name] = { + indices: texturesFromMaterials(meshes, name, textures), + textures + }; } - const angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); + return textureMap; } - function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { - const angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); - } + // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties + function mergeTexturesFromMaterials(meshes, textureNames) { + const textureMap = { + textures: [], + indices: {} + }; - function getAngleDelta(angleA, angleB) { - const diff = Math.abs(angleA - angleB) % (2 * Math.PI); - return diff > Math.PI ? 
(2 * Math.PI - diff) : diff; - } + for (const name of textureNames) { + textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + } - const angleBetweenSphericals = function() { - const originVector = new THREE$1.Vector3(); - const currentVector = new THREE$1.Vector3(); + return textureMap; + } - return (originCoords, currentCoords) => { - originVector.setFromSpherical(originCoords); - currentVector.setFromSpherical(currentCoords); - return originVector.angleTo(currentVector); - }; - }(); + function texturesFromMaterials(materials, textureName, textures) { + const indices = []; - // TODO: possibly clean this up and optimize it - // - // This function was arrived at through experimentation, it provides good - // looking results with percieved softness that scale relatively linearly with - // the softness value in the 0 - 1 range - // - // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) - // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times - function getFalloffAtAngle(angle, softness) { - const softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); - const falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); - return falloff; - } + for (const material of materials) { + if (!material[textureName]) { + indices.push(-1); + } else { + let index = textures.length; + for (let i = 0; i < textures.length; i++) { + if (textures[i] === material[textureName]) { + // Reuse existing duplicate texture. + index = i; + break; + } + } + if (index === textures.length) { + // New texture. Add texture to list. + textures.push(material[textureName]); + } + indices.push(index); + } + } - function equirectangularToSpherical(x, y, width, height, target) { - target.phi = (Math.PI * y) / height; - target.theta = (2.0 * Math.PI * x) / width; - return target; + return indices; } - // Create a piecewise 2D cumulative distribution function of light intensity from an envmap - // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions + function makeMaterialBuffer(gl, materials) { + const maps = getTexturesFromMaterials(materials, ['map', 'normalMap']); + const pbrMap = mergeTexturesFromMaterials(materials, ['roughnessMap', 'metalnessMap']); - function envmapDistribution(image) { - const data = image.data; + const textures = {}; - const cdfImage = { - width: image.width + 2, - height: image.height + 1 - }; + const bufferData = {}; - const cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); + bufferData.color = materials.map(m => m.color); + bufferData.roughness = materials.map(m => m.roughness); + bufferData.metalness = materials.map(m => m.metalness); + bufferData.normalScale = materials.map(m => m.normalScale); - for (let y = 0; y < image.height; y++) { - const sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); - for (let x = 0; x < image.width; x++) { - const i = 3 * (y * image.width + x); - let r = data[i]; - let g = data[i + 1]; - let b = data[i + 2]; - let luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; - luminance *= sinTheta; - cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); - cdf.set(x + 1, y, 1, luminance); + bufferData.type = materials.map(m => { + if (m.shadowCatcher) { + return ShadowCatcherMaterial; } - - const rowIntegral = 
cdf.get(cdfImage.width - 1, y, 0); - - for (let x = 1; x < cdf.width; x++) { - cdf.set(x, y, 0, cdf.get(x, y, 0) / rowIntegral); - cdf.set(x, y, 1, cdf.get(x, y, 1) / rowIntegral); + if (m.transparent) { + return m.solid ? ThickMaterial : ThinMaterial; } + }); - cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); - cdf.set(0, y, 1, rowIntegral); + if (maps.map.textures.length > 0) { + const { relativeSizes, texture } = makeTextureArray(gl, maps.map.textures, true); + textures.diffuseMap = texture; + bufferData.diffuseMapSize = relativeSizes; + bufferData.diffuseMapIndex = maps.map.indices; } - const integral = cdf.get(0, cdf.height - 1, 0); + if (maps.normalMap.textures.length > 0) { + const { relativeSizes, texture } = makeTextureArray(gl, maps.normalMap.textures, false); + textures.normalMap = texture; + bufferData.normalMapSize = relativeSizes; + bufferData.normalMapIndex = maps.normalMap.indices; + } - for (let y = 0; y < cdf.height; y++) { - cdf.set(0, y, 0, cdf.get(0, y, 0) / integral); - cdf.set(0, y, 1, cdf.get(0, y, 1) / integral); + if (pbrMap.textures.length > 0) { + const { relativeSizes, texture } = makeTextureArray(gl, pbrMap.textures, false); + textures.pbrMap = texture; + bufferData.pbrMapSize = relativeSizes; + bufferData.roughnessMapIndex = pbrMap.indices.roughnessMap; + bufferData.metalnessMapIndex = pbrMap.indices.metalnessMap; } - cdfImage.data = cdf.array; - return cdfImage; + const defines = { + NUM_MATERIALS: materials.length, + NUM_DIFFUSE_MAPS: maps.map.textures.length, + NUM_NORMAL_MAPS: maps.normalMap.textures.length, + NUM_DIFFUSE_NORMAL_MAPS: Math.max(maps.map.textures.length, maps.normalMap.textures.length), + NUM_PBR_MAPS: pbrMap.textures.length, + }; + + // create temporary shader program including the Material uniform buffer + // used to query the compiled structure of the uniform buffer + const renderPass = makeRenderPass(gl, { + vertex: { + source: `void main() {}` + }, + fragment: { + includes: [ materialBuffer ], + source: `void main() {}` + }, + defines + }); + + uploadToUniformBuffer(gl, renderPass.program, bufferData); + + return { defines, textures }; } + function makeTextureArray(gl, textures, gammaCorrection = false) { + const images = textures.map(t => t.image); + const flipY = textures.map(t => t.flipY); + const { maxSize, relativeSizes } = maxImageSize(images); - function makeTextureArray(width, height, channels) { - const array = new Float32Array(channels * width * height); + // create GL Array Texture from individual textures + const texture = makeTexture(gl, { + width: maxSize.width, + height: maxSize.height, + gammaCorrection, + data: images, + flipY, + channels: 3, + minFilter: gl.LINEAR, + magFilter: gl.LINEAR, + }); return { - set(x, y, channel, val) { - array[channels * (y * width + x) + channel] = val; - }, - get(x, y, channel) { - return array[channels * (y * width + x) + channel]; - }, - width, - height, - channels, - array + texture, + relativeSizes }; } - function unrollLoop(indexName, start, limit, step, code) { - let unrolled = `int ${indexName};\n`; + function maxImageSize(images) { + const maxSize = { + width: 0, + height: 0 + }; - for (let i = start; (step > 0 && i < limit) || (step < 0 && i > limit); i += step) { - unrolled += `${indexName} = ${i};\n`; - unrolled += code; + for (const image of images) { + maxSize.width = Math.max(maxSize.width, image.width); + maxSize.height = Math.max(maxSize.height, image.height); } - return unrolled; + const relativeSizes = []; + for (const image of images) { + 
relativeSizes.push(image.width / maxSize.width); + relativeSizes.push(image.height / maxSize.height); + } + + return { maxSize, relativeSizes }; } - var core = ` - #define PI 3.14159265359 - #define TWOPI 6.28318530718 - #define INVPI 0.31830988618 - #define INVPI2 0.10132118364 - #define EPS 0.0005 - #define INF 1.0e999 - #define RAY_MAX_DISTANCE 9999.0 - #define STANDARD 0 - #define THIN_GLASS 1 + // Upload arrays to uniform buffer objects + // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout + + function uploadToUniformBuffer(gl, program, bufferData) { + const materialBuffer = makeUniformBuffer(gl, program, 'Materials'); + + materialBuffer.set('Materials.colorAndMaterialType[0]', interleave( + { data: [].concat(...bufferData.color.map(d => d.toArray())), channels: 3 }, + { data: bufferData.type, channels: 1} + )); + + materialBuffer.set('Materials.roughnessMetalnessNormalScale[0]', interleave( + { data: bufferData.roughness, channels: 1 }, + { data: bufferData.metalness, channels: 1 }, + { data: [].concat(...bufferData.normalScale.map(d => d.toArray())), channels: 2 } + )); + + materialBuffer.set('Materials.diffuseNormalRoughnessMetalnessMapIndex[0]', interleave( + { data: bufferData.diffuseMapIndex, channels: 1 }, + { data: bufferData.normalMapIndex, channels: 1 }, + { data: bufferData.roughnessMapIndex, channels: 1 }, + { data: bufferData.metalnessMapIndex, channels: 1 } + )); + + materialBuffer.set('Materials.diffuseNormalMapSize[0]', interleave( + { data: bufferData.diffuseMapSize, channels: 2 }, + { data: bufferData.normalMapSize, channels: 2 } + )); + + materialBuffer.set('Materials.pbrMapSize[0]', bufferData.pbrMapSize); + + materialBuffer.bind(0); + } + + function interleave(...arrays) { + let maxLength = 0; + for (let i = 0; i < arrays.length; i++) { + const a = arrays[i]; + const l = a.data ? 
a.data.length / a.channels : 0; + maxLength = Math.max(maxLength, l); + } + + const interleaved = []; + for (let i = 0; i < maxLength; i++) { + for (let j = 0; j < arrays.length; j++) { + const { data = [], channels } = arrays[j]; + for (let c = 0; c < channels; c++) { + interleaved.push(data[i * channels + c]); + } + } + } + + return interleaved; + } + + function mergeMeshesToGeometry(meshes) { + + let vertexCount = 0; + let indexCount = 0; + + const geometryAndMaterialIndex = []; + const materialIndexMap = new Map(); + + for (const mesh of meshes) { + const geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); + + const index = geometry.getIndex(); + if (!index) { + addFlatGeometryIndices(geometry); + } + + geometry.applyMatrix(mesh.matrixWorld); + + if (!geometry.getAttribute('normal')) { + geometry.computeVertexNormals(); + } else { + geometry.normalizeNormals(); + } + + vertexCount += geometry.getAttribute('position').count; + indexCount += geometry.getIndex().count; + + const material = mesh.material; + let materialIndex = materialIndexMap.get(material); + if (materialIndex === undefined) { + materialIndex = materialIndexMap.size; + materialIndexMap.set(material, materialIndex); + } + + geometryAndMaterialIndex.push({ + geometry, + materialIndex + }); + } + + const geometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); + + return { + geometry, + materials: Array.from(materialIndexMap.keys()) + }; + } + + function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { + const positionAttrib = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + const normalAttrib = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + const uvAttrib = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); + const materialMeshIndexAttrib = new THREE$1.BufferAttribute(new Int32Array(2 * vertexCount), 2, false); + const indexAttrib = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); + + const mergedGeometry = new THREE$1.BufferGeometry(); + mergedGeometry.addAttribute('position', positionAttrib); + mergedGeometry.addAttribute('normal', normalAttrib); + mergedGeometry.addAttribute('uv', uvAttrib); + mergedGeometry.addAttribute('materialMeshIndex', materialMeshIndexAttrib); + mergedGeometry.setIndex(indexAttrib); + + let currentVertex = 0; + let currentIndex = 0; + let currentMesh = 1; + + for (const { geometry, materialIndex } of geometryAndMaterialIndex) { + const vertexCount = geometry.getAttribute('position').count; + mergedGeometry.merge(geometry, currentVertex); + + const meshIndex = geometry.getIndex(); + for (let i = 0; i < meshIndex.count; i++) { + indexAttrib.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); + } + + for (let i = 0; i < vertexCount; i++) { + materialMeshIndexAttrib.setXY(currentVertex + i, materialIndex, currentMesh); + } + + currentVertex += vertexCount; + currentIndex += meshIndex.count; + currentMesh++; + } + + return mergedGeometry; + } + + // Similar to buffergeometry.clone(), except we only copy + // specific attributes instead of everything + function cloneBufferGeometry(bufferGeometry, attributes) { + const newGeometry = new THREE$1.BufferGeometry(); + + for (const name of attributes) { + const attrib = bufferGeometry.getAttribute(name); + if (attrib) { + newGeometry.addAttribute(name, attrib.clone()); + } + } + + const index = bufferGeometry.getIndex(); + if (index) { + newGeometry.setIndex(index); + } + + return newGeometry; 
+ } + + function addFlatGeometryIndices(geometry) { + const position = geometry.getAttribute('position'); + + if (!position) { + console.warn('No position attribute'); + return; + } + + const index = new Uint32Array(position.count); + + for (let i = 0; i < index.length; i++) { + index[i] = i; + } + + geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + + return geometry; + } + + // Reorders the elements in the range [first, last) in such a way that + // all elements for which the comparator c returns true + // precede the elements for which comparator c returns false. + function partition(array, compare, left = 0, right = array.length) { + while (left !== right) { + while (compare(array[left])) { + left++; + if (left === right) { + return left; + } + } + do { + right--; + if (left === right) { + return left; + } + } while (!compare(array[right])); + + swap(array, left, right); + left++; + } + + return left; + } + + // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: + // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. + // All of the elements before this new nth element compare to true with elements after the nth element + function nthElement(array, compare, left = 0, right = array.length, k = Math.floor((left + right) / 2)) { + for (let i = left; i <= k; i++) { + let minIndex = i; + let minValue = array[i]; + for (let j = i + 1; j < right; j++) { + if (!compare(minValue, array[j])) { + minIndex = j; + minValue = array[j]; + swap(array, i, minIndex); + } + } + } + } + + function swap(array, a, b) { + const x = array[b]; + array[b] = array[a]; + array[a] = x; + } + + // Create a bounding volume hierarchy of scene geometry + + const size = new THREE$1.Vector3(); + + function bvhAccel(geometry) { + const primitiveInfo = makePrimitiveInfo(geometry); + const node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); + + return node; + } + + function flattenBvh(bvh) { + const flat = []; + const isBounds = []; + + const splitAxisMap = { + x: 0, + y: 1, + z: 2 + }; + + let maxDepth = 1; + const traverse = (node, depth = 1) => { + + maxDepth = Math.max(depth, maxDepth); + + if (node.primitives) { + for (let i = 0; i < node.primitives.length; i++) { + const p = node.primitives[i]; + flat.push( + p.indices[0], p.indices[1], p.indices[2], node.primitives.length, + p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex + ); + isBounds.push(false); + } + } else { + const bounds = node.bounds; + + flat.push( + bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], + bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second child + ); + + const i = flat.length - 1; + isBounds.push(true); + + traverse(node.child0, depth + 1); + flat[i] = flat.length / 4; // pointer to second child + traverse(node.child1, depth + 1); + } + }; + + traverse(bvh); + + const buffer = new ArrayBuffer(4 * flat.length); + const floatView = new Float32Array(buffer); + const intView = new Int32Array(buffer); + + for (let i = 0; i < isBounds.length; i++) { + let k = 8 * i; + + if (isBounds[i]) { + floatView[k] = flat[k]; + floatView[k + 1] = flat[k + 1]; + floatView[k + 2] = flat[k + 2]; + intView[k + 3] = flat[k + 3]; + } else { + intView[k] = flat[k]; + intView[k + 1] = flat[k + 1]; + intView[k + 2] = flat[k + 2]; + intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle + } + + floatView[k + 4] = flat[k + 4]; + floatView[k + 5] 
= flat[k + 5]; + floatView[k + 6] = flat[k + 6]; + intView[k + 7] = flat[k + 7]; + } + + return { + maxDepth, + count: flat.length / 4, + buffer: floatView + }; + } + + function makePrimitiveInfo(geometry) { + const primitiveInfo = []; + const indices = geometry.getIndex().array; + const position = geometry.getAttribute('position'); + const materialMeshIndex = geometry.getAttribute('materialMeshIndex'); + + const v0 = new THREE$1.Vector3(); + const v1 = new THREE$1.Vector3(); + const v2 = new THREE$1.Vector3(); + const e0 = new THREE$1.Vector3(); + const e1 = new THREE$1.Vector3(); + + for (let i = 0; i < indices.length; i += 3) { + const i0 = indices[i]; + const i1 = indices[i + 1]; + const i2 = indices[i + 2]; + + const bounds = new THREE$1.Box3(); + + v0.fromBufferAttribute(position, i0); + v1.fromBufferAttribute(position, i1); + v2.fromBufferAttribute(position, i2); + e0.subVectors(v2, v0); + e1.subVectors(v1, v0); + + bounds.expandByPoint(v0); + bounds.expandByPoint(v1); + bounds.expandByPoint(v2); + + const info = { + bounds: bounds, + center: bounds.getCenter(new THREE$1.Vector3()), + indices: [i0, i1, i2], + faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), + materialIndex: materialMeshIndex.getX(i0) + }; + + primitiveInfo.push(info); + } + + return primitiveInfo; + } + + function recursiveBuild(primitiveInfo, start, end) { + const bounds = new THREE$1.Box3(); + for (let i = start; i < end; i++) { + bounds.union(primitiveInfo[i].bounds); + } + + const nPrimitives = end - start; + + if (nPrimitives === 1) { + return makeLeafNode(primitiveInfo.slice(start, end), bounds); + } else { + const centroidBounds = new THREE$1.Box3(); + for (let i = start; i < end; i++) { + centroidBounds.expandByPoint(primitiveInfo[i].center); + } + const dim = maximumExtent(centroidBounds); + + let mid = Math.floor((start + end) / 2); + + // middle split method + // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; + // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); + + // if (mid === start || mid === end) { + // mid = Math.floor((start + end) / 2); + // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); + // } + + // surface area heuristic method + if (nPrimitives <= 4) { + nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); + } else { + const buckets = []; + for (let i = 0; i < 12; i++) { + buckets.push({ + bounds: new THREE$1.Box3(), + count: 0, + }); + } + + for (let i = start; i < end; i++) { + let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[i].center)); + if (b === buckets.length) { + b = buckets.length - 1; + } + buckets[b].count++; + buckets[b].bounds.union(primitiveInfo[i].bounds); + } + + const cost = []; + + for (let i = 0; i < buckets.length - 1; i++) { + const b0 = new THREE$1.Box3(); + const b1 = new THREE$1.Box3(); + let count0 = 0; + let count1 = 0; + for (let j = 0; j <= i; j++) { + b0.union(buckets[j].bounds); + count0 += buckets[j].count; + } + for (let j = i + 1; j < buckets.length; j++) { + b1.union(buckets[j].bounds); + count1 += buckets[j].count; + } + cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); + } + + let minCost = cost[0]; + let minCostSplitBucket = 0; + for (let i = 1; i < cost.length; i++) { + if (cost[i] < minCost) { + minCost = cost[i]; + minCostSplitBucket = i; + } + } + + mid = partition(primitiveInfo, p => { + let b = Math.floor(buckets.length * 
boxOffset(centroidBounds, dim, p.center)); + if (b === buckets.length) { + b = buckets.length - 1; + } + return b <= minCostSplitBucket; + }, start, end); + } + + return makeInteriorNode( + dim, + recursiveBuild(primitiveInfo, start, mid), + recursiveBuild(primitiveInfo, mid, end), + ); + } + } + + function makeLeafNode(primitives, bounds) { + return { + primitives, + bounds + }; + } + + function makeInteriorNode(splitAxis, child0, child1) { + return { + child0, + child1, + bounds: new THREE$1.Box3().union(child0.bounds).union(child1.bounds), + splitAxis, + }; + } + + function maximumExtent(box3) { + box3.getSize(size); + if (size.x > size.z) { + return size.x > size.y ? 'x' : 'y'; + } else { + return size.z > size.y ? 'z' : 'y'; + } + } + + function boxOffset(box3, dim, v) { + let offset = v[dim] - box3.min[dim]; + + if (box3.max[dim] > box3.min[dim]){ + offset /= box3.max[dim] - box3.min[dim]; + } + + return offset; + } + + function surfaceArea(box3) { + box3.getSize(size); + return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); + } + + // Convert image data from the RGBE format to a 32-bit floating point format + // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format + // Optional multiplier argument for performance optimization + function rgbeToFloat(buffer, intensity = 1) { + const texels = buffer.length / 4; + const floatBuffer = new Float32Array(texels * 3); + + const expTable = []; + for (let i = 0; i < 255; i++) { + expTable[i] = intensity * Math.pow(2, i - 128) / 255; + } + + for (let i = 0; i < texels; i++) { + + const r = buffer[4 * i]; + const g = buffer[4 * i + 1]; + const b = buffer[4 * i + 2]; + const a = buffer[4 * i + 3]; + const e = expTable[a]; + + floatBuffer[3 * i] = r * e; + floatBuffer[3 * i + 1] = g * e; + floatBuffer[3 * i + 2] = b * e; + } + + return floatBuffer; + } + + // Convert image data from the RGBE format to a 32-bit floating point format + + const DEFAULT_MAP_RESOLUTION = { + width: 2048, + height: 1024, + }; + + // Tools for generating and modify env maps for lighting from scene component data + + function generateBackgroundMapFromSceneBackground(background) { + let backgroundImage; + + if (background.isColor) { + backgroundImage = generateSolidMap(1, 1, background); + } else if (background.encoding === THREE$1.RGBEEncoding) { + backgroundImage = { + width: background.image.width, + height: background.image.height, + data: background.image.data, + }; + backgroundImage.data = rgbeToFloat(backgroundImage.data); + } + return backgroundImage; + } + + function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { + let envImage = initializeEnvMap(environmentLights); + ambientLights.forEach( light => { addAmbientLightToEnvMap(light, envImage); }); + directionalLights.forEach( light => { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); + + return envImage; + } + + function initializeEnvMap(environmentLights) { + let envImage; + + // Initialize map from environment light if present + if (environmentLights.length > 0) { + // TODO: support multiple environment lights (what if they have different resolutions?) 
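+      // For now, only the first environment light found in the scene contributes to the env map.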
+ const environmentLight = environmentLights[0]; + envImage = { + width: environmentLight.map.image.width, + height: environmentLight.map.image.height, + data: environmentLight.map.image.data, + }; + envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); + } else { + // initialize blank map + envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); + } + + return envImage; + } + + function generateSolidMap(width, height, color, intensity) { + const texels = width * height; + const floatBuffer = new Float32Array(texels * 3); + if (color && color.isColor) { + setBufferToColor(floatBuffer, color, intensity); + } + return { + width: width, + height: height, + data: floatBuffer, + }; + } + + function setBufferToColor(buffer, color, intensity = 1) { + buffer.forEach(function(part, index) { + const component = index % 3; + if (component === 0) { + buffer[index] = color.r * intensity; + } + else if (component === 1) { + buffer[index] = color.g * intensity; + } + else if (component === 2) { + buffer[index] = color.b * intensity; + } + }); + return buffer; + } + + function addAmbientLightToEnvMap(light, image) { + const color = light.color; + image.data.forEach(function(part, index) { + const component = index % 3; + if (component === 0) { + image.data[index] += color.r * light.intensity; + } + else if (component === 1) { + image.data[index] += color.g * light.intensity; + } + else if (component === 2) { + image.data[index] += color.b * light.intensity; + } + }); + } + + function addDirectionalLightToEnvMap(light, image) { + const sphericalCoords = new THREE$1.Spherical(); + const lightDirection = light.position.clone().sub(light.target.position); + + sphericalCoords.setFromVector3(lightDirection); + sphericalCoords.theta = (Math.PI * 3 / 2) - sphericalCoords.theta; + sphericalCoords.makeSafe(); + + return addLightAtCoordinates(light, image, sphericalCoords); + } + + // Perform modifications on env map to match input scene + function addLightAtCoordinates(light, image, originCoords) { + const floatBuffer = image.data; + const width = image.width; + const height = image.height; + const xTexels = floatBuffer.length / (3 * height); + const yTexels = floatBuffer.length / (3 * width); + + // default softness for standard directional lights is 0.01, i.e. a hard shadow + const softness = light.softness || 0.01; + + // angle from center of light at which no more contributions are projected + const threshold = findThreshold(softness); + + // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it + const useThreshold = threshold < Math.PI / 5; + + // functional trick to keep the conditional check out of the main loop + const intensityFromAngleFunction = useThreshold ? 
getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; + + let begunAddingContributions = false; + let currentCoords = new THREE$1.Spherical(); + + // Iterates over each row from top to bottom + for (let i = 0; i < xTexels; i++) { + + let encounteredInThisRow = false; + + // Iterates over each texel in row + for (let j = 0; j < yTexels; j++) { + const bufferIndex = j * width + i; + currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); + const falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + + if(falloff > 0) { + encounteredInThisRow = true; + begunAddingContributions = true; + } + + const intensity = light.intensity * falloff; + + floatBuffer[bufferIndex * 3] += intensity * light.color.r; + floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; + floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; + } + + // First row to not add a contribution since adding began + // This means the entire light has been added and we can exit early + if(!encounteredInThisRow && begunAddingContributions) { + return floatBuffer; + } + } + + return floatBuffer; + } + + function findThreshold(softness) { + const step = Math.PI / 128; + const maxSteps = (2.0 * Math.PI) / step; + + for (let i = 0; i < maxSteps; i++) { + const angle = i * step; + const falloff = getFalloffAtAngle(angle, softness); + if (falloff <= 0.0001) { + return angle; + } + } + } + + function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { + const deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); + const deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); + + if(deltaTheta > threshold && deltaPhi > threshold) { + return 0; + } + + const angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); + } + + function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { + const angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); + } + + function getAngleDelta(angleA, angleB) { + const diff = Math.abs(angleA - angleB) % (2 * Math.PI); + return diff > Math.PI ? 
(2 * Math.PI - diff) : diff; + } + + const angleBetweenSphericals = function() { + const originVector = new THREE$1.Vector3(); + const currentVector = new THREE$1.Vector3(); + + return (originCoords, currentCoords) => { + originVector.setFromSpherical(originCoords); + currentVector.setFromSpherical(currentCoords); + return originVector.angleTo(currentVector); + }; + }(); + + // TODO: possibly clean this up and optimize it + // + // This function was arrived at through experimentation, it provides good + // looking results with percieved softness that scale relatively linearly with + // the softness value in the 0 - 1 range + // + // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) + // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times + function getFalloffAtAngle(angle, softness) { + const softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); + const falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); + return falloff; + } + + function equirectangularToSpherical(x, y, width, height, target) { + target.phi = (Math.PI * y) / height; + target.theta = (2.0 * Math.PI * x) / width; + return target; + } + + // Create a piecewise 2D cumulative distribution function of light intensity from an envmap + // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions + + function envmapDistribution(image) { + const data = image.data; + + const cdfImage = { + width: image.width + 2, + height: image.height + 1 + }; + + const cdf = makeTextureArray$1(cdfImage.width, cdfImage.height, 2); + + for (let y = 0; y < image.height; y++) { + const sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); + for (let x = 0; x < image.width; x++) { + const i = 3 * (y * image.width + x); + let r = data[i]; + let g = data[i + 1]; + let b = data[i + 2]; + let luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; + luminance *= sinTheta; + cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); + cdf.set(x + 1, y, 1, luminance); + } + + const rowIntegral = cdf.get(cdfImage.width - 1, y, 0); + + for (let x = 1; x < cdf.width; x++) { + cdf.set(x, y, 0, cdf.get(x, y, 0) / rowIntegral); + cdf.set(x, y, 1, cdf.get(x, y, 1) / rowIntegral); + } + + cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); + cdf.set(0, y, 1, rowIntegral); + } + + const integral = cdf.get(0, cdf.height - 1, 0); + + for (let y = 0; y < cdf.height; y++) { + cdf.set(0, y, 0, cdf.get(0, y, 0) / integral); + cdf.set(0, y, 1, cdf.get(0, y, 1) / integral); + } + cdfImage.data = cdf.array; + + return cdfImage; + } + + + function makeTextureArray$1(width, height, channels) { + const array = new Float32Array(channels * width * height); + + return { + set(x, y, channel, val) { + array[channels * (y * width + x) + channel] = val; + }, + get(x, y, channel) { + return array[channels * (y * width + x) + channel]; + }, + width, + height, + channels, + array + }; + } + + function unrollLoop(indexName, start, limit, step, code) { + let unrolled = `int ${indexName};\n`; + + for (let i = start; (step > 0 && i < limit) || (step < 0 && i > limit); i += step) { + unrolled += `${indexName} = ${i};\n`; + unrolled += code; + } + + return unrolled; + } + + var rayTraceCore = ` + #define STANDARD 0 + #define THIN_GLASS 1 #define THICK_GLASS 2 #define 
SHADOW_CATCHER 3 @@ -1208,6 +2132,8 @@ // https://www.w3.org/WAI/GL/wiki/Relative_luminance const vec3 luminance = vec3(0.2126, 0.7152, 0.0722); + #define RAY_MAX_DISTANCE 9999.0 + struct Ray { vec3 o; vec3 d; @@ -1224,7 +2150,6 @@ float roughness; float metalness; int materialType; - int meshId; }; struct Camera { @@ -1301,48 +2226,18 @@ vec4 textureLinear(sampler2D map, vec2 uv) { var intersect = ` -uniform highp isampler2D indices; uniform sampler2D positions; uniform sampler2D normals; uniform sampler2D uvs; uniform sampler2D bvh; -uniform Materials { - vec4 colorAndMaterialType[NUM_MATERIALS]; - vec4 roughnessMetalnessNormalScale[NUM_MATERIALS]; - - #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) - ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS]; - #endif - - #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) - vec4 diffuseNormalMapSize[NUM_DIFFUSE_NORMAL_MAPS]; - #endif - - #if defined(NUM_PBR_MAPS) - vec2 pbrMapSize[NUM_PBR_MAPS]; - #endif -} materials; - -#ifdef NUM_DIFFUSE_MAPS - uniform mediump sampler2DArray diffuseMap; -#endif - -#ifdef NUM_NORMAL_MAPS - uniform mediump sampler2DArray normalMap; -#endif - -#ifdef NUM_PBR_MAPS - uniform mediump sampler2DArray pbrMap; -#endif - struct Triangle { vec3 p0; vec3 p1; vec3 p2; }; -void surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) { +void surfaceInteractionFromBVH(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) { si.hit = true; si.faceNormal = faceNormal; si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2; @@ -1353,90 +2248,30 @@ void surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tr vec3 n0 = texelFetch(normals, i0, 0).xyz; vec3 n1 = texelFetch(normals, i1, 0).xyz; vec3 n2 = texelFetch(normals, i2, 0).xyz; - si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2); - - si.color = materials.colorAndMaterialType[materialIndex].xyz; - si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x; - si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y; - - si.materialType = int(materials.colorAndMaterialType[materialIndex].w); - - // TODO: meshId should be the actual mesh id instead of the material id, which can be shared amoung meshes. 
- // This will involve storing the mesh id AND the material id in the BVH texture - si.meshId = materialIndex + 1; // +1 so that the mesh id is never 0 + vec3 normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2); #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) vec2 uv0 = texelFetch(uvs, i0, 0).xy; vec2 uv1 = texelFetch(uvs, i1, 0).xy; vec2 uv2 = texelFetch(uvs, i2, 0).xy; vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2); + #else + vec2 uv = vec2(); #endif - #ifdef NUM_DIFFUSE_MAPS - int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x; - if (diffuseMapIndex >= 0) { - si.color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb; - } - #endif + si.materialType = int(getMatType(materialIndex)); + si.color = getMatColor(materialIndex, uv); + si.roughness = getMatRoughness(materialIndex, uv); + si.metalness = getMatMetalness(materialIndex, uv); #ifdef NUM_NORMAL_MAPS - int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y; - if (normalMapIndex >= 0) { - vec2 duv02 = uv0 - uv2; - vec2 duv12 = uv1 - uv2; - vec3 dp02 = tri.p0 - tri.p2; - vec3 dp12 = tri.p1 - tri.p2; - - // Method One - // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0 - // Compute tangent vectors relative to the face normal. These vectors won't necessarily be orthogonal to the smoothed normal - // This means the TBN matrix won't be orthogonal which is technically incorrect. - // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js) - // -------------- - // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); - // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); - // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale); - - // Method Two - // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal - // This might inadvertently flip coordinate space orientation - // -------------- - // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); - // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); - // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process - // vec3 dpdv = cross(si.normal, dpdu) * scale; - - // Method Three - // http://www.thetenthplanet.de/archives/1180 - // Compute co-tangent and co-bitangent vectors - // These vectors are orthongal and maintain a consistent coordinate space - // -------------- - vec3 dp12perp = cross(dp12, si.normal); - vec3 dp02perp = cross(si.normal, dp02); - vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x; - vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y; - float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv))); - dpdu *= invmax; - dpdv *= invmax; - - vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0; - n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw; - - mat3 tbn = mat3(dpdu, dpdv, si.normal); - - si.normal = normalize(tbn * n); - } - #endif - - #ifdef NUM_PBR_MAPS - int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z; - int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w; - if (roughnessMapIndex >= 0) { - si.roughness *= 
texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g; - } - if (metalnessMapIndex >= 0) { - si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b; - } + vec3 dp1 = tri.p0 - tri.p2; + vec3 dp2 = tri.p1 - tri.p2; + vec2 duv1 = uv0 - uv2; + vec2 duv2 = uv1 - uv2; + si.normal = getMatNormal(materialIndex, uv, normal, dp1, dp2, duv1, duv2); + #else + si.normal = normal; #endif } @@ -1537,8 +2372,8 @@ int maxDimension(vec3 v) { } // Traverse BVH, find closest triangle intersection, and return surface information -SurfaceInteraction intersectScene(inout Ray ray) { - SurfaceInteraction si; +void intersectScene(inout Ray ray, inout SurfaceInteraction si) { + si.hit = false; int maxDim = maxDimension(abs(ray.d)); @@ -1595,16 +2430,14 @@ SurfaceInteraction intersectScene(inout Ray ray) { ray.tMax = hit.t; int materialIndex = floatBitsToInt(r2.w); vec3 faceNormal = r2.xyz; - surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex); + surfaceInteractionFromBVH(si, tri, hit.barycentric, index, faceNormal, materialIndex); } } } // Values must be clamped outside of intersection loop. Clamping inside the loop produces incorrect numbers on some devices. - si.roughness = clamp(si.roughness, 0.03, 1.0); + si.roughness = clamp(si.roughness, ROUGHNESS_MIN, 1.0); si.metalness = clamp(si.metalness, 0.0, 1.0); - - return si; } bool intersectSceneShadow(inout Ray ray) { @@ -1662,9 +2495,37 @@ bool intersectSceneShadow(inout Ray ray) { } } - return false; -} + return false; +} + +`; + + var surfaceInteractionDirect = ` + + uniform sampler2D gPosition; + uniform sampler2D gNormal; + uniform sampler2D gFaceNormal; + uniform sampler2D gColor; + uniform sampler2D gMatProps; + + void surfaceInteractionDirect(vec2 coord, inout SurfaceInteraction si) { + si.position = texture(gPosition, coord).xyz; + + vec4 normalMaterialType = texture(gNormal, coord); + + si.normal = normalize(normalMaterialType.xyz); + si.materialType = int(normalMaterialType.w); + + si.faceNormal = normalize(texture(gFaceNormal, coord).xyz); + + si.color = texture(gColor, coord).rgb; + vec4 matProps = texture(gMatProps, coord); + si.roughness = matProps.x; + si.metalness = matProps.y; + + si.hit = dot(si.normal, si.normal) > 0.0 ? true : false; + } `; var random = ` @@ -1681,8 +2542,6 @@ uniform float strataSize; // This allows us to use stratified sampling for each random variable in our path tracing int sampleIndex = 0; -const highp float maxUint = 1.0 / 4294967295.0; - float pixelSeed; void initRandom() { @@ -2055,852 +2914,471 @@ void sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) { mat3 basis = orthonormalBasis(si.normal); vec3 viewDir = -path.ray.d; - vec2 diffuseOrSpecular = randomSampleVec2(); - - vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? - lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : - lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - - bool lastBounce = bounce == BOUNCES; - - // Add path contribution - path.li += path.beta * ( - importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) + - importanceSampleMaterial(si, viewDir, lastBounce, lightDir) - ); - - // Get new path direction - - lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ? 
- lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : - lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - - float cosThetaL = dot(si.normal, lightDir); - - float scatteringPdf; - vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); - - path.beta *= abs(cosThetaL) * brdf / scatteringPdf; - - initRay(path.ray, si.position + EPS * lightDir, lightDir); - - // If new ray direction is pointing into the surface, - // the light path is physically impossible and we terminate the path. - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - path.abort = orientation < 0.0; - - path.specularBounce = false; -} - -`; - - var sampleShadowCatcher = ` - -#ifdef USE_SHADOW_CATCHER - -float importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) { - float li; - - float lightPdf; - vec2 uv; - vec3 lightDir = sampleEnvmap(random, uv, lightPdf); - - float cosThetaL = dot(si.normal, lightDir); - - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; - } - - float occluded = 1.0; - - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - occluded = 0.0; - } - - float irr = dot(luminance, textureLinear(envmap, uv).rgb); - - // lambertian BRDF - float brdf = INVPI; - float scatteringPdf = abs(cosThetaL) * INVPI; - - float weight = powerHeuristic(lightPdf, scatteringPdf); - - float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf; - - alpha += lightEq; - li += occluded * lightEq; - - return li; -} - -float importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) { - float li; - - float cosThetaL = dot(si.normal, lightDir); - - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; - } - - float occluded = 1.0; - - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - occluded = 0.0; - } - - vec2 uv = cartesianToEquirect(lightDir); - - float lightPdf = envmapPdf(uv); - - float irr = dot(luminance, textureLinear(envmap, uv).rgb); - - // lambertian BRDF - float brdf = INVPI; - float scatteringPdf = abs(cosThetaL) * INVPI; - - float weight = powerHeuristic(scatteringPdf, lightPdf); - - float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf; - - alpha += lightEq; - li += occluded * lightEq; - - return li; -} - -void sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) { - mat3 basis = orthonormalBasis(si.normal); - vec3 viewDir = -path.ray.d; - vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir); - - vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - - float alphaBounce = 0.0; - - vec3 li = path.beta * color * ( - importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) + - importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce) - ); - - // alphaBounce contains the lighting of the shadow catcher *without* shadows - alphaBounce = alphaBounce == 0.0 ? 
1.0 : alphaBounce; - - // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher - path.alpha *= alphaBounce; - - // we only want the alpha division to affect the shadow catcher - // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution - path.li *= alphaBounce; - - // add path contribution - path.li += li; - - // Get new path direction - - lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - - float cosThetaL = dot(si.normal, lightDir); - - // lambertian brdf with terms cancelled - path.beta *= color; - - initRay(path.ray, si.position + EPS * lightDir, lightDir); - - // If new ray direction is pointing into the surface, - // the light path is physically impossible and we terminate the path. - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - path.abort = orientation < 0.0; - - path.specularBounce = false; - - // advance dimension index by unused stratified samples - const int usedSamples = 6; - sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; -} - -#endif - -`; - - var sampleGlass = ` - -#ifdef USE_GLASS - -void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { - vec3 viewDir = -path.ray.d; - float cosTheta = dot(si.normal, viewDir); - - float F = si.materialType == THIN_GLASS ? - fresnelSchlick(abs(cosTheta), R0) : // thin glass - fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass - - vec3 lightDir; - - float reflectionOrRefraction = randomSample(); - - if (reflectionOrRefraction < F) { - lightDir = reflect(-viewDir, si.normal); - } else { - lightDir = si.materialType == THIN_GLASS ? - refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass - refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass - path.beta *= si.color; - } - - initRay(path.ray, si.position + EPS * lightDir, lightDir); - - // advance sample index by unused stratified samples - const int usedSamples = 1; - sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; - - path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0); -} - -#endif - -`; - - // import sampleGlass from './chunks/sampleGlassMicrofacet.glsl'; - - var fragment = { - includes: [ - core, - textureLinear, - intersect, - random, - envmap, - bsdf, - sample, - sampleMaterial, - sampleGlass, - sampleShadowCatcher, - ], - outputs: ['light', 'position'], - source: (defines) => ` - void bounce(inout Path path, int i, inout SurfaceInteraction si) { - if (path.abort) { - return; - } - - si = intersectScene(path.ray); - - if (!si.hit) { - if (path.specularBounce) { - path.li += path.beta * sampleBackgroundFromDirection(path.ray.d); - } - - path.abort = true; - } else { - #ifdef USE_GLASS - if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) { - sampleGlassSpecular(si, i, path); - } - #endif - #ifdef USE_SHADOW_CATCHER - if (si.materialType == SHADOW_CATCHER) { - sampleShadowCatcher(si, i, path); - } - #endif - if (si.materialType == STANDARD) { - sampleMaterial(si, i, path); - } - - // Russian Roulette sampling - if (i >= 2) { - float q = 1.0 - dot(path.beta, luminance); - if (randomSample() < q) { - path.abort = true; - } - path.beta /= 1.0 - q; - } - } - } + vec2 diffuseOrSpecular = randomSampleVec2(); - // Path tracing integrator as described in - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html# - vec4 integrator(inout Ray ray, inout SurfaceInteraction si) { - Path path; - path.ray = ray; - path.li = vec3(0); - path.alpha = 1.0; - path.beta = vec3(1.0); - path.specularBounce = true; - path.abort = false; + vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? + lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : + lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - bounce(path, 1, si); + bool lastBounce = bounce == BOUNCES; - SurfaceInteraction indirectSi; + // Add path contribution + path.li += path.beta * ( + importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) + + importanceSampleMaterial(si, viewDir, lastBounce, lightDir) + ); - // Manually unroll for loop. - // Some hardware fails to interate over a GLSL loop, so we provide this workaround - // for (int i = 1; i < defines.bounces + 1, i += 1) - // equivelant to - ${unrollLoop('i', 2, defines.BOUNCES + 1, 1, ` - bounce(path, i, indirectSi); - `)} + // Get new path direction - return vec4(path.li, path.alpha); + if (lastBounce) { + return; } - void main() { - initRandom(); + lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ? 
+ lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : + lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - vec2 vCoordAntiAlias = vCoord + jitter; + float cosThetaL = dot(si.normal, lightDir); - vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov)); + float scatteringPdf; + vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); - // Thin lens model with depth-of-field - // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField - // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2()); - // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane + path.beta *= abs(cosThetaL) * brdf / scatteringPdf; - // vec3 origin = vec3(lensPoint, 0.0); - // direction = normalize(focusPoint - origin); + initRay(path.ray, si.position + EPS * lightDir, lightDir); - // origin = vec3(camera.transform * vec4(origin, 1.0)); - // direction = mat3(camera.transform) * direction; + // If new ray direction is pointing into the surface, + // the light path is physically impossible and we terminate the path. + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + path.abort = orientation < 0.0; - vec3 origin = camera.transform[3].xyz; - direction = mat3(camera.transform) * direction; + path.specularBounce = false; +} - Ray cam; - initRay(cam, origin, direction); +`; - SurfaceInteraction si; + var sampleShadowCatcher = ` - vec4 liAndAlpha = integrator(cam, si); +#ifdef USE_SHADOW_CATCHER - if (dot(si.position, si.position) == 0.0) { - si.position = origin + direction * RAY_MAX_DISTANCE; - } +float importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) { + float li; - if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) { - liAndAlpha = vec4(0, 0, 0, 1); - } + float lightPdf; + vec2 uv; + vec3 lightDir = sampleEnvmap(random, uv, lightPdf); - out_light = liAndAlpha; - out_position = vec4(si.position, si.meshId); + float cosThetaL = dot(si.normal, lightDir); - // Stratified Sampling Sample Count Test - // --------------- - // Uncomment the following code - // Then observe the colors of the image - // If: - // * The resulting image is pure black - // Extra samples are being passed to the shader that aren't being used. - // * The resulting image contains red - // Not enough samples are being passed to the shader - // * The resulting image contains only white with some black - // All samples are used by the shader. Correct result! 
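+  // A light direction that points into the geometric surface is physically impossible; such a sample contributes nothing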
+ float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; + } - // fragColor = vec4(0, 0, 0, 1); - // if (sampleIndex == SAMPLING_DIMENSIONS) { - // fragColor = vec4(1, 1, 1, 1); - // } else if (sampleIndex > SAMPLING_DIMENSIONS) { - // fragColor = vec4(1, 0, 0, 1); - // } -} -` - }; + float occluded = 1.0; - function mergeMeshesToGeometry(meshes) { + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + occluded = 0.0; + } - let vertexCount = 0; - let indexCount = 0; + float irr = dot(luminance, textureLinear(envmap, uv).rgb); - const geometryAndMaterialIndex = []; - const materialIndexMap = new Map(); + // lambertian BRDF + float brdf = INVPI; + float scatteringPdf = abs(cosThetaL) * INVPI; - for (const mesh of meshes) { - const geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); + float weight = powerHeuristic(lightPdf, scatteringPdf); - const index = geometry.getIndex(); - if (!index) { - addFlatGeometryIndices(geometry); - } + float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf; - geometry.applyMatrix(mesh.matrixWorld); + alpha += lightEq; + li += occluded * lightEq; - if (!geometry.getAttribute('normal')) { - geometry.computeVertexNormals(); - } else { - geometry.normalizeNormals(); - } + return li; +} - vertexCount += geometry.getAttribute('position').count; - indexCount += geometry.getIndex().count; +float importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) { + float li; - const material = mesh.material; - let materialIndex = materialIndexMap.get(material); - if (materialIndex === undefined) { - materialIndex = materialIndexMap.size; - materialIndexMap.set(material, materialIndex); - } + float cosThetaL = dot(si.normal, lightDir); - geometryAndMaterialIndex.push({ - geometry, - materialIndex - }); - } + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; + } - const { geometry, materialIndices } = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); + float occluded = 1.0; - return { - geometry, - materialIndices, - materials: Array.from(materialIndexMap.keys()) - }; + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + occluded = 0.0; } - function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { - const position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - const normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - const uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); - const index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); + vec2 uv = cartesianToEquirect(lightDir); - const materialIndices = []; + float lightPdf = envmapPdf(uv); - const bg = new THREE$1.BufferGeometry(); - bg.addAttribute('position', position); - bg.addAttribute('normal', normal); - bg.addAttribute('uv', uv); - bg.setIndex(index); + float irr = dot(luminance, textureLinear(envmap, uv).rgb); - let currentVertex = 0; - let currentIndex = 0; + // lambertian BRDF + float brdf = INVPI; + float scatteringPdf = abs(cosThetaL) * INVPI; - for (const { geometry, materialIndex } of geometryAndMaterialIndex) { - const vertexCount = geometry.getAttribute('position').count; - bg.merge(geometry, currentVertex); + float weight = powerHeuristic(scatteringPdf, lightPdf); - const meshIndex = 
geometry.getIndex(); - for (let i = 0; i < meshIndex.count; i++) { - index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); - } + float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf; - const triangleCount = meshIndex.count / 3; - for (let i = 0; i < triangleCount; i++) { - materialIndices.push(materialIndex); - } + alpha += lightEq; + li += occluded * lightEq; - currentVertex += vertexCount; - currentIndex += meshIndex.count; - } + return li; +} - return { geometry: bg, materialIndices }; - } +void sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) { + mat3 basis = orthonormalBasis(si.normal); + vec3 viewDir = -path.ray.d; + vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir); - // Similar to buffergeometry.clone(), except we only copy - // specific attributes instead of everything - function cloneBufferGeometry(bufferGeometry, attributes) { - const newGeometry = new THREE$1.BufferGeometry(); + vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - for (const name of attributes) { - const attrib = bufferGeometry.getAttribute(name); - if (attrib) { - newGeometry.addAttribute(name, attrib.clone()); - } - } + float alphaBounce = 0.0; - const index = bufferGeometry.getIndex(); - if (index) { - newGeometry.setIndex(index); - } + vec3 li = path.beta * color * ( + importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) + + importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce) + ); - return newGeometry; - } + // alphaBounce contains the lighting of the shadow catcher *without* shadows + alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce; - function addFlatGeometryIndices(geometry) { - const position = geometry.getAttribute('position'); + // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher + path.alpha *= alphaBounce; - if (!position) { - console.warn('No position attribute'); - return; - } + // we only want the alpha division to affect the shadow catcher + // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution + path.li *= alphaBounce; - const index = new Uint32Array(position.count); + // add path contribution + path.li += li; - for (let i = 0; i < index.length; i++) { - index[i] = i; - } + // Get new path direction - geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - return geometry; - } + float cosThetaL = dot(si.normal, lightDir); - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + // lambertian brdf with terms cancelled + path.beta *= color; - Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, - instead of being evenly spaced across the domain. - This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. + initRay(path.ray, si.position + EPS * lightDir, lightDir); + + // If new ray direction is pointing into the surface, + // the light path is physically impossible and we terminate the path. 
+ float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + path.abort = orientation < 0.0; - We can reduce the amount of clustering of random numbers by using stratified sampling. - Stratification divides the [0, 1) range into partitions, or stratum, of equal size. - Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. - When every stratum has been sampled once, this sequence is shuffled again and the process repeats. + path.specularBounce = false; - The returned sample ranges between [0, numberOfStratum). - The integer part ideintifies the stratum (the first stratum being 0). - The fractional part is the random number. + // advance dimension index by unused stratified samples + const int usedSamples = 6; + sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; +} - To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. - */ +#endif - function makeStratifiedSampler(strataCount, dimensions) { - const strata = []; - const l = strataCount ** dimensions; - for (let i = 0; i < l; i++) { - strata[i] = i; - } +`; - let index = strata.length; + var sampleGlass = ` - const sample = []; +#ifdef USE_GLASS - function restart() { - index = 0; - } +void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { + vec3 viewDir = -path.ray.d; + float cosTheta = dot(si.normal, viewDir); - function next() { - if (index >= strata.length) { - shuffle(strata); - restart(); - } - let stratum = strata[index++]; + float F = si.materialType == THIN_GLASS ? + fresnelSchlick(abs(cosTheta), R0) : // thin glass + fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass - for (let i = 0; i < dimensions; i++) { - sample[i] = stratum % strataCount + Math.random(); - stratum = Math.floor(stratum / strataCount); - } + vec3 lightDir; - return sample; - } + float reflectionOrRefraction = randomSample(); - return { - next, - restart, - strataCount - }; + if (reflectionOrRefraction < F) { + lightDir = reflect(-viewDir, si.normal); + } else { + lightDir = si.materialType == THIN_GLASS ? + refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass + refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass + path.beta *= si.color; } - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - - It is computationally unfeasible to compute stratified sampling for large dimensions (>2) - Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension - e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. - This reaps many benefits of stratification while still allowing for small strata sizes. - */ + initRay(path.ray, si.position + EPS * lightDir, lightDir); - function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { - const strataObjs = []; + // advance sample index by unused stratified samples + const int usedSamples = 1; + sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; - for (const dim of listOfDimensions) { - strataObjs.push(makeStratifiedSampler(strataCount, dim)); - } + path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0); +} - const combined = []; +#endif - function next() { - let i = 0; +`; - for (const strata of strataObjs) { - const nums = strata.next(); + var fragment$1 = { + includes: [ + constants$1, + rayTraceCore, + textureLinear, + materialBuffer, + intersect, + surfaceInteractionDirect, + random, + envmap, + bsdf, + sample, + sampleMaterial, + sampleGlass, + sampleShadowCatcher, + ], + outputs: ['light'], + source: (defines) => ` + void bounce(inout Path path, int i, inout SurfaceInteraction si) { + if (!si.hit) { + if (path.specularBounce) { + path.li += path.beta * sampleBackgroundFromDirection(path.ray.d); + } - for (const num of nums) { - combined[i++] = num; + path.abort = true; + } else { + #ifdef USE_GLASS + if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) { + sampleGlassSpecular(si, i, path); + } + #endif + #ifdef USE_SHADOW_CATCHER + if (si.materialType == SHADOW_CATCHER) { + sampleShadowCatcher(si, i, path); } + #endif + if (si.materialType == STANDARD) { + sampleMaterial(si, i, path); } - return combined; - } - - function restart() { - for (const strata of strataObjs) { - strata.restart(); + // Russian Roulette sampling + if (i >= 2) { + float q = 1.0 - dot(path.beta, luminance); + if (randomSample() < q) { + path.abort = true; + } + path.beta /= 1.0 - q; } } - - return { - next, - restart, - strataCount - }; } - function makeTexture(gl, params) { - let { - width = null, - height = null, - - // A single HTMLImageElement, ImageData, or TypedArray, - // Or an array of any of these objects. In this case an Array Texture will be created - data = null, + // Path tracing integrator as described in + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html# + vec4 integrator(inout Ray ray) { + Path path; + path.ray = ray; + path.li = vec3(0); + path.alpha = 1.0; + path.beta = vec3(1.0); + path.specularBounce = true; + path.abort = false; - // If greater than 1, create an Array Texture of this length - length = 1, + SurfaceInteraction si; - // Number of channels, [1-4]. If left blank, the the function will decide the number of channels automatically from the data - channels = null, + // first surface interaction from g-buffer + surfaceInteractionDirect(vCoord, si); - // Either 'byte' or 'float' - // If left empty, the function will decide the format automatically from the data - storage = null, + // first surface interaction from ray interesction + // intersectScene(path.ray, si); - // Reverse the texture across the y-axis. - flipY = false, + bounce(path, 1, si); - // sampling properties - gammaCorrection = false, - wrapS = gl.REPEAT, - wrapT = gl.REPEAT, - minFilter = gl.LINEAR, - magFilter = gl.LINEAR, - } = params; + // Manually unroll for loop. 
+ // Some hardware fails to iterate over a GLSL loop, so we provide this workaround + // for (int i = 1; i < defines.bounces + 1, i += 1) + // equivelant to + ${unrollLoop('i', 2, defines.BOUNCES + 1, 1, ` + if (!path.abort) { + intersectScene(path.ray, si); + bounce(path, i, si); + } + `)} - width = width || data.width || 0; - height = height || data.height || 0; + return vec4(path.li, path.alpha); + } - const texture = gl.createTexture(); + void main() { + initRandom(); - let target; - let dataArray; + vec2 vCoordAntiAlias = vCoord + jitter; - // if data is a JS array but not a TypedArray, assume data is an array of images and create a GL Array Texture - if (Array.isArray(data)) { - dataArray = data; - data = dataArray[0]; - } + vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov)); - target = dataArray || length > 1 ? gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; + // Thin lens model with depth-of-field + // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField + // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2()); + // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane - gl.activeTexture(gl.TEXTURE0); - gl.bindTexture(target, texture); + // vec3 origin = vec3(lensPoint, 0.0); + // direction = normalize(focusPoint - origin); - gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); - gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); - gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); - gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); + // origin = vec3(camera.transform * vec4(origin, 1.0)); + // direction = mat3(camera.transform) * direction; - if (!channels) { - if (data && data.length) { - channels = data.length / (width * height); // infer number of channels from data size - } else { - channels = 4; - } - } + vec3 origin = camera.transform[3].xyz; + direction = mat3(camera.transform) * direction; - channels = clamp(channels, 1, 4); + Ray cam; + initRay(cam, origin, direction); - const format = [ - gl.RED, - gl.RG, - gl.RGB, - gl.RGBA - ][channels - 1]; + vec4 liAndAlpha = integrator(cam); - const isByteArray = - storage === 'byte' || - data instanceof Uint8Array || - data instanceof HTMLImageElement || - data instanceof HTMLCanvasElement || - data instanceof ImageData; + if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) { + liAndAlpha = vec4(0, 0, 0, 1); + } - const isFloatArray = - storage === 'float' || - data instanceof Float32Array; + out_light = liAndAlpha; - let type; - let internalFormat; - if (isByteArray) { - type = gl.UNSIGNED_BYTE; - internalFormat = [ - gl.R8, - gl.RG8, - gammaCorrection ? gl.SRGB8 : gl.RGB8, - gammaCorrection ? gl.SRGB8_ALPHA8 : gl.RGBA8 - ][channels - 1]; - } else if (isFloatArray) { - type = gl.FLOAT; - internalFormat = [ - gl.R32F, - gl.RG32F, - gl.RGB32F, - gl.RGBA32F - ][channels - 1]; - } else { - console.error('Texture of unknown type:', storage || data); - } + // Stratified Sampling Sample Count Test + // --------------- + // Uncomment the following code + // Then observe the colors of the image + // If: + // * The resulting image is pure black + // Extra samples are being passed to the shader that aren't being used. + // * The resulting image contains red + // Not enough samples are being passed to the shader + // * The resulting image contains only white with some black + // All samples are used by the shader. Correct result! 
- if (dataArray) { - gl.texStorage3D(target, 1, internalFormat, width, height, dataArray.length); - for (let i = 0; i < dataArray.length; i++) { - // if layer is an HTMLImageElement, use the .width and .height properties of each layer - // otherwise use the max size of the array texture - const layerWidth = dataArray[i].width || width; - const layerHeight = dataArray[i].height || height; + // fragColor = vec4(0, 0, 0, 1); + // if (sampleIndex == SAMPLING_DIMENSIONS) { + // fragColor = vec4(1, 1, 1, 1); + // } else if (sampleIndex > SAMPLING_DIMENSIONS) { + // fragColor = vec4(1, 0, 0, 1); + // } +} +` + }; - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? flipY[i] : flipY); + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); - } - } else if (length > 1) { - // create empty array texture - gl.texStorage3D(target, 1, internalFormat, width, height, length); - } else { - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); - gl.texStorage2D(target, 1, internalFormat, width, height); - if (data) { - gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); - } - } + Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, + instead of being evenly spaced across the domain. + This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. - // return state to default - gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, false); + We can reduce the amount of clustering of random numbers by using stratified sampling. + Stratification divides the [0, 1) range into partitions, or stratum, of equal size. + Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. + When every stratum has been sampled once, this sequence is shuffled again and the process repeats. - return { - target, - texture - }; - } + The returned sample ranges between [0, numberOfStratum). + The integer part ideintifies the stratum (the first stratum being 0). + The fractional part is the random number. - // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property - function getTexturesFromMaterials(meshes, textureNames) { - const textureMap = {}; + To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. 
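
    For example (illustrative numbers, not taken from the renderer): with a strataCount of 4, a
    one-dimensional sampler might return 2.37. The integer part (2) identifies the stratum, the
    fractional part (0.37) is the uniform random number, and dividing by the stratum count gives
    the stratified sample 2.37 / 4 = 0.5925 in [0, 1).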
+ */ - for (const name of textureNames) { - const textures = []; - textureMap[name] = { - indices: texturesFromMaterials(meshes, name, textures), - textures - }; + function makeStratifiedSampler(strataCount, dimensions) { + const strata = []; + const l = strataCount ** dimensions; + for (let i = 0; i < l; i++) { + strata[i] = i; } - return textureMap; - } + let index = strata.length; - // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties - function mergeTexturesFromMaterials(meshes, textureNames) { - const textureMap = { - textures: [], - indices: {} - }; + const sample = []; - for (const name of textureNames) { - textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + function restart() { + index = 0; } - return textureMap; - } - - function texturesFromMaterials(materials, textureName, textures) { - const indices = []; + function next() { + if (index >= strata.length) { + shuffle(strata); + restart(); + } + let stratum = strata[index++]; - for (const material of materials) { - if (!material[textureName]) { - indices.push(-1); - } else { - let index = textures.length; - for (let i = 0; i < textures.length; i++) { - if (textures[i] === material[textureName]) { - // Reuse existing duplicate texture. - index = i; - break; - } - } - if (index === textures.length) { - // New texture. Add texture to list. - textures.push(material[textureName]); - } - indices.push(index); + for (let i = 0; i < dimensions; i++) { + sample[i] = stratum % strataCount + Math.random(); + stratum = Math.floor(stratum / strataCount); } + + return sample; } - return indices; + return { + next, + restart, + strataCount + }; } - // Upload arrays to uniform buffer objects - // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout - - function uploadBuffers(gl, program, bufferData) { - const materialBuffer = makeUniformBuffer(gl, program, 'Materials'); + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - const { - color = [], - roughness = [], - metalness = [], - normalScale = [], - type = [], - diffuseMapIndex = [], - diffuseMapSize = [], - normalMapIndex = [], - normalMapSize = [], - roughnessMapIndex = [], - metalnessMapIndex = [], - pbrMapSize = [], - } = bufferData; + It is computationally unfeasible to compute stratified sampling for large dimensions (>2) + Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension + e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. + This reaps many benefits of stratification while still allowing for small strata sizes. 
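
    As an illustration (hypothetical parameters, not values used by the renderer), combining three
    2D samplers via makeStratifiedSamplerCombined(strataCount, [2, 2, 2]) yields a sampler whose
    next() concatenates the three 2D samples into a single array of 6 values, each in [0, strataCount).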
+ */ - materialBuffer.set('Materials.colorAndMaterialType[0]', interleave( - { data: [].concat(...color.map(d => d.toArray())), channels: 3 }, - { data: type, channels: 1} - )); + function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { + const strataObjs = []; - materialBuffer.set('Materials.roughnessMetalnessNormalScale[0]', interleave( - { data: roughness, channels: 1 }, - { data: metalness, channels: 1 }, - { data: [].concat(...normalScale.map(d => d.toArray())), channels: 2 } - )); + for (const dim of listOfDimensions) { + strataObjs.push(makeStratifiedSampler(strataCount, dim)); + } - materialBuffer.set('Materials.diffuseNormalRoughnessMetalnessMapIndex[0]', interleave( - { data: diffuseMapIndex, channels: 1 }, - { data: normalMapIndex, channels: 1 }, - { data: roughnessMapIndex, channels: 1 }, - { data: metalnessMapIndex, channels: 1 } - )); + const combined = []; - materialBuffer.set('Materials.diffuseNormalMapSize[0]', interleave( - { data: diffuseMapSize, channels: 2 }, - { data: normalMapSize, channels: 2 } - )); + function next() { + let i = 0; - materialBuffer.set('Materials.pbrMapSize[0]', pbrMapSize); + for (const strata of strataObjs) { + const nums = strata.next(); - materialBuffer.bind(0); - } + for (const num of nums) { + combined[i++] = num; + } + } - function interleave(...arrays) { - const maxLength = arrays.reduce((m, a) => { - return Math.max(m, a.data.length / a.channels); - }, 0); + return combined; + } - const interleaved = []; - for (let i = 0; i < maxLength; i++) { - for (let j = 0; j < arrays.length; j++) { - const { data, channels } = arrays[j]; - for (let c = 0; c < channels; c++) { - interleaved.push(data[i * channels + c]); - } + function restart() { + for (const strata of strataObjs) { + strata.restart(); } } - return interleaved; + return { + next, + restart, + strataCount + }; } function makeRayTracePass(gl, { bounces, // number of global illumination bounces + decomposedScene, fullscreenQuad, + materialBuffer, + mergedMesh, optionalExtensions, - scene, }) { bounces = clamp(bounces, 1, 6); @@ -2920,7 +3398,7 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { let samples; const renderPass = makeRenderPassFromScene({ - bounces, fullscreenQuad, gl, optionalExtensions, samplingDimensions, scene + bounces, decomposedScene, fullscreenQuad, gl, materialBuffer, mergedMesh, optionalExtensions, samplingDimensions, }); function setSize(width, height) { @@ -2931,9 +3409,9 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { function setNoise(noiseImage) { renderPass.setTexture('noise', makeTexture(gl, { data: noiseImage, - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, - storage: 'float' + wrapS: gl.REPEAT, + wrapT: gl.REPEAT, + storage: 'halfFloat', })); } @@ -2947,12 +3425,19 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { renderPass.setUniform('jitter', x, y); } + function setGBuffers({ position, normal, faceNormal, color, matProps }) { + renderPass.setTexture('gPosition', position); + renderPass.setTexture('gNormal', normal); + renderPass.setTexture('gFaceNormal', faceNormal); + renderPass.setTexture('gColor', color); + renderPass.setTexture('gMatProps', matProps); + } + function nextSeed() { renderPass.setUniform('stratifiedSamples[0]', samples.next()); } function setStrataCount(strataCount) { - if (strataCount > 1 && strataCount !== samples.strataCount) { // reinitailizing random has a performance cost. 
we can skip it if // * strataCount is 1, since a strataCount of 1 works with any sized StratifiedRandomCombined @@ -2984,6 +3469,7 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { outputLocs: renderPass.outputLocs, setCamera, setJitter, + setGBuffers, setNoise, setSize, setStrataCount, @@ -2991,34 +3477,25 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { } function makeRenderPassFromScene({ bounces, + decomposedScene, fullscreenQuad, gl, + materialBuffer, + mergedMesh, optionalExtensions, samplingDimensions, - scene, }) { const { OES_texture_float_linear } = optionalExtensions; - const { meshes, directionalLights, ambientLights, environmentLights } = decomposeScene(scene); - if (meshes.length === 0) { - throw 'RayTracingRenderer: Scene contains no renderable meshes.'; - } - - // merge meshes in scene to a single, static geometry - const { geometry, materials, materialIndices } = mergeMeshesToGeometry(meshes); + const { background, directionalLights, ambientLights, environmentLights } = decomposedScene; - // extract textures shared by meshes in scene - const maps = getTexturesFromMaterials(materials, ['map', 'normalMap']); - const pbrMap = mergeTexturesFromMaterials(materials, ['roughnessMap', 'metalnessMap']); + const { geometry, materials, materialIndices } = mergedMesh; // create bounding volume hierarchy from a static scene - const bvh = bvhAccel(geometry, materialIndices); + const bvh = bvhAccel(geometry); const flattenedBvh = flattenBvh(bvh); const numTris = geometry.index.count / 3; - const useGlass = materials.some(m => m.transparent); - const useShadowCatcher = materials.some(m => m.shadowCatcher); - const renderPass = makeRenderPass(gl, { defines: { OES_texture_float_linear, @@ -3026,60 +3503,19 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, STACK_SIZE: flattenedBvh.maxDepth, - NUM_TRIS: numTris, - NUM_MATERIALS: materials.length, - NUM_DIFFUSE_MAPS: maps.map.textures.length, - NUM_NORMAL_MAPS: maps.normalMap.textures.length, - NUM_DIFFUSE_NORMAL_MAPS: Math.max(maps.map.textures.length, maps.normalMap.textures.length), - NUM_PBR_MAPS: pbrMap.textures.length, BOUNCES: bounces, - USE_GLASS: useGlass, - USE_SHADOW_CATCHER: useShadowCatcher, - SAMPLING_DIMENSIONS: samplingDimensions.reduce((a, b) => a + b) + USE_GLASS: materials.some(m => m.transparent), + USE_SHADOW_CATCHER: materials.some(m => m.shadowCatcher), + SAMPLING_DIMENSIONS: samplingDimensions.reduce((a, b) => a + b), + ...materialBuffer.defines }, - fragment, + fragment: fragment$1, vertex: fullscreenQuad.vertexShader }); - const bufferData = {}; - - bufferData.color = materials.map(m => m.color); - bufferData.roughness = materials.map(m => m.roughness); - bufferData.metalness = materials.map(m => m.metalness); - bufferData.normalScale = materials.map(m => m.normalScale); - - bufferData.type = materials.map(m => { - if (m.shadowCatcher) { - return ShadowCatcherMaterial; - } - if (m.transparent) { - return m.solid ? 
ThickMaterial : ThinMaterial; - } - }); - - if (maps.map.textures.length > 0) { - const { relativeSizes, texture } = makeTextureArray$1(gl, maps.map.textures, true); - renderPass.setTexture('diffuseMap', texture); - bufferData.diffuseMapSize = relativeSizes; - bufferData.diffuseMapIndex = maps.map.indices; - } - - if (maps.normalMap.textures.length > 0) { - const { relativeSizes, texture } = makeTextureArray$1(gl, maps.normalMap.textures, false); - renderPass.setTexture('normalMap', texture); - bufferData.normalMapSize = relativeSizes; - bufferData.normalMapIndex = maps.normalMap.indices; - } - - if (pbrMap.textures.length > 0) { - const { relativeSizes, texture } = makeTextureArray$1(gl, pbrMap.textures, false); - renderPass.setTexture('pbrMap', texture); - bufferData.pbrMapSize = relativeSizes; - bufferData.roughnessMapIndex = pbrMap.indices.roughnessMap; - bufferData.metalnessMapIndex = pbrMap.indices.metalnessMap; - } - - uploadBuffers(gl, renderPass.program, bufferData); + renderPass.setTexture('diffuseMap', materialBuffer.textures.diffuseMap); + renderPass.setTexture('normalMap', materialBuffer.textures.normalMap); + renderPass.setTexture('pbrMap', materialBuffer.textures.pbrMap); renderPass.setTexture('positions', makeDataTexture(gl, geometry.getAttribute('position').array, 3)); @@ -3092,6 +3528,7 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { const envImage = generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights); const envImageTextureObject = makeTexture(gl, { data: envImage.data, + storage: 'halfFloat', minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, width: envImage.width, @@ -3101,10 +3538,11 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { renderPass.setTexture('envmap', envImageTextureObject); let backgroundImageTextureObject; - if (scene.background) { - const backgroundImage = generateBackgroundMapFromSceneBackground(scene.background); + if (background) { + const backgroundImage = generateBackgroundMapFromSceneBackground(background); backgroundImageTextureObject = makeTexture(gl, { data: backgroundImage.data, + storage: 'halfFloat', minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, width: backgroundImage.width, @@ -3116,57 +3554,16 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { renderPass.setTexture('backgroundMap', backgroundImageTextureObject); - const distribution = envmapDistribution(envImage); - - renderPass.setTexture('envmapDistribution', makeTexture(gl, { - data: distribution.data, - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, - width: distribution.width, - height: distribution.height, - })); - - return renderPass; - } - - function decomposeScene(scene) { - const meshes = []; - const directionalLights = []; - const ambientLights = []; - const environmentLights = []; - scene.traverse(child => { - if (child.isMesh) { - if (!child.geometry || !child.geometry.getAttribute('position')) { - console.warn(child, 'must have a geometry property with a position attribute'); - } - else if (!(child.material.isMeshStandardMaterial)) { - console.warn(child, 'must use MeshStandardMaterial in order to be rendered.'); - } else { - meshes.push(child); - } - } - if (child.isDirectionalLight) { - directionalLights.push(child); - } - if (child.isAmbientLight) { - ambientLights.push(child); - } - if (child.isEnvironmentLight) { - if (environmentLights.length > 1) { - console.warn(environmentLights, 'only one environment light can be used per scene'); - } - // Valid lights have HDR texture map in RGBEEncoding - if (isHDRTexture(child)) { - environmentLights.push(child); - } else { - console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); - } - } - }); - - return { - meshes, directionalLights, ambientLights, environmentLights - }; + const distribution = envmapDistribution(envImage); + + renderPass.setTexture('envmapDistribution', makeTexture(gl, { + data: distribution.data, + storage: 'halfFloat', + width: distribution.width, + height: distribution.height, + })); + + return renderPass; } function textureDimensionsFromArray(count) { @@ -3185,76 +3582,197 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { const textureDim = textureDimensionsFromArray(dataArray.length / channels); return makeTexture(gl, { data: padArray(dataArray, channels * textureDim.size), - minFilter: gl.NEAREST, - magFilter: gl.NEAREST, width: textureDim.columns, height: textureDim.rows, }); } - function makeTextureArray$1(gl, textures, gammaCorrection = false) { - const images = textures.map(t => t.image); - const flipY = textures.map(t => t.flipY); - const { maxSize, relativeSizes } = maxImageSize(images); + // expand array to the given length + function padArray(typedArray, length) { + const newArray = new typedArray.constructor(length); + newArray.set(typedArray); + return newArray; + } - // create GL Array Texture from individual textures - const texture = makeTexture(gl, { - width: maxSize.width, - height: maxSize.height, - gammaCorrection, - data: images, - flipY, - channels: 3 - }); + var fragment$2 = { + outputs: ['light'], + includes: [textureLinear], + source: ` + in vec2 vCoord; - return { - texture, - relativeSizes - }; + uniform mediump sampler2D light; + uniform mediump sampler2D position; + uniform vec2 lightScale; + uniform vec2 previousLightScale; + + uniform mediump sampler2D previousLight; + uniform mediump sampler2D previousPosition; + + uniform mat4 historyCamera; + uniform float blendAmount; + uniform vec2 jitter; + + vec2 reproject(vec3 position) { + vec4 historyCoord = historyCamera * vec4(position, 1.0); + return 0.5 * historyCoord.xy / historyCoord.w + 
0.5; } - function maxImageSize(images) { - const maxSize = { - width: 0, - height: 0 - }; + float getMeshId(sampler2D meshIdTex, vec2 vCoord) { + return floor(texture(meshIdTex, vCoord).w); + } - for (const image of images) { - maxSize.width = Math.max(maxSize.width, image.width); - maxSize.height = Math.max(maxSize.height, image.height); + void main() { + vec3 currentPosition = textureLinear(position, vCoord).xyz; + float currentMeshId = getMeshId(position, vCoord); + + vec4 currentLight = texture(light, lightScale * vCoord); + + if (currentMeshId == 0.0) { + out_light = currentLight; + return; } - const relativeSizes = []; - for (const image of images) { - relativeSizes.push(image.width / maxSize.width); - relativeSizes.push(image.height / maxSize.height); + vec2 hCoord = reproject(currentPosition) - jitter; + + vec2 hSizef = previousLightScale * vec2(textureSize(previousLight, 0)); + vec2 hSizeInv = 1.0 / hSizef; + ivec2 hSize = ivec2(hSizef); + + vec2 hTexelf = hCoord * hSizef - 0.5; + ivec2 hTexel = ivec2(hTexelf); + vec2 f = fract(hTexelf); + + ivec2 texel[] = ivec2[]( + hTexel + ivec2(0, 0), + hTexel + ivec2(1, 0), + hTexel + ivec2(0, 1), + hTexel + ivec2(1, 1) + ); + + float weights[] = float[]( + (1.0 - f.x) * (1.0 - f.y), + f.x * (1.0 - f.y), + (1.0 - f.x) * f.y, + f.x * f.y + ); + + vec4 history; + float sum; + + // bilinear sampling, rejecting samples that don't have a matching mesh id + for (int i = 0; i < 4; i++) { + vec2 gCoord = (vec2(texel[i]) + 0.5) * hSizeInv; + + float histMeshId = getMeshId(previousPosition, gCoord); + + float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel[i], hSize)) ? 0.0 : 1.0; + + float weight = isValid * weights[i]; + history += weight * texelFetch(previousLight, texel[i], 0); + sum += weight; } - return { maxSize, relativeSizes }; - } + if (sum > 0.0) { + history /= sum; + } else { + // If all samples of bilinear fail, try a 3x3 box filter + hTexel = ivec2(hTexelf + 0.5); - // expand array to the given length - function padArray(typedArray, length) { - const newArray = new typedArray.constructor(length); - newArray.set(typedArray); - return newArray; + for (int x = -1; x <= 1; x++) { + for (int y = -1; y <= 1; y++) { + ivec2 texel = hTexel + ivec2(x, y); + vec2 gCoord = (vec2(texel) + 0.5) * hSizeInv; + + float histMeshId = getMeshId(previousPosition, gCoord); + + float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel, hSize)) ? 0.0 : 1.0; + + float weight = isValid; + vec4 h = texelFetch(previousLight, texel, 0); + history += weight * h; + sum += weight; + } + } + history = sum > 0.0 ? 
history / sum : history; + } + + if (history.w > MAX_SAMPLES) { + history.xyz *= MAX_SAMPLES / history.w; + history.w = MAX_SAMPLES; + } + + out_light = blendAmount * history + currentLight; } +` + }; - function isHDRTexture(texture) { - return texture.map - && texture.map.image - && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); + function makeReprojectPass(gl, params) { + const { + fullscreenQuad, + maxReprojectedSamples, + } = params; + + const renderPass = makeRenderPass(gl, { + defines: { + MAX_SAMPLES: maxReprojectedSamples.toFixed(1) + }, + vertex: fullscreenQuad.vertexShader, + fragment: fragment$2 + }); + + const historyCamera = new THREE$1.Matrix4(); + + function setPreviousCamera(camera) { + historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); + + renderPass.setUniform('historyCamera', historyCamera.elements); + } + + function setJitter(x, y) { + renderPass.setUniform('jitter', x, y); + } + + function draw(params) { + const { + blendAmount, + light, + lightScale, + position, + previousLight, + previousLightScale, + previousPosition, + } = params; + + renderPass.setUniform('blendAmount', blendAmount); + renderPass.setUniform('lightScale', lightScale.x, lightScale.y); + renderPass.setUniform('previousLightScale', previousLightScale.x, previousLightScale.y); + + renderPass.setTexture('light', light); + renderPass.setTexture('position', position); + renderPass.setTexture('previousLight', previousLight); + renderPass.setTexture('previousPosition', previousPosition); + + renderPass.useProgram(); + fullscreenQuad.draw(); + } + + return { + draw, + setJitter, + setPreviousCamera, + }; } - var fragment$1 = { + var fragment$3 = { includes: [textureLinear], outputs: ['color'], source: ` in vec2 vCoord; - uniform mediump sampler2D light; + uniform sampler2D light; + uniform sampler2D position; - uniform vec2 textureScale; + uniform vec2 lightScale; // Tonemapping functions from THREE.js @@ -3283,15 +3801,62 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { return clamp((color * (2.51 * color + 0.03)) / (color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0)); } + #ifdef EDGE_PRESERVING_UPSCALE + vec4 getUpscaledLight(vec2 coord) { + float meshId = texture(position, coord).w; + + vec2 sizef = lightScale * vec2(textureSize(position, 0)); + vec2 texelf = coord * sizef - 0.5; + ivec2 texel = ivec2(texelf); + vec2 f = fract(texelf); + + ivec2 texels[] = ivec2[]( + texel + ivec2(0, 0), + texel + ivec2(1, 0), + texel + ivec2(0, 1), + texel + ivec2(1, 1) + ); + + float weights[] = float[]( + (1.0 - f.x) * (1.0 - f.y), + f.x * (1.0 - f.y), + (1.0 - f.x) * f.y, + f.x * f.y + ); + + vec4 upscaledLight; + float sum; + for (int i = 0; i < 4; i++) { + vec2 pCoord = (vec2(texels[i]) + 0.5) / sizef; + float isValid = texture(position, pCoord).w == meshId ? 
1.0 : 0.0; + float weight = isValid * weights[i]; + upscaledLight += weight * texelFetch(light, texels[i], 0); + sum += weight; + } + + if (sum > 0.0) { + upscaledLight /= sum; + } else { + upscaledLight = texture(light, lightScale * coord); + } + + return upscaledLight; + } + #endif + void main() { - vec4 tex = texture(light, textureScale * vCoord); + #ifdef EDGE_PRESERVING_UPSCALE + vec4 upscaledLight = getUpscaledLight(vCoord); + #else + vec4 upscaledLight = texture(light, lightScale * vCoord); + #endif // alpha channel stores the number of samples progressively rendered // divide the sum of light by alpha to obtain average contribution of light // in addition, alpha contains a scale factor for the shadow catcher material // dividing by alpha normalizes the brightness of the shadow catcher to match the background envmap. - vec3 light = tex.rgb / tex.a; + vec3 light = upscaledLight.rgb / upscaledLight.a; light *= EXPOSURE; @@ -3315,84 +3880,48 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { function makeToneMapPass(gl, params) { const { fullscreenQuad, - // optionalExtensions, toneMappingParams } = params; - // const { OES_texture_float_linear } = optionalExtensions; - const { toneMapping, whitePoint, exposure } = toneMappingParams; - - const renderPass = makeRenderPass(gl, { - gl, - defines: { - // OES_texture_float_linear, - TONE_MAPPING: toneMapFunctions[toneMapping] || 'linear', - WHITE_POINT: whitePoint.toExponential(), // toExponential allows integers to be represented as GLSL floats - EXPOSURE: exposure.toExponential() - }, - vertex: fullscreenQuad.vertexShader, - fragment: fragment$1, - }); - - function draw(params) { - const { - light, - textureScale - } = params; - - renderPass.setUniform('textureScale', textureScale.x, textureScale.y); - - renderPass.setTexture('light', light); - - renderPass.useProgram(); - fullscreenQuad.draw(); - } - - return { - draw - }; - } - - function makeFramebuffer(gl, { attachments }) { - - const framebuffer = gl.createFramebuffer(); - - function bind() { - gl.bindFramebuffer(gl.FRAMEBUFFER, framebuffer); - } - - function unbind() { - gl.bindFramebuffer(gl.FRAMEBUFFER, null); - } - - function init() { - bind(); + const renderPassConfig = { + gl, + defines: { + TONE_MAPPING: toneMapFunctions[toneMappingParams.toneMapping] || 'linear', + WHITE_POINT: toneMappingParams.whitePoint.toExponential(), // toExponential allows integers to be represented as GLSL floats + EXPOSURE: toneMappingParams.exposure.toExponential() + }, + vertex: fullscreenQuad.vertexShader, + fragment: fragment$3, + }; - const drawBuffers = []; + renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = true; + const renderPassUpscale = makeRenderPass(gl, renderPassConfig); - for (let location in attachments) { - location = Number(location); + renderPassConfig.defines.EDGE_PRESERVING_UPSCALE = false; + const renderPassNative = makeRenderPass(gl, renderPassConfig); - if (location === undefined) { - console.error('invalid location'); - } + function draw(params) { + const { + light, + lightScale, + position + } = params; - const tex = attachments[location]; - gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + location, tex.target, tex.texture, 0); - drawBuffers.push(gl.COLOR_ATTACHMENT0 + location); - } + const renderPass = + lightScale.x !== 1 && lightScale.y !== 1 ? 
+ renderPassUpscale : + renderPassNative; - gl.drawBuffers(drawBuffers); + renderPass.setUniform('lightScale', lightScale.x, lightScale.y); + renderPass.setTexture('light', light); + renderPass.setTexture('position', position); - unbind(); + renderPass.useProgram(); + fullscreenQuad.draw(); } - init(); - return { - attachments, - bind, - unbind + draw }; } @@ -3532,166 +4061,6 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { } } - var fragment$2 = { - outputs: ['light'], - source: ` - in vec2 vCoord; - - uniform mediump sampler2D light; - uniform mediump sampler2D position; - uniform vec2 textureScale; - - uniform mediump sampler2D previousLight; - uniform mediump sampler2D previousPosition; - uniform vec2 previousTextureScale; - - uniform mat4 historyCamera; - uniform float blendAmount; - uniform vec2 jitter; - - vec2 reproject(vec3 position) { - vec4 historyCoord = historyCamera * vec4(position, 1.0); - return 0.5 * historyCoord.xy / historyCoord.w + 0.5; - } - - void main() { - vec2 scaledCoord = textureScale * vCoord; - - vec4 positionTex = texture(position, scaledCoord); - vec4 lightTex = texture(light, scaledCoord); - - vec3 currentPosition = positionTex.xyz; - float currentMeshId = positionTex.w; - - vec2 hCoord = reproject(currentPosition) - jitter; - - vec2 hSizef = previousTextureScale * vec2(textureSize(previousPosition, 0)); - ivec2 hSize = ivec2(hSizef); - - vec2 hTexelf = hCoord * hSizef - 0.5; - ivec2 hTexel = ivec2(hTexelf); - vec2 f = fract(hTexelf); - - ivec2 texel[] = ivec2[]( - hTexel + ivec2(0, 0), - hTexel + ivec2(1, 0), - hTexel + ivec2(0, 1), - hTexel + ivec2(1, 1) - ); - - float weights[] = float[]( - (1.0 - f.x) * (1.0 - f.y), - f.x * (1.0 - f.y), - (1.0 - f.x) * f.y, - f.x * f.y - ); - - vec4 history; - float sum; - - // bilinear sampling, rejecting samples that don't have a matching mesh id - for (int i = 0; i < 4; i++) { - float histMeshId = texelFetch(previousPosition, texel[i], 0).w; - - float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel[i], hSize)) ? 0.0 : 1.0; - // float isValid = 0.0; - - float weight = isValid * weights[i]; - history += weight * texelFetch(previousLight, texel[i], 0); - sum += weight; - } - - if (sum > 0.0) { - history /= sum; - } else { - // If all samples of bilinear fail, try a 3x3 box filter - hTexel = ivec2(hTexelf + 0.5); - - for (int x = -1; x <= 1; x++) { - for (int y = -1; y <= 1; y++) { - ivec2 texel = hTexel + ivec2(x, y); - - float histMeshId = texelFetch(previousPosition, texel, 0).w; - - float isValid = histMeshId != currentMeshId || any(greaterThanEqual(texel, hSize)) ? 0.0 : 1.0; - - float weight = isValid; - vec4 h = texelFetch(previousLight, texel, 0); - history += weight * h; - sum += weight; - } - } - history = sum > 0.0 ? 
history / sum : history; - } - - if (history.w > MAX_SAMPLES) { - history.xyz *= MAX_SAMPLES / history.w; - history.w = MAX_SAMPLES; - } - - out_light = blendAmount * history + lightTex; - - } -` - }; - - function makeReprojectPass(gl, params) { - const { - fullscreenQuad, - maxReprojectedSamples, - } = params; - - const renderPass = makeRenderPass(gl, { - defines: { - MAX_SAMPLES: maxReprojectedSamples.toFixed(1) - }, - vertex: fullscreenQuad.vertexShader, - fragment: fragment$2 - }); - - const historyCamera = new THREE$1.Matrix4(); - - function setPreviousCamera(camera) { - historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); - - renderPass.setUniform('historyCamera', historyCamera.elements); - } - - function setJitter(x, y) { - renderPass.setUniform('jitter', x, y); - } - - function draw(params) { - const { - blendAmount, - light, - position, - previousLight, - previousPosition, - textureScale, - previousTextureScale, - } = params; - - renderPass.setUniform('blendAmount', blendAmount); - renderPass.setUniform('textureScale', textureScale.x, textureScale.y); - renderPass.setUniform('previousTextureScale', previousTextureScale.x, previousTextureScale.y); - - renderPass.setTexture('light', light); - renderPass.setTexture('position', position); - renderPass.setTexture('previousLight', previousLight); - renderPass.setTexture('previousPosition', previousPosition); - - renderPass.useProgram(); - fullscreenQuad.draw(); - } - - return { - draw, - setJitter, - setPreviousCamera, - }; - } - var noiseBase64 = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABAEAAAAADfkvJBAAAbsklEQVR4nA3UhQIIvBoA0E830810M91MN9PNdDPd/ulmupluppvpZrqZbqabe89DHCiDv5GzaossZGYBp2PFIFqKdmMXIKW85edCB/RT11SD3JMQidRlL7n2ufRH1jVkFUNVc3NaZ7DP0T7/112kM1Qc3RDG0K/4uN7CPC7OmtFRZK3Jy3fhSSySKIZXopTsnIhN69JjLHJYYnfpZu44hnV+UkhG/lPd/D+fIVwWtdhhupVPJmtsLFIhjHA7UUqY4fPIQ2qdKxviqH2sugJ2nC+1ZdV0vEF3RGNcMd4KdvIXaJnujdPrKj4ifkeX2f04avjEbqO0ogI/rD7zhmy6GKG/2w32IetIX5vE9DbrS+CNy4sbmgXoiaug48lV4bVKZgluwPujd+Ioa+KjuntypepEEvl/YYCYTq6w4aaReGMShwLkC4nvq7jFKJmLpoepHJTag/h2aMklShou+tyip5wm67P2/CnvH7K6zuq+KGvy2rkkrR4mc4dpUNTEFHDId9TXQiST3RxHO0lHNgNFIA/Ub1kC0pOlNBf77EtyZ0ejxvikzySL8C8hNWyyc1GvcBCusv/otvBO3YSj+KvvRlKgoNaF/GEB64prsx8qFRwVJcRmMk8l5E5swfHMPuhlr9DmtrLeqs7KOrCMQSpeGW/zH5F2dc0AXZhcp9IthLZyuxpHrkNnp0JfnsY+55XkAtgSOvsWzps8uoJ5GtpAXRWZ5TK9cEM1WVRWC81ZUstPZHHkC7GDjZfl7BJ+VcXkI8RfVIMW0Jq95oxE0R+MDQnMX97DPhYjEXzHM0LvUNyODhdDCvJdNmXlfFp0RsbBNclTj8hpXofsCgVYsAnwPRTNTiTLxZkQW43BmK6wHk7Y0iSdXIfyK8/aQULdx1/hJc0JkRE/UgNDc/dGZWanTCs2WQ0W6Xh7PZGuDMXEaLtIRMZcZAM4ieOwO661Qf4xVyhLOOA2mLe0JyvIDrBhUA42ioUiMmrHJ9te6jwtbQ6xWrKf/ED3qKJ0qvzO2of57KkcyMBvNZndbLTX/iWNaWTezm9E8cleKOSEXK1B3LDfeGk4yx/b7L5+uAvp6UVC/UYAhvPLvSwTWm+qqO5saYjh79LadBJaAR90ct9S/GGZ7Q1zhKyTOUJ9MzT85IldVjLLduUOqovEaASJbXeZ37oFv0w/sOGhvMzpVrL/2MeQx8+ldfQU/QBXIqn8NtHAHjCzaTJk+CDS0e6Wk8N7GEDgoR4rG5M/Zig/LD6hEr6VHmxzmijoKu/oZ+p84oEeiwegquE7pBZPYXEoyLeQ66wRicLXmOzWoib6mq6KUoWxuriq62OQh647TUmn0RuuIjtPfuEkcMQtwJ/IaJabRRe9fRX2Q8Z1L2UNlMclpfMFdKYr+XkVEeb6vChZuOBfhNl+l/hly9L0/mzYIxPhBq4oimlnB273mkgwnr+S7Vnp8Fff8/3VC7IJCtqZ9AxZRnujo3wjmQ9n7WtayxwgvUhUNtJ0UjlEU9vPFhePxDLfkl6z43hhdQSW+xbyKooJEEwqTOkL1VHWc1vReFaVxbcnTGM2Uq1XNXRPos0bdtI8VBKXcZdCV1dNpLcL3DE7Cqfmi2w5JGhGFqATTUhzy7sG2+a0II4ZtupikC488mt9abdTvpYXVALXBU6wNzYLXUTPQwTxH/nNttjKDA7pQT47mopOQmxzW/f3GVhXWoguEUl5EHcUoKm8LdpiMoZV9JONpzZa7wa7hG4XzxvquHj2s5lsIrFbtrbew3+SKbiK6Ry+whAyXrTBC0kgDfwZHNOMNRnwOjHVVICdOGVo6LuFsn6GTKN6u4IeZqtN7B6vzlegD7ioW8i/u430kbtO2pABrgTPwb+xchSZ7jK/V6KxPEWK+K+oBXFmeuikt+HzrIU66KQsI9bRaGqQfKqSkMNumbn
N4/ljkFsPxqnDElSF32L17D8UhxbUI8xnuwk/0znwXXcGGmD4QpPo5n6kTod70Zb2oI8Y6pFJKiuLoab7bXBEj+CXFTOH4A4kV/1JNjNRLrexaEX5Ht0xQ1RRskzmhCd+rmnFi9hLeqHe7svy7Lq+/+Mq6am+A/X8e+iptvqcbIjzqCOfbW6SpKQ22gPt8HgTFUMPd9kWgKd2O45Pr0EuOlK8waXFfriga7sXrLlKZZbrgeaPnmsrurd+n2H8hugjc+i1OCpJj2vYPyQ27+lT6/f4JM0c6sJIHwm/8AJS4tXuuo6g9qOCjvOZIrI9ZpaaauQAjwb9eTG0RMYPr2y5AHv8YhZLHvZl+DdQqrI5Z1L4QawT/FOLoQCOLR+EyTIrjcqb6YtiA4mg0/L27reYYg7JpvSVOM7G+p2uIb1iJ0hE+/DvvLW+qqfL034nLU5GQh02j8aHi/aDLS2b4ncYk/OcE+V+hhNqmF2rs1j4a1qziXYgaaDWQRetSbOwC60J8VhFSIf62k2osy7FXqpdrDAdZbuQxf5ZOCGLy6Reago9xBydmN9HBdUqX9VtUYdIKZOGbGAFxEDXjLxDmeVXsd5WIOmlhN0kqe2r84o1upy+z9KLRjY/ui5qGkhNiqoL5iXN6hPbeyGa+ckKwRM6l51Ao+EG/yKruXNsrWvHkuDPKKctS4bYRnq7eIQX+at4s8lD2ovy+D/xlXUWuf2jsNiNQx9xDRwjLAgJUSd5AvfTD80U0Qk91fP8DTkBfaXx1Qhv7FMXifZRMw0MlxtxVFVNzoOTrnjoK9ObCZy5HOwjbWgTib1kFo3BJa9t7oojdJK5RpGcifO66LQ2xuIHBvxcnMcLdEoUWc0QjVhs0k3f4dnoXvREODRB5KWJ2UFTX60WcXERxFQ7uo9mDz1YVbzQddDBHQ3QxD0MPfBnsdX+p9+xg+Sybmtum4hKoJW+CG0NGSQxP/TC0AulZ1tozfATr9Ld/QfURp1kg2FqaOQ2QBZ9JNyCoeQfO0eS+SOCa0lLshW6hnulWqHi/qrMTj6Z03gzB/LMzuaXmZXJSUm7nSKACjQDVzafbiNTqUayYpjDNpqhqIzf4SfRU/KF6S+vo0MhAS/v36BoolU4JbKQO3S3nmAL88puH0GoN6tF3vg2rCzscLVcUbmKzHS/dFroBdGk8bP4Hx8DRotKtJdMa4YZKhvR2OgbnULv+lzYUfjhFusD6KaLR8aHFSSPjYmT2MP6tU1L76u4uqJYrqawEqqpW+Onm4G6KIw2CU0Z29/EIc9gKVwjH3wxNV5v8fmxVunIGB94PxYBV+I3RRM4IO8x7Ab6ZXi3aoEeoUXmtzqHVrGCsrUYpOvIFXSMgX4YQp1Qmp6xf/Ae8gR1U19NUzEdSOjApK9nPuoItqt5HE7TXPIm3sff2fm+SbioN9GcPLltyTLKeeGBjGr668sYsfuymdjM8uHjYqL5BLn4SFqRdjbnZJKgyFHIA51lEjEebtEMfqN7LlORlgreiM3B26G2g82iqssbZBQq6k+rGn5J+MMvsVRus95vMpFR9K9K4errLmJFSMO/iepoBu6CfptR4QzqxpOYH6ERP4xmqS4uKzz3V2RS0SnMNwnYKvdW5Bd16FdS0kWlDeQ2VIMEJtgeVJ7GZIdDYQldWQ6UVK2mM1l000/MRyn5GpGZDkRbQ1RUCs/HLcMDV4hV1/OkEZFpRX+f5zfSHGQR7W2obdeiMnK3qQarTK7wEiq5vTqWXayqhyF4By5l6+HDPKK4AZtVRnoHjVBv8Syd1VocyY2UP9g8c15PpXBNVIET8MnVd8/oNlaGcnZJBZoQ7uAe4SjJAWNdX3AkNrQTQ+ClmMxO23i4nXseStC+4agkPDYeChdcOzLRJ2f/2S+ukJqsW/tvKoN4bP5/sOpHxuN5qC3p5VbaizIefWBKkKWkCc+DO5paPAHAP7wQj+VFRVp/zhPy3Ufw+8I4VsE1QVPtS1ZLf6eJ5Qr3Se3GxfURld71EhvEHJXVbLdJzUL/2nk6nX1mGcxdXUpvIg2gt7rADrkoYq0ogKbYXyK1pOwljuEO0rykAh5k2pMp6hR7rVO7h3IY2Y6gOYpsBqhWfp/sQcbbZa6m7uge0dx8pUgjd9GY5CyUldNEXX3L5JRLaHP2G5UhDtfnn8Qk3sak8Y1dUR5BatyTnyTR2PWwnCVCZe09NdwLG8tpvl3nJCd8dfzPNFMp1Wb4YuuihKIPWkP2k5I0o4OVJB96wDby2Oy2TAwv9VAxh8dFJ9EvU1S390Pdekx8d0jrxgik35GaLDoeZR7ZhH4IqyzO+/WiNzkkGNrOm8MvN4dmom9kbtuCzgy14K097SrhJuoeDEMJ7CI5Tjwn+3AmfjkUQpXUTR+DzdDPKVRgh23w1c0MUoI1EYchky6st4hefmS4bhZhr5vJ9/QYfUpbywukv9iib4S8msMqOE6iqH86px6L3oubJike6fJBB1ODDTZb6V+fAvapLL6DTGQ+2hm2k1svL8litoeKxZaRIXq2/U3HsDb6ghQBJqP4OB29iP4Lv/FaVZlctV9QM5tC1UGRbCWRBSfQs/UOFAGtlhX8VJJMLTD7VQY6HRU23ehdXAYlJHN5FlkRvXQHdDzx2I8Lx1A3sxTd8MXdOjVKH4BCOp2pIx6zrHwar6qO6uYB3FaXXdYNycNXCUNlY9TFLwq5SFuemg60UdhieVa8hml4v/2sHOsDNV1JGM5zmx/U2qKhk/lq+7jXaCuuYxaTPba1OuMHhY16GiuJVonzKBUtjEDVtwPxJP+cXUaRfD/1w5zS0Ulr9DXcQPnIK39Xdgkn+WJahGzGkI1cda/xFhfNn6KP1R7c2Y4JZSBnWK26kkJhs51E/tGk8m5oInvSjOI5risjuorqlI8X0oZh+JmKQeuhn7KLjKmvmd6iCVnIKtMH5KOM6zGu5nP5hmixMLo8Ge0P6jWyD0ukR7F0lqIPEMc/gv0OIsqZvCSug8eZ964gnYXr+LsqPmojHrG0apiIzg6TtkyHc7BHIDzTXuL/yQ38Dhsnm5OPfCorYK/LFTKPOU4xr+m/6WzydVCmPWwM5+UuN9e1Ce/8TRbfdJVzbCrWQJTUO+R8V5Ouh6m6T2jpqllYDfew5Ylcb1teraRxUFb8xxp6zFWH+eqtbIhzomc+DRunqvv3doVoKfOEJGoRKilzmAt4B69k+0FyN0m2ED5ss6NkNLTbn1LDAmHU/QDBj5oU8j9cxLxi2dUd+z5E8RfNT9NUHvApzRU/Bv1R0MEPlER9Nzuhpb/lhmsLxUJfP8EkYWdUCbyW3QzlbTco4AfhKEDNUfeY7pLt8U/a063mUaGD+4wtofwtmo0L2WWqlSxHErH0aDltYsbwqHqNq2CnuJ3qdKjJh/hlYYrsKLKwwTy2eOnzyrIMB1A0rmhiNc3Iz9tkvJt44ZqhJQ70F+jhW8CIgNQuO49/Q8bcJ5NxWlaVj6Yx/VVIZWeY2uK+zuw3hSEhIu2hE5NLfiC9p//I7vq6i6+fioJwF2Uyf2lzHoGt521FPlUJrH+Aio
QzvJtcJnaGEwHewSXxGFExyX7y81hVsQGng6shr9lG74TM5KdX/LyLIevpKyin6sz/Qj/0MjTQh2g594Yct6NVPL5QNUC3QlX/RR3hOXE9th5Nhf2hBswWfdVZVJsvMQNoGnOVfvNx6Qudgo9Ra/hMVJV8wdF1XQwFSYqwzgxjkVQ9kS+cZjHEhzAK6qMKYlZIjg+ZGqIvykCWBy4T0dlkBykCq33WsIAOAoJaQjH/V5w1uekes5plQOPRfBuTFmGvWRueVX9VW2V7GcccoE90CTSW7cXzaU+9hdflUeUTkk001/PDCAnbTRXb2h4jPeCZ2O0Gh1JuOu2M97PnZjBd6QrJDuqBL60+kuH4BK+Fo8uzLjmaoO4Z4DvsCpZM9DJtlWKvUEnVmTVVj/SOUFmOxBHCZV7CJJETIKA8rIuZKavxzKaxvQSlxD/exg9g130ifoH20pBJPKAz2F+bwyVUq2Qrd98mshdVNhVTtjJXSFx4wzegSfhAKECfcY1u4Wamu3pPqogO+Fu4bifDU1MZRfepxAh8EeLYn0i4Ey6NWwYD4Yhp6hfK8uiGimFPubcsYXiI/nO58QmN5V4+zm1kpdl3AtoeFLF0MT0Wbqk5KJ37rmqFTWYR+4vLsGN4BM3uGoYUJgLv5irINGiw+upKhA3qOIxkiQjVGfR+uo7dRAv4B1WLbqApcD472903Hz2T6/0jmR6G0xWmEWz2g3U7uYZF1FNgKX7PK5p85lXoGMBAMzzA17Kb+EnZmFfk/eghNI4W9r1pGjGZ14YvbIHcHQbYy/Cbb0FTcW61x83ySGRGjc0SOC/qqKE+p28MfV0hfJhNV0P4VdGQdICcYrKPz/Lb306IfSKl+66z83LiKPokGeuq4pI5oqFMzY6FSQC50RXxgifnnckXEUfkZS9kFNJCn0b38Q4aWXRRt2Rl/pLMkll4fdwuPNaRXW11xT1lBdE2KfBblwAdDz/dNhIJtSZZzFtdWq+BqHZPKB8ukbZwCkf0Ne19X1hMFAvsLZIWFyPGnTe36TC9Ej8U5Tkk8J/0Ai9JpnCJ7iLz+VWzFqqEdyaXGqSWk8I4vYovWonifKW2Iok7p8boFaozGsinis86MpknWoeJoazD4OW5UEXvcxNoUvdDdDdP5Ag7V2xypbHy/eGcjY56yF2qGQwUz1xSaE2jit++h9mpYZpqYwuYyrAGT+QlXDsjVSrUXcwiiaCxfsYOm2lmszyrh4tY/LbrY9+GQqK8+SdSyYO2qsmqbvEi+old7nrCaL1Ed7Gx8B05gJ82C1FGFds3FM9tDvUJa9E4vNJVZTLzy89i2dg4sLQmFMGZ8TkH61lUf4Q94D1xRPTYMZst/IK9vjhskJdJeTdKfXNMdOfvVR5eDS3STUlGczIYHEvdhxZ2LR1ud/NYpqYIMqEs7P6yTbIpz8eru61QjH4mg1AybF17mgESqAN4PRnl8uvTsBpT9SlsJ4tgBKtjIZXua36TRmirSIo+iqX8FIol7pKx5CNEox1EdpGC3WWR5C4/Qf+wm3Rc9Z+fhdraPGi8KsWdT0Y7idMylzVwldSXGf1MeGZSiFGe+1tin67kr6ixag26TYYaSi771i5ueEjr+U4+neqPY6H37KaEFzBGFqfpuZIXUEsyIJST01xd2walDwvtGd0Xr7al/ALSXKbRNHSh1/xe9cHVDs+1hv7ul6xPX5ppZAjlZm446vuIsuiiW+rf8Yhmil+Bc0N3Ej3UxAXcTzWdZxEhaN3HRJaX5VMyyR3jLXxZDTnkbrsM3cA1eD52UGL2imx3xA7FB2wN+c9Opo3UG3rZDeIn9Wz2kCfTRVwEesH2oCn0MRHFzZWZcHm4y8GmVp/4BBzd7pXZbBd+3Kehjfw/N0duh2e4hTmuouCuvjrbo4uZaX5DqOyT+PxsJXTBMIOfstFd2/BF/8fnyximG1rFk/Bb6AWOywqHHSYhPhjy0zjuOWSndcUAMwVVtGtDZrFT1FCF+Bboxaz+wYujXVBNPSRt3TBel3xHhVk/9xASyFLqjEhr+/FFxMh7YiKktkftn5CDNDW7xTd7kcU1MJRWMm9Vb55YbVIl5D36BxqFk6osFmqjl8GTjLp7qCnHWMPa24NoufkdWuo7+j/zxUx0N+hbaBqQW6VGia52kcsnkb1p1/I5vgo26CIertrZgMfT8jqxrkeJfAMtwmAWX95Uo/g814vXll5BStHMzzG50EN8RE4g1WgWNNwtUpG10jl8S1zZvvfT7Urzi5eCKOEtweoMJWKejoFKoTY0TliqpCCU+WsqI7ywhpzipVFyeKKikfE+o63t11qguWAP/Wau6OEQE52l5dkq3BGeqwimFMnktyn4J4uoS3aNakAj8XbqStjpC/nXpL354q/zo3SxATjjuEtpr7H5uiodjVHoivbLhvoxnCDdMdZn/RMz0x/k0UIz3lv/EdN0K3pYdrO72VeeH24La2aqJ7wjWeFLhjlus/jC89FaKC05oN6biWqpgGjYshGQTpdTP8ggEQ9mkuTmgqglsFkrE4UBUNreIbnEMHcE9xRN8P2wlZTjr0xKv1HOEvn531ApJFLt1WdXRk/UKSyjmdxIkke903Ftc7EEC1PVDiaNfToRT/c2j0km6I6mKqcW44GqobuOOyp4goU26hWewpfxE/QZaoo2+L50vx5N8rmG/IefiDeJeuqDiAUFwjqeWX3VU11fdoFn04N9PVhNJoSdZoDMztbZ42YhfaMvueW4Irkmp+sS+hlJLmL5y6aI2KYvhGr6kG1kopid1vuiNlY4aXO5KhJmmTo8AWmF8/qUugcq5rLxb7gCiunu2jnQhZ2C2CGD6gw71CMzw13kQ0xEVogsZdVtHHjLD4j7LiIvxpxswLwYRguoCG6H7isSi/qwwQ0Rp8U4/IeuNq/oSDsDfto8dJx9ExJJyVqwX3S9Hi2TazjLCsNtu1984NXMdnbPLbaTdCv1Xpf02+UTqMZe8QWquBlDKoeEtp3e6+qTa7gV+SnG+VIhOeWop/0g56o0EFf+QC1wOdwRPyJH1U/AvgPJYffZMqEtzo4jhfoiKdOyrT7uqqA1NIvricqK3ei1gBW8DwE5zM8Jl3CCUC8MRpH0EbscEoihOptLBntDP+/CH5RWLkfvQhn1TCahR/w201XcYEvUGZbJbnajXRWyh/Xgt/TqkIBOcEXkPBsZHtiaaKlMbWbDSdGf7ab3aSl51fe3qf3nMM3e9vF5W5/BwQT/21ZQ611W2YGPtb8hHbuuiBP+nG6Op6HVqJUlEMUexs1YH5qbTBILRCY2nORVUeh0V1X/hwrwJuy5u2KWupx0Bj1NXtBsuKkezra58+Ez9NGN1R3x0VRindg7mRGZMA8XNOd4jXCIL+IfXYMAN3RSbVUT+oTFdmfMOl1R72SvPQtpwl95zZUxn+g9MtnVMOvDbXVcRnOd+Hr6iDcWH0g6/xRvD99FYtwJR/YlbD05AmFUneyl71x3W17k8xNRMrnJR1djaUGxlsThY6ARjgBPUSc7kkeH/GQIKilgG+8KRCv8mVLcW+Z300I7N
BzNJ0XZZhSR1OPSLmHdMOJF8Wf5HzD9K5zFFXG/sFIewu1RPFSOrULH1JTwUR1UMdUvNQAv5jHwTb3KxuWt8StXkuz3mfklNIcc0z3DPyhn9opkrClsVI/xqRBbwytYQq7gQTYNXi4bmGPyjk+CYuiHfj8fp3vDMZ+QZSRvzW6Yq7OilGQHFMfx3GyZXBa2DMa7S2YeuWeHyMy6p3lo29LNtDR3rq5Ljf+RI2guPkcHy9rkF2mJEvvqNI+4jRUs50FfgWy+u5uDaynIAq15dF4tPIB9KIp8L7PDUv1NVoWWJht6iQrIdfgcLu05vsbHBkGc5mECeyC2spv8F4rG++C80ICkoNXwOlIwXEOJzSyX23UIU0h/mklVoY9lfNdVL/E36VD20u4QbVxm6GeKyfGkEvrFUqPR/H9s/XjiBWp1EAAAAABJRU5ErkJggg=='; function makeRenderingPipeline({ @@ -3711,21 +4080,25 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { // higher number results in faster convergence over time, but with lower quality initial samples const strataCount = 6; + const decomposedScene = decomposeScene(scene); + + const mergedMesh = mergeMeshesToGeometry(decomposedScene.meshes); + + const materialBuffer = makeMaterialBuffer(gl, mergedMesh.materials); + const fullscreenQuad = makeFullscreenQuad(gl); - const rayTracePass = makeRayTracePass(gl, { bounces, fullscreenQuad, optionalExtensions, scene }); + const rayTracePass = makeRayTracePass(gl, { bounces, decomposedScene, fullscreenQuad, materialBuffer, mergedMesh, optionalExtensions, scene }); const reprojectPass = makeReprojectPass(gl, { fullscreenQuad, maxReprojectedSamples }); - const toneMapPass = makeToneMapPass(gl, { - fullscreenQuad, optionalExtensions, toneMappingParams - }); + const toneMapPass = makeToneMapPass(gl, { fullscreenQuad, toneMappingParams }); + + const gBufferPass = makeGBufferPass(gl, { materialBuffer, mergedMesh }); // used to sample only a portion of the scene to the HDR Buffer to prevent the GPU from locking up from excessive computation const tileRender = makeTileRender(gl); - const clearToBlack = new Float32Array([0, 0, 0, 0]); - let ready = false; const noiseImage = new Image(); noiseImage.src = noiseBase64; @@ -3734,6 +4107,14 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { ready = true; }; + let sampleCount = 0; + + let sampleRenderedCallback = () => {}; + + const lastCamera = new THREE$1.PerspectiveCamera(); + lastCamera.position.set(1, 1, 1); + lastCamera.updateMatrixWorld(); + let screenWidth = 0; let screenHeight = 0; @@ -3748,29 +4129,19 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { let reprojectBuffer; let reprojectBackBuffer; - let lastToneMappedScale; - let lastToneMappedTexture; - - const lastCamera = new THREE$1.PerspectiveCamera(); - lastCamera.position.set(1, 1, 1); - lastCamera.updateMatrixWorld(); - - let sampleCount = 0; + let gBuffer; + let gBufferBack; - let sampleRenderedCallback = () => {}; + let lastToneMappedTexture; + let lastToneMappedScale; function initFrameBuffers(width, height) { - const floatTex = () => makeTexture(gl, { width, height, storage: 'float' }); - const makeHdrBuffer = () => makeFramebuffer(gl, { - attachments: { - [rayTracePass.outputLocs.light]: floatTex(), - [rayTracePass.outputLocs.position]: floatTex(), - } - }); + color: { 0: makeTexture(gl, { width, height, storage: 'float', magFilter: gl.LINEAR, minFilter: gl.LINEAR }) } + }); const makeReprojectBuffer = () => makeFramebuffer(gl, { - attachments: { 0: floatTex() } + color: { 0: makeTexture(gl, { width, height, storage: 'float', magFilter: gl.LINEAR, minFilter: gl.LINEAR }) } }); hdrBuffer = makeHdrBuffer(); @@ -3779,8 +4150,28 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { reprojectBuffer = makeReprojectBuffer(); reprojectBackBuffer = makeReprojectBuffer(); + const normalBuffer 
= makeTexture(gl, { width, height, storage: 'halfFloat' }); + const faceNormalBuffer = makeTexture(gl, { width, height, storage: 'halfFloat' }); + const colorBuffer = makeTexture(gl, { width, height, storage: 'byte', channels: 3 }); + const matProps = makeTexture(gl, { width, height, storage: 'byte', channels: 2 }); + const depthTarget = makeDepthTarget(gl, width, height); + + const makeGBuffer = () => makeFramebuffer(gl, { + color: { + [gBufferPass.outputLocs.position]: makeTexture(gl, { width, height, storage: 'float' }), + [gBufferPass.outputLocs.normal]: normalBuffer, + [gBufferPass.outputLocs.faceNormal]: faceNormalBuffer, + [gBufferPass.outputLocs.color]: colorBuffer, + [gBufferPass.outputLocs.matProps]: matProps, + }, + depth: depthTarget + }); + + gBuffer = makeGBuffer(); + gBufferBack = makeGBuffer(); + + lastToneMappedTexture = hdrBuffer.color[rayTracePass.outputLocs.light]; lastToneMappedScale = fullscreenScale; - lastToneMappedTexture = hdrBuffer.attachments[rayTracePass.outputLocs.light]; } function swapReprojectBuffer() { @@ -3789,6 +4180,12 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { reprojectBackBuffer = temp; } + function swapGBuffer() { + let temp = gBuffer; + gBuffer = gBufferBack; + gBufferBack = temp; + } + function swapHdrBuffer() { let temp = hdrBuffer; hdrBuffer = hdrBackBuffer; @@ -3798,8 +4195,9 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { // Shaders will read from the back buffer and draw to the front buffer // Buffers are swapped after every render function swapBuffers() { - swapHdrBuffer(); swapReprojectBuffer(); + swapGBuffer(); + swapHdrBuffer(); } function setSize(w, h) { @@ -3828,6 +4226,24 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { cam1.focus === cam2.focus; } + function updateSeed(width, height, useJitter = true) { + rayTracePass.setSize(width, height); + + const jitterX = useJitter ? (Math.random() - 0.5) / width : 0; + const jitterY = useJitter ? 
(Math.random() - 0.5) / height : 0; + gBufferPass.setJitter(jitterX, jitterY); + rayTracePass.setJitter(jitterX, jitterY); + reprojectPass.setJitter(jitterX, jitterY); + + if (sampleCount === 0) { + rayTracePass.setStrataCount(1); + } else if (sampleCount === numUniformSamples) { + rayTracePass.setStrataCount(strataCount); + } else { + rayTracePass.nextSeed(); + } + } + function clearBuffer(buffer) { buffer.bind(); gl.clear(gl.COLOR_BUFFER_BIT); @@ -3841,8 +4257,6 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { gl.blendFunc(gl.ONE, gl.ONE); gl.enable(gl.BLEND); - gl.clearBufferfv(gl.COLOR, rayTracePass.outputLocs.position, clearToBlack); - gl.viewport(0, 0, width, height); rayTracePass.draw(); @@ -3857,15 +4271,32 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { buffer.unbind(); } - function toneMapToScreen(lightTexture, textureScale) { + function toneMapToScreen(lightTexture, lightScale) { gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); toneMapPass.draw({ light: lightTexture, - textureScale + lightScale, + position: gBuffer.color[gBufferPass.outputLocs.position], }); lastToneMappedTexture = lightTexture; - lastToneMappedScale = textureScale; + lastToneMappedScale = lightScale; + } + + function renderGBuffer() { + gBuffer.bind(); + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + gl.viewport(0, 0, screenWidth, screenHeight); + gBufferPass.draw(); + gBuffer.unbind(); + + rayTracePass.setGBuffers({ + position: gBuffer.color[gBufferPass.outputLocs.position], + normal: gBuffer.color[gBufferPass.outputLocs.normal], + faceNormal: gBuffer.color[gBufferPass.outputLocs.faceNormal], + color: gBuffer.color[gBufferPass.outputLocs.color], + matProps: gBuffer.color[gBufferPass.outputLocs.matProps] + }); } function renderTile(buffer, x, y, width, height) { @@ -3875,26 +4306,6 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { gl.disable(gl.SCISSOR_TEST); } - function updateSeed(width, height) { - rayTracePass.setSize(width, height); - - const jitterX = (Math.random() - 0.5) / width; - const jitterY = (Math.random() - 0.5) / height; - rayTracePass.setJitter(jitterX, jitterY); - reprojectPass.setJitter(jitterX, jitterY); - - if (sampleCount === 0) { - rayTracePass.setStrataCount(1); - } else if (sampleCount === numUniformSamples) { - rayTracePass.setStrataCount(strataCount); - } else { - rayTracePass.nextSeed(); - } - - rayTracePass.bindTextures(); - } - - function drawPreview(camera, lastCamera) { if (sampleCount > 0) { swapBuffers(); @@ -3904,27 +4315,32 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { tileRender.reset(); setPreviewBufferDimensions(); + updateSeed(previewWidth, previewHeight, false); + rayTracePass.setCamera(camera); + gBufferPass.setCamera(camera); reprojectPass.setPreviousCamera(lastCamera); lastCamera.copy(camera); - updateSeed(previewWidth, previewHeight); + renderGBuffer(); + + rayTracePass.bindTextures(); newSampleToBuffer(hdrBuffer, previewWidth, previewHeight); reprojectBuffer.bind(); gl.viewport(0, 0, previewWidth, previewHeight); reprojectPass.draw({ blendAmount: 1.0, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: previewScale, + light: hdrBuffer.color[0], + lightScale: previewScale, + position: gBuffer.color[gBufferPass.outputLocs.position], previousLight: lastToneMappedTexture, - previousPosition: 
hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - previousTextureScale: lastToneMappedScale, + previousLightScale: lastToneMappedScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position], }); reprojectBuffer.unbind(); - toneMapToScreen(reprojectBuffer.attachments[0], previewScale); + toneMapToScreen(reprojectBuffer.color[0], previewScale); swapBuffers(); } @@ -3939,7 +4355,9 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { reprojectPass.setPreviousCamera(lastCamera); } - updateSeed(screenWidth, screenHeight); + updateSeed(screenWidth, screenHeight, true); + renderGBuffer(); + rayTracePass.bindTextures(); } renderTile(hdrBuffer, x, y, tileWidth, tileHeight); @@ -3955,18 +4373,18 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { gl.viewport(0, 0, screenWidth, screenHeight); reprojectPass.draw({ blendAmount, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: fullscreenScale, - previousLight: reprojectBackBuffer.attachments[0], - previousPosition: hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - previousTextureScale: previewScale, + light: hdrBuffer.color[0], + lightScale: fullscreenScale, + position: gBuffer.color[gBufferPass.outputLocs.position], + previousLight: reprojectBackBuffer.color[0], + previousLightScale: previewScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position], }); reprojectBuffer.unbind(); - toneMapToScreen(reprojectBuffer.attachments[0], fullscreenScale); + toneMapToScreen(reprojectBuffer.color[0], fullscreenScale); } else { - toneMapToScreen(hdrBuffer.attachments[rayTracePass.outputLocs.light], fullscreenScale); + toneMapToScreen(hdrBuffer.color[0], fullscreenScale); } sampleRenderedCallback(sampleCount); @@ -3993,6 +4411,9 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { return; } + swapGBuffer(); + swapReprojectBuffer(); + if (sampleCount === 0) { reprojectPass.setPreviousCamera(lastCamera); } @@ -4000,34 +4421,34 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { if (!areCamerasEqual(camera, lastCamera)) { sampleCount = 0; rayTracePass.setCamera(camera); + gBufferPass.setCamera(camera); lastCamera.copy(camera); - swapHdrBuffer(); clearBuffer(hdrBuffer); } else { sampleCount++; } - updateSeed(screenWidth, screenHeight); + updateSeed(screenWidth, screenHeight, true); + renderGBuffer(); + + rayTracePass.bindTextures(); addSampleToBuffer(hdrBuffer, screenWidth, screenHeight); reprojectBuffer.bind(); gl.viewport(0, 0, screenWidth, screenHeight); reprojectPass.draw({ blendAmount: 1.0, - light: hdrBuffer.attachments[rayTracePass.outputLocs.light], - position: hdrBuffer.attachments[rayTracePass.outputLocs.position], - previousLight: reprojectBackBuffer.attachments[0], - previousPosition: hdrBackBuffer.attachments[rayTracePass.outputLocs.position], - textureScale: fullscreenScale, - previousTextureScale: fullscreenScale - + light: hdrBuffer.color[0], + lightScale: fullscreenScale, + position: gBuffer.color[gBufferPass.outputLocs.position], + previousLight: lastToneMappedTexture, + previousLightScale: lastToneMappedScale, + previousPosition: gBufferBack.color[gBufferPass.outputLocs.position], }); reprojectBuffer.unbind(); - toneMapToScreen(reprojectBuffer.attachments[0], fullscreenScale); - - swapReprojectBuffer(); + toneMapToScreen(reprojectBuffer.color[0], fullscreenScale); } return { @@ 
-4061,7 +4482,7 @@ void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { const gl = canvas.getContext('webgl2', { alpha: false, - depth: false, + depth: true, stencil: false, antialias: false, powerPreference: 'high-performance', diff --git a/package-lock.json b/package-lock.json index 9825e1b..882e2bf 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "ray-tracing-renderer", - "version": "0.5.0", + "version": "0.6.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 5559229..4201341 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "ray-tracing-renderer", - "version": "0.5.0", + "version": "0.6.0", "description": "A [Three.js](https://github.com/mrdoob/three.js/) renderer which utilizes path tracing to render a scene with true photorealism. The renderer supports global illumination, reflections, soft shadows, and realistic environment lighting.", "main": "build/RayTracingRenderer.js", "scripts": {