From 656fda835d16d2efa85f5686f8af23969165110d Mon Sep 17 00:00:00 2001 From: Francisco Avila Date: Wed, 11 Dec 2019 13:34:17 -0800 Subject: [PATCH] Release 0.3.0 --- build/RayTracingRenderer.es5.js | 1708 +++++++------ build/RayTracingRenderer.js | 4043 +++++++++++++++++-------------- package-lock.json | 2 +- package.json | 2 +- 4 files changed, 3132 insertions(+), 2623 deletions(-) diff --git a/build/RayTracingRenderer.es5.js b/build/RayTracingRenderer.es5.js index 2cd7956..c2cb0a0 100644 --- a/build/RayTracingRenderer.es5.js +++ b/build/RayTracingRenderer.es5.js @@ -529,268 +529,6 @@ }; } - // Manually performs linear filtering if the extension OES_texture_float_linear is not supported - function textureLinear (defines) { - return "\n\n vec4 textureLinear(sampler2D map, vec2 uv) {\n #ifdef OES_texture_float_linear\n return texture(map, uv);\n #else\n vec2 size = vec2(textureSize(map, 0));\n vec2 texelSize = 1.0 / size;\n\n uv = uv * size - 0.5;\n vec2 f = fract(uv);\n uv = floor(uv) + 0.5;\n\n vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize);\n vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize);\n vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize);\n vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize);\n\n return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y);\n #endif\n }\n"; - } - - function intersect (defines) { - return "\n\nuniform highp isampler2D indices;\nuniform sampler2D positions;\nuniform sampler2D normals;\nuniform sampler2D uvs;\nuniform sampler2D bvh;\n\nuniform Materials {\n vec4 colorAndMaterialType[NUM_MATERIALS];\n vec4 roughnessMetalnessNormalScale[NUM_MATERIALS];\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS];\n #endif\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS)\n vec4 diffuseNormalMapSize[".concat(Math.max(defines.NUM_DIFFUSE_MAPS, defines.NUM_NORMAL_MAPS), "];\n #endif\n\n #if defined(NUM_PBR_MAPS)\n vec2 pbrMapSize[NUM_PBR_MAPS];\n #endif\n} materials;\n\n#ifdef NUM_DIFFUSE_MAPS\n uniform mediump sampler2DArray diffuseMap;\n#endif\n\n#ifdef NUM_NORMAL_MAPS\n uniform mediump sampler2DArray normalMap;\n#endif\n\n#ifdef NUM_PBR_MAPS\n uniform mediump sampler2DArray pbrMap;\n#endif\n\nstruct Triangle {\n vec3 p0;\n vec3 p1;\n vec3 p2;\n};\n\nvoid surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) {\n si.hit = true;\n si.faceNormal = faceNormal;\n si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2;\n ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS);\n ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS);\n ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS);\n\n vec3 n0 = texelFetch(normals, i0, 0).xyz;\n vec3 n1 = texelFetch(normals, i1, 0).xyz;\n vec3 n2 = texelFetch(normals, i2, 0).xyz;\n si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2);\n\n si.color = materials.colorAndMaterialType[materialIndex].xyz;\n si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x;\n si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y;\n\n si.materialType = int(materials.colorAndMaterialType[materialIndex].w);\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n vec2 uv0 = texelFetch(uvs, i0, 0).xy;\n vec2 uv1 = texelFetch(uvs, i1, 0).xy;\n vec2 uv2 = texelFetch(uvs, i2, 0).xy;\n vec2 uv = fract(barycentric.x * uv0 + barycentric.y * 
uv1 + barycentric.z * uv2);\n #endif\n\n #ifdef NUM_DIFFUSE_MAPS\n int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x;\n if (diffuseMapIndex >= 0) {\n si.color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb;\n }\n #endif\n\n #ifdef NUM_NORMAL_MAPS\n int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y;\n if (normalMapIndex >= 0) {\n vec2 duv02 = uv0 - uv2;\n vec2 duv12 = uv1 - uv2;\n vec3 dp02 = tri.p0 - tri.p2;\n vec3 dp12 = tri.p1 - tri.p2;\n\n // Method One\n // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0\n // Compute tangent vectors relative to the face normal. These vectors won't necessarily be orthogonal to the smoothed normal\n // This means the TBN matrix won't be orthogonal which is technically incorrect.\n // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js)\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale);\n\n // Method Two\n // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal\n // This might inadvertently flip coordinate space orientation\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process\n // vec3 dpdv = cross(si.normal, dpdu) * scale;\n\n // Method Three\n // http://www.thetenthplanet.de/archives/1180\n // Compute co-tangent and co-bitangent vectors\n // These vectors are orthongal and maintain a consistent coordinate space\n // --------------\n vec3 dp12perp = cross(dp12, si.normal);\n vec3 dp02perp = cross(si.normal, dp02);\n vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x;\n vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y;\n float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv)));\n dpdu *= invmax;\n dpdv *= invmax;\n\n vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0;\n n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw;\n\n mat3 tbn = mat3(dpdu, dpdv, si.normal);\n\n si.normal = normalize(tbn * n);\n }\n #endif\n\n #ifdef NUM_PBR_MAPS\n int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z;\n int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w;\n if (roughnessMapIndex >= 0) {\n si.roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g;\n }\n if (metalnessMapIndex >= 0) {\n si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b;\n }\n #endif\n}\n\nstruct TriangleIntersect {\n float t;\n vec3 barycentric;\n};\n\n// Triangle-ray intersection\n// Faster than the classic M\xF6ller\u2013Trumbore intersection algorithm\n// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection\nTriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) {\n TriangleIntersect ti;\n vec3 d = r.d;\n\n // translate vertices based on ray origin\n vec3 p0t = tri.p0 - r.o;\n 
vec3 p1t = tri.p1 - r.o;\n vec3 p2t = tri.p2 - r.o;\n\n // permute components of triangle vertices\n if (maxDim == 0) {\n p0t = p0t.yzx;\n p1t = p1t.yzx;\n p2t = p2t.yzx;\n } else if (maxDim == 1) {\n p0t = p0t.zxy;\n p1t = p1t.zxy;\n p2t = p2t.zxy;\n }\n\n // apply shear transformation to translated vertex positions\n p0t.xy += shear.xy * p0t.z;\n p1t.xy += shear.xy * p1t.z;\n p2t.xy += shear.xy * p2t.z;\n\n // compute edge function coefficients\n vec3 e = vec3(\n p1t.x * p2t.y - p1t.y * p2t.x,\n p2t.x * p0t.y - p2t.y * p0t.x,\n p0t.x * p1t.y - p0t.y * p1t.x\n );\n\n // check if intersection is inside triangle\n if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) {\n return ti;\n }\n\n float det = e.x + e.y + e.z;\n\n // not needed?\n // if (det == 0.) {\n // return ti;\n // }\n\n p0t.z *= shear.z;\n p1t.z *= shear.z;\n p2t.z *= shear.z;\n float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z);\n\n // not needed?\n // if (sign(det) != sign(tScaled)) {\n // return ti;\n // }\n\n // check if closer intersection already exists\n if (abs(tScaled) > abs(r.tMax * det)) {\n return ti;\n }\n\n float invDet = 1. / det;\n ti.t = tScaled * invDet;\n ti.barycentric = e * invDet;\n\n return ti;\n}\n\nstruct Box {\n vec3 min;\n vec3 max;\n};\n\n// Branchless ray/box intersection\n// https://tavianator.com/fast-branchless-raybounding-box-intersections/\nfloat intersectBox(Ray r, Box b) {\n vec3 tBot = (b.min - r.o) * r.invD;\n vec3 tTop = (b.max - r.o) * r.invD;\n vec3 tNear = min(tBot, tTop);\n vec3 tFar = max(tBot, tTop);\n float t0 = max(tNear.x, max(tNear.y, tNear.z));\n float t1 = min(tFar.x, min(tFar.y, tFar.z));\n\n return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);\n}\n\nint maxDimension(vec3 v) {\n return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 1 : 2);\n}\n\n// Traverse BVH, find closest triangle intersection, and return surface information\nSurfaceInteraction intersectScene(inout Ray ray) {\n SurfaceInteraction si;\n\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n // Intersection is a bounding box. 
Test for box intersection and keep traversing BVH\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n // traverse near node to ray first, and far node to ray last\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear);\n\n if (hit.t > 0.0) {\n ray.tMax = hit.t;\n int materialIndex = floatBitsToInt(r2.w);\n vec3 faceNormal = r2.xyz;\n surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex);\n }\n }\n }\n\n // Values must be clamped outside of intersection loop. Clamping inside the loop produces incorrect numbers on some devices.\n si.roughness = clamp(si.roughness, 0.03, 1.0);\n si.metalness = clamp(si.metalness, 0.0, 1.0);\n\n return si;\n}\n\nbool intersectSceneShadow(inout Ray ray) {\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n\n if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) {\n return true;\n }\n }\n }\n\n return false;\n}\n"); - } - - function random (defines) { - return "\n\n// Noise texture used to generate a different random number for each pixel.\n// We use blue noise in particular, but any type of noise will work.\nuniform sampler2D noise;\n\nuniform float stratifiedSamples[SAMPLING_DIMENSIONS];\nuniform float strataSize;\nuniform float useStratifiedSampling;\n\n// Every time we call randomSample() in the shader, and for every call to render,\n// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples\n// This allows us to use stratified sampling for each random variable in our path tracing\nint sampleIndex = 0;\n\nconst highp float maxUint = 1.0 / 4294967295.0;\n\nfloat pixelSeed;\nhighp uint randState;\n\n// simple integer hashing function\n// 
https://en.wikipedia.org/wiki/Xorshift\nuint xorshift(uint x) {\n x ^= x << 13u;\n x ^= x >> 17u;\n x ^= x << 5u;\n return x;\n}\n\nvoid initRandom() {\n vec2 noiseSize = vec2(textureSize(noise, 0));\n\n // tile the small noise texture across the entire screen\n pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r;\n\n // white noise used if stratified sampling is disabled\n // produces more balanced path tracing for 1 sample-per-pixel renders\n randState = xorshift(xorshift(floatBitsToUint(vCoord.x)) * xorshift(floatBitsToUint(vCoord.y)));\n}\n\nfloat randomSample() {\n randState = xorshift(randState);\n\n float stratifiedSample = stratifiedSamples[sampleIndex++];\n\n float random = mix(\n float(randState) * maxUint, // white noise\n fract((stratifiedSample + pixelSeed) * strataSize), // blue noise + stratified samples\n useStratifiedSampling\n );\n\n // transform random number between [0, 1] to (0, 1)\n return EPS + (1.0 - 2.0 * EPS) * random;\n}\n\nvec2 randomSampleVec2() {\n return vec2(randomSample(), randomSample());\n}\n"; - } - - // Sample the environment map using a cumulative distribution function as described in - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights - function envmap (defines) { - return "\n\nuniform sampler2D envmap;\nuniform sampler2D envmapDistribution;\n\nvec2 cartesianToEquirect(vec3 pointOnSphere) {\n float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI);\n float theta = acos(pointOnSphere.y);\n return vec2(phi * 0.5 * INVPI, theta * INVPI);\n}\n\nfloat getEnvmapV(float u, out int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.y + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n vOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy;\n\n pdf = s0.y;\n\n return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y);\n}\n\nfloat getEnvmapU(float u, int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.x + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n int uOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy;\n\n pdf = s0.y;\n\n return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x);\n}\n\n// Perform two binary searches to find light direction.\nvec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) {\n vec2 partialPdf;\n int vOffset;\n\n uv.y = getEnvmapV(random.x, vOffset, partialPdf.y);\n uv.x = getEnvmapU(random.y, vOffset, partialPdf.x);\n\n float phi = uv.x * TWOPI;\n float theta = uv.y * PI;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n float 
cosPhi = cos(phi);\n float sinPhi = sin(phi);\n\n vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi);\n\n pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta);\n\n return dir;\n}\n\nfloat envmapPdf(vec2 uv) {\n vec2 size = vec2(textureSize(envmap, 0));\n\n float sinTheta = sin(uv.y * PI);\n\n uv *= size;\n\n float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y;\n float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y;\n\n return partialX * partialY * INVPI2 / (2.0 * sinTheta);\n}\n\nvec3 sampleEnvmapFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(envmap, uv).rgb;\n}\n\n"; - } - - function bsdf (defines) { - return "\n\n// Computes the exact value of the Fresnel factor\n// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/\nfloat fresnel(float cosTheta, float eta, float invEta) {\n eta = cosTheta > 0.0 ? eta : invEta;\n cosTheta = abs(cosTheta);\n\n float gSquared = eta * eta + cosTheta * cosTheta - 1.0;\n\n if (gSquared < 0.0) {\n return 1.0;\n }\n\n float g = sqrt(gSquared);\n\n float a = (g - cosTheta) / (g + cosTheta);\n float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0);\n\n return 0.5 * a * a * (1.0 + b * b);\n}\n\nfloat fresnelSchlickWeight(float cosTheta) {\n float w = 1.0 - cosTheta;\n return (w * w) * (w * w) * w;\n}\n\n// Computes Schlick's approximation of the Fresnel factor\n// Assumes ray is moving from a less dense to a more dense medium\nfloat fresnelSchlick(float cosTheta, float r0) {\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\n// Computes Schlick's approximation of Fresnel factor\n// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium\nfloat fresnelSchlickTIR(float cosTheta, float r0, float ni) {\n\n // moving from a more dense to a less dense medium\n if (cosTheta < 0.0) {\n float inv_eta = ni;\n float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta);\n if (SinT2 > 1.0) {\n return 1.0; // total internal reflection\n }\n cosTheta = sqrt(1.0f - SinT2);\n }\n\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\nfloat trowbridgeReitzD(float cosTheta, float alpha2) {\n float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0;\n return alpha2 / (PI * e * e);\n}\n\nfloat trowbridgeReitzLambda(float cosTheta, float alpha2) {\n float cos2Theta = cosTheta * cosTheta;\n float tan2Theta = (1.0 - cos2Theta) / cos2Theta;\n return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta));\n}\n\n// An implementation of Disney's principled BRDF\n// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf\nvec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) {\n vec3 halfVector = normalize(viewDir + lightDir);\n\n cosThetaL = abs(cosThetaL);\n float cosThetaV = abs(dot(si.normal, viewDir));\n float cosThetaH = abs(dot(si.normal, halfVector));\n float cosThetaD = abs(dot(lightDir, halfVector));\n\n float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness);\n\n float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness));\n float D = trowbridgeReitzD(cosThetaH, alpha2);\n\n float roughnessRemapped = 0.5 + 0.5 * si.roughness;\n float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped);\n\n float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped));\n\n float 
specular = F * D * G / (4.0 * cosThetaV * cosThetaL);\n float specularPdf = D * cosThetaH / (4.0 * cosThetaD);\n\n float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness;\n float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV));\n float diffusePdf = cosThetaL * INVPI;\n\n pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness);\n\n return mix(si.color * diffuse + specular, si.color * specular, si.metalness);\n}\n\n"; - } - - function sample (defines) { - return "\n\n// https://graphics.pixar.com/library/OrthonormalB/paper.pdf\nmat3 orthonormalBasis(vec3 n) {\n float zsign = n.z >= 0.0 ? 1.0 : -1.0;\n float a = -1.0 / (zsign + n.z);\n float b = n.x * n.y * a;\n vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x);\n vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y);\n return mat3(s, t, n);\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk\nvec2 sampleCircle(vec2 p) {\n p = 2.0 * p - 1.0;\n\n bool greater = abs(p.x) > abs(p.y);\n\n float r = greater ? p.x : p.y;\n float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y);\n\n return r * vec2(cos(theta), sin(theta));\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling\nvec3 cosineSampleHemisphere(vec2 p) {\n vec2 h = sampleCircle(p);\n float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y));\n return vec3(h, z);\n}\n\n\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs\n// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper\nvec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) {\n float phi = TWOPI * random.y;\n float alpha = roughness * roughness;\n float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x));\n float sinTheta = sqrt(1.0 - cosTheta * cosTheta);\n\n vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta);\n\n vec3 lightDir = reflect(-viewDir, halfVector);\n\n return lightDir;\n}\n\nvec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) {\n return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random);\n}\n\nfloat powerHeuristic(float f, float g) {\n return (f * f) / (f * f + g * g);\n}\n\n"; - } - - // Estimate the direct lighting integral using multiple importance sampling - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral - function sampleMaterial (defines) { - return "\n\nvec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) {\n vec3 li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec3 irr = textureLinear(envmap, uv).xyz;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, 
cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n li = brdf * irr * abs(cosThetaL) * weight / lightPdf;\n\n return li;\n}\n\nvec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) {\n vec3 li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n vec3 irr = textureLinear(envmap, uv).rgb;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf;\n\n return li;\n}\n\nvec3 sampleMaterial(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta, inout bool abort) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -ray.d;\n\n vec2 diffuseOrSpecular = randomSampleVec2();\n\n vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n bool lastBounce = bounce == BOUNCES;\n\n // Add path contribution\n vec3 li = beta * (\n importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) +\n importanceSampleMaterial(si, viewDir, lastBounce, lightDir)\n );\n\n // Get new path direction\n\n lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf);\n\n beta *= abs(cosThetaL) * brdf / scatteringPdf;\n\n initRay(ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n abort = orientation < 0.0;\n\n return li;\n}\n\n"; - } - - function sampleShadowCatcher (defines) { - return "\n\n#ifdef USE_SHADOW_CATCHER\n\nfloat importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) {\n float li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nfloat importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, 
inout float alpha) {\n float li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nvec3 sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta, inout float alpha, inout vec3 prevLi, inout bool abort) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -ray.d;\n vec3 color = sampleEnvmapFromDirection(-viewDir);\n\n vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float alphaBounce = 0.0;\n\n // Add path contribution\n vec3 li = beta * color * (\n importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) +\n importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce)\n );\n\n // alphaBounce contains the lighting of the shadow catcher *without* shadows\n alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce;\n\n // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher\n alpha *= alphaBounce;\n\n // we only want the alpha division to affect the shadow catcher\n // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution\n prevLi *= alphaBounce;\n\n // Get new path direction\n\n lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n // lambertian brdf with terms cancelled\n beta *= color;\n\n initRay(ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n abort = orientation < 0.0;\n\n // advance dimension index by unused stratified samples\n const int usedSamples = 6;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n\n return li;\n}\n\n#endif\n"; - } - - function sampleGlass (defines) { - return "\n\n#ifdef USE_GLASS\n\nvec3 sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta) {\n vec3 viewDir = -ray.d;\n float cosTheta = dot(si.normal, viewDir);\n\n float F = si.materialType == THIN_GLASS ?\n fresnelSchlick(abs(cosTheta), R0) : // thin glass\n fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass\n\n vec3 lightDir;\n\n float reflectionOrRefraction = randomSample();\n\n if (reflectionOrRefraction < F) {\n lightDir = reflect(-viewDir, si.normal);\n } else {\n lightDir = si.materialType == THIN_GLASS ?\n refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass\n refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? 
IOR : INV_IOR); // thick glass\n beta *= si.color;\n }\n\n initRay(ray, si.position + EPS * lightDir, lightDir);\n\n // advance sample index by unused stratified samples\n const int usedSamples = 1;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n\n return bounce == BOUNCES ? beta * sampleEnvmapFromDirection(lightDir) : vec3(0.0);\n}\n\n#endif\n\n"; - } - - function unrollLoop(indexName, start, limit, step, code) { - var unrolled = "int ".concat(indexName, ";\n"); - - for (var i = start; step > 0 && i < limit || step < 0 && i > limit; i += step) { - unrolled += "".concat(indexName, " = ").concat(i, ";\n"); - unrolled += code; - } - - return unrolled; - } - function addDefines(params) { - var defines = ''; - - for (var _i = 0, _Object$entries = Object.entries(params); _i < _Object$entries.length; _i++) { - var _Object$entries$_i = _slicedToArray(_Object$entries[_i], 2), - name = _Object$entries$_i[0], - value = _Object$entries$_i[1]; - - // don't define falsy values such as false, 0, and ''. - // this adds support for #ifdef on falsy values - if (value) { - defines += "#define ".concat(name, " ").concat(value, "\n"); - } - } - - return defines; - } - - function fragString (defines) { - return "#version 300 es\n\nprecision mediump float;\nprecision mediump int;\n\n".concat(addDefines(defines), "\n\n#define PI 3.14159265359\n#define TWOPI 6.28318530718\n#define INVPI 0.31830988618\n#define INVPI2 0.10132118364\n#define EPS 0.0005\n#define INF 1.0e999\n#define RAY_MAX_DISTANCE 9999.0\n\n#define STANDARD 0\n#define THIN_GLASS 1\n#define THICK_GLASS 2\n#define SHADOW_CATCHER 3\n\n#define SAMPLES_PER_MATERIAL 8\n\nconst float IOR = 1.5;\nconst float INV_IOR = 1.0 / IOR;\n\nconst float IOR_THIN = 1.015;\nconst float INV_IOR_THIN = 1.0 / IOR_THIN;\n\nconst float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR));\n\n// https://www.w3.org/WAI/GL/wiki/Relative_luminance\nconst vec3 luminance = vec3(0.2126, 0.7152, 0.0722);\n\nstruct Ray {\n vec3 o;\n vec3 d;\n vec3 invD;\n float tMax;\n};\n\nstruct SurfaceInteraction {\n bool hit;\n vec3 position;\n vec3 normal; // smoothed normal from the three triangle vertices\n vec3 faceNormal; // normal of the triangle\n vec3 color;\n float roughness;\n float metalness;\n int materialType;\n};\n\nstruct Camera {\n mat4 transform;\n float aspect;\n float fov;\n float focus;\n float aperture;\n};\n\nuniform Camera camera;\nuniform vec2 pixelSize; // 1 / screenResolution\n\nin vec2 vCoord;\n\nout vec4 fragColor;\n\nvoid initRay(inout Ray ray, vec3 origin, vec3 direction) {\n ray.o = origin;\n ray.d = direction;\n ray.invD = 1.0 / ray.d;\n ray.tMax = RAY_MAX_DISTANCE;\n}\n\n// given the index from a 1D array, retrieve corresponding position from packed 2D texture\nivec2 unpackTexel(int i, int columnsLog2) {\n ivec2 u;\n u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2)\n u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2)\n return u;\n}\n\nvec4 fetchData(sampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n}\n\nivec4 fetchData(isampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n}\n\n").concat(textureLinear(), "\n").concat(intersect(defines), "\n").concat(random(), "\n").concat(envmap(), "\n").concat(bsdf(), "\n").concat(sample(), "\n").concat(sampleMaterial(), "\n").concat(sampleGlass(), "\n").concat(sampleShadowCatcher(), "\n\nstruct Path {\n Ray ray;\n vec3 li;\n float alpha;\n vec3 beta;\n bool specularBounce;\n bool 
abort;\n};\n\nvoid bounce(inout Path path, int i) {\n if (path.abort) {\n return;\n }\n\n SurfaceInteraction si = intersectScene(path.ray);\n\n if (!si.hit) {\n if (path.specularBounce) {\n path.li += path.beta * sampleEnvmapFromDirection(path.ray.d);\n }\n\n path.abort = true;\n } else {\n #ifdef USE_GLASS\n if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) {\n path.li += sampleGlassSpecular(si, i, path.ray, path.beta);\n path.specularBounce = true;\n }\n #endif\n #ifdef USE_SHADOW_CATCHER\n if (si.materialType == SHADOW_CATCHER) {\n path.li += sampleShadowCatcher(si, i, path.ray, path.beta, path.alpha, path.li, path.abort);\n path.specularBounce = false;\n }\n #endif\n if (si.materialType == STANDARD) {\n path.li += sampleMaterial(si, i, path.ray, path.beta, path.abort);\n path.specularBounce = false;\n }\n\n // Russian Roulette sampling\n if (i >= 2) {\n float q = 1.0 - dot(path.beta, luminance);\n if (randomSample() < q) {\n path.abort = true;\n }\n path.beta /= 1.0 - q;\n }\n }\n}\n\n// Path tracing integrator as described in\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html#\nvec4 integrator(inout Ray ray) {\n Path path;\n path.ray = ray;\n path.li = vec3(0);\n path.alpha = 1.0;\n path.beta = vec3(1.0);\n path.specularBounce = true;\n path.abort = false;\n\n // Manually unroll for loop.\n // Some hardware fails to interate over a GLSL loop, so we provide this workaround\n\n ").concat(unrollLoop('i', 1, defines.BOUNCES + 1, 1, "\n // equivelant to\n // for (int i = 1; i < defines.bounces + 1, i += 1)\n bounce(path, i);\n "), "\n\n return vec4(path.li, path.alpha);\n}\n\nvoid main() {\n initRandom();\n\n vec2 vCoordAntiAlias = vCoord + pixelSize * (randomSampleVec2() - 0.5);\n\n vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov));\n\n // Thin lens model with depth-of-field\n // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField\n vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2());\n vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane\n\n vec3 origin = vec3(lensPoint, 0.0);\n direction = normalize(focusPoint - origin);\n\n origin = vec3(camera.transform * vec4(origin, 1.0));\n direction = mat3(camera.transform) * direction;\n\n Ray cam;\n initRay(cam, origin, direction);\n\n vec4 liAndAlpha = integrator(cam);\n\n if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) {\n liAndAlpha = vec4(0, 0, 0, 1);\n }\n\n fragColor = liAndAlpha;\n\n // Stratified Sampling Sample Count Test\n // ---------------\n // Uncomment the following code\n // Then observe the colors of the image\n // If:\n // * The resulting image is pure black\n // Extra samples are being passed to the shader that aren't being used.\n // * The resulting image contains red\n // Not enough samples are being passed to the shader\n // * The resulting image contains only white with some black\n // All samples are used by the shader. 
Correct result!\n\n // fragColor = vec4(0, 0, 0, 1);\n // if (sampleIndex == SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 1, 1, 1);\n // } else if (sampleIndex > SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 0, 0, 1);\n // }\n}\n"); - } - - function mergeMeshesToGeometry(meshes) { - var vertexCount = 0; - var indexCount = 0; - var geometryAndMaterialIndex = []; - var materialIndexMap = new Map(); - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; - - try { - for (var _iterator = meshes[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var mesh = _step.value; - - var _geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); - - var index = _geometry.getIndex(); - - if (!index) { - addFlatGeometryIndices(_geometry); - } - - _geometry.applyMatrix(mesh.matrixWorld); - - if (!_geometry.getAttribute('normal')) { - _geometry.computeVertexNormals(); - } else { - _geometry.normalizeNormals(); - } - - vertexCount += _geometry.getAttribute('position').count; - indexCount += _geometry.getIndex().count; - var material = mesh.material; - var materialIndex = materialIndexMap.get(material); - - if (materialIndex === undefined) { - materialIndex = materialIndexMap.size; - materialIndexMap.set(material, materialIndex); - } - - geometryAndMaterialIndex.push({ - geometry: _geometry, - materialIndex: materialIndex - }); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } - } - - var _mergeGeometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount), - geometry = _mergeGeometry.geometry, - materialIndices = _mergeGeometry.materialIndices; - - return { - geometry: geometry, - materialIndices: materialIndices, - materials: Array.from(materialIndexMap.keys()) - }; - } - - function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { - var position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - var normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - var uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); - var index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); - var materialIndices = []; - var bg = new THREE$1.BufferGeometry(); - bg.addAttribute('position', position); - bg.addAttribute('normal', normal); - bg.addAttribute('uv', uv); - bg.setIndex(index); - var currentVertex = 0; - var currentIndex = 0; - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; - - try { - for (var _iterator2 = geometryAndMaterialIndex[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var _step2$value = _step2.value, - geometry = _step2$value.geometry, - materialIndex = _step2$value.materialIndex; - var _vertexCount = geometry.getAttribute('position').count; - bg.merge(geometry, currentVertex); - var meshIndex = geometry.getIndex(); - - for (var i = 0; i < meshIndex.count; i++) { - index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); - } - - var triangleCount = meshIndex.count / 3; - - for (var _i = 0; _i < triangleCount; _i++) { - materialIndices.push(materialIndex); - } - - 
currentVertex += _vertexCount; - currentIndex += meshIndex.count; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } - } - - return { - geometry: bg, - materialIndices: materialIndices - }; - } // Similar to buffergeometry.clone(), except we only copy - // specific attributes instead of everything - - - function cloneBufferGeometry(bufferGeometry, attributes) { - var newGeometry = new THREE$1.BufferGeometry(); - var _iteratorNormalCompletion3 = true; - var _didIteratorError3 = false; - var _iteratorError3 = undefined; - - try { - for (var _iterator3 = attributes[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var name = _step3.value; - var attrib = bufferGeometry.getAttribute(name); - - if (attrib) { - newGeometry.addAttribute(name, attrib.clone()); - } - } - } catch (err) { - _didIteratorError3 = true; - _iteratorError3 = err; - } finally { - try { - if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { - _iterator3["return"](); - } - } finally { - if (_didIteratorError3) { - throw _iteratorError3; - } - } - } - - var index = bufferGeometry.getIndex(); - - if (index) { - newGeometry.setIndex(index); - } - - return newGeometry; - } - - function addFlatGeometryIndices(geometry) { - var position = geometry.getAttribute('position'); - - if (!position) { - console.warn('No position attribute'); - return; - } - - var index = new Uint32Array(position.count); - - for (var i = 0; i < index.length; i++) { - index[i] = i; - } - - geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); - return geometry; - } - // Reorders the elements in the range [first, last) in such a way that // all elements for which the comparator c returns true // precede the elements for which comparator c returns false. 
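
The context comment above describes the partition helper the bundle relies on while building its BVH: reorder a range so that every element the comparator accepts precedes every element it rejects. A minimal sketch of that behavior in plain JavaScript, assuming an ordinary array, a predicate, and half-open [start, end) bounds; the names and exact swap strategy here are illustrative, not the bundle's implementation:

    // Reorder array[start..end) in place so every element for which
    // predicate(x) is true precedes every element for which it is false.
    // Returns the index of the first element of the "false" group.
    // Illustrative sketch only; the bundled helper may differ in details.
    function partition(array, predicate, start = 0, end = array.length) {
      while (true) {
        // advance past elements already in the correct (true) group
        while (start < end && predicate(array[start])) {
          start++;
        }
        // retreat past elements already in the correct (false) group
        while (start < end && !predicate(array[end - 1])) {
          end--;
        }
        if (start >= end - 1) {
          return start;
        }
        // swap a misplaced pair and keep going
        const tmp = array[start];
        array[start] = array[end - 1];
        array[end - 1] = tmp;
        start++;
        end--;
      }
    }

    // e.g. partition([3, 8, 1, 6, 2], x => x < 5) moves the values < 5
    // to the front and returns 3.
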
@@ -1097,68 +835,6 @@ return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); } - // Create a piecewise 2D cumulative distribution function of light intensity from an envmap - // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions - function envmapDistribution(image) { - var data = image.data; - var cdfImage = { - width: image.width + 2, - height: image.height + 1 - }; - var cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); - - for (var y = 0; y < image.height; y++) { - var sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); - - for (var x = 0; x < image.width; x++) { - var i = 3 * (y * image.width + x); - var r = data[i]; - var g = data[i + 1]; - var b = data[i + 2]; - var luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; - luminance *= sinTheta; - cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); - cdf.set(x + 1, y, 1, luminance); - } - - var rowIntegral = cdf.get(cdfImage.width - 1, y, 0); - - for (var _x = 1; _x < cdf.width; _x++) { - cdf.set(_x, y, 0, cdf.get(_x, y, 0) / rowIntegral); - cdf.set(_x, y, 1, cdf.get(_x, y, 1) / rowIntegral); - } - - cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); - cdf.set(0, y, 1, rowIntegral); - } - - var integral = cdf.get(0, cdf.height - 1, 0); - - for (var _y = 0; _y < cdf.height; _y++) { - cdf.set(0, _y, 0, cdf.get(0, _y, 0) / integral); - cdf.set(0, _y, 1, cdf.get(0, _y, 1) / integral); - } - - cdfImage.data = cdf.array; - return cdfImage; - } - - function makeTextureArray(width, height, channels) { - var array = new Float32Array(channels * width * height); - return { - set: function set(x, y, channel, val) { - array[channels * (y * width + x) + channel] = val; - }, - get: function get(x, y, channel) { - return array[channels * (y * width + x) + channel]; - }, - width: width, - height: height, - channels: channels, - array: array - }; - } - // Convert image data from the RGBE format to a 32-bit floating point format // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format // Optional multiplier argument for performance optimization @@ -1213,12 +889,31 @@ // Convert image data from the RGBE format to a 32-bit floating point format var DEFAULT_MAP_RESOLUTION = { - width: 4096, - height: 2048 + width: 2048, + height: 1024 }; // Tools for generating and modify env maps for lighting from scene component data - function generateEnvMapFromSceneComponents(directionalLights, environmentLights) { + function generateBackgroundMapFromSceneBackground(background) { + var backgroundImage; + + if (background.isColor) { + backgroundImage = generateSolidMap(1, 1, background); + } else if (background.encoding === THREE$1.RGBEEncoding) { + backgroundImage = { + width: background.image.width, + height: background.image.height, + data: background.image.data + }; + backgroundImage.data = rgbeToFloat(backgroundImage.data); + } + + return backgroundImage; + } + function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { var envImage = initializeEnvMap(environmentLights); + ambientLights.forEach(function (light) { + addAmbientLightToEnvMap(light, envImage); + }); directionalLights.forEach(function (light) { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); @@ -1238,20 +933,56 @@ envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); } else { // initialize blank map - envImage = 
generateBlankMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); + envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); } return envImage; } - function generateBlankMap(width, height) { + function generateSolidMap(width, height, color, intensity) { var texels = width * height; var floatBuffer = new Float32Array(texels * 3); + + if (color && color.isColor) { + setBufferToColor(floatBuffer, color, intensity); + } + return { width: width, height: height, data: floatBuffer }; } + + function setBufferToColor(buffer, color) { + var intensity = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1; + buffer.forEach(function (part, index) { + var component = index % 3; + + if (component === 0) { + buffer[index] = color.r * intensity; + } else if (component === 1) { + buffer[index] = color.g * intensity; + } else if (component === 2) { + buffer[index] = color.b * intensity; + } + }); + return buffer; + } + + function addAmbientLightToEnvMap(light, image) { + var color = light.color; + image.data.forEach(function (part, index) { + var component = index % 3; + + if (component === 0) { + image.data[index] += color.r * light.intensity; + } else if (component === 1) { + image.data[index] += color.g * light.intensity; + } else if (component === 2) { + image.data[index] += color.b * light.intensity; + } + }); + } function addDirectionalLightToEnvMap(light, image) { var sphericalCoords = new THREE$1.Spherical(); var lightDirection = light.position.clone().sub(light.target.position); @@ -1373,21 +1104,186 @@ return target; } - // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property - function getTexturesFromMaterials(meshes, textureNames) { - var textureMap = {}; + // Create a piecewise 2D cumulative distribution function of light intensity from an envmap + // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions + function envmapDistribution(image) { + var data = image.data; + var cdfImage = { + width: image.width + 2, + height: image.height + 1 + }; + var cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); + + for (var y = 0; y < image.height; y++) { + var sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); + + for (var x = 0; x < image.width; x++) { + var i = 3 * (y * image.width + x); + var r = data[i]; + var g = data[i + 1]; + var b = data[i + 2]; + var luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; + luminance *= sinTheta; + cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); + cdf.set(x + 1, y, 1, luminance); + } + + var rowIntegral = cdf.get(cdfImage.width - 1, y, 0); + + for (var _x = 1; _x < cdf.width; _x++) { + cdf.set(_x, y, 0, cdf.get(_x, y, 0) / rowIntegral); + cdf.set(_x, y, 1, cdf.get(_x, y, 1) / rowIntegral); + } + + cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); + cdf.set(0, y, 1, rowIntegral); + } + + var integral = cdf.get(0, cdf.height - 1, 0); + + for (var _y = 0; _y < cdf.height; _y++) { + cdf.set(0, _y, 0, cdf.get(0, _y, 0) / integral); + cdf.set(0, _y, 1, cdf.get(0, _y, 1) / integral); + } + + cdfImage.data = cdf.array; + return cdfImage; + } + + function makeTextureArray(width, height, channels) { + var array = new Float32Array(channels * width * height); + return { + set: function set(x, y, channel, val) { + array[channels * (y * width + x) + channel] = val; + }, + get: function get(x, y, channel) { + return array[channels * 
(y * width + x) + channel]; + }, + width: width, + height: height, + channels: channels, + array: array + }; + } + + // Manually performs linear filtering if the extension OES_texture_float_linear is not supported + function textureLinear (defines) { + return "\n\n vec4 textureLinear(sampler2D map, vec2 uv) {\n #ifdef OES_texture_float_linear\n return texture(map, uv);\n #else\n vec2 size = vec2(textureSize(map, 0));\n vec2 texelSize = 1.0 / size;\n\n uv = uv * size - 0.5;\n vec2 f = fract(uv);\n uv = floor(uv) + 0.5;\n\n vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize);\n vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize);\n vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize);\n vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize);\n\n return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y);\n #endif\n }\n"; + } + + function intersect (defines) { + return "\n\nuniform highp isampler2D indices;\nuniform sampler2D positions;\nuniform sampler2D normals;\nuniform sampler2D uvs;\nuniform sampler2D bvh;\n\nuniform Materials {\n vec4 colorAndMaterialType[NUM_MATERIALS];\n vec4 roughnessMetalnessNormalScale[NUM_MATERIALS];\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS];\n #endif\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS)\n vec4 diffuseNormalMapSize[".concat(Math.max(defines.NUM_DIFFUSE_MAPS, defines.NUM_NORMAL_MAPS), "];\n #endif\n\n #if defined(NUM_PBR_MAPS)\n vec2 pbrMapSize[NUM_PBR_MAPS];\n #endif\n} materials;\n\n#ifdef NUM_DIFFUSE_MAPS\n uniform mediump sampler2DArray diffuseMap;\n#endif\n\n#ifdef NUM_NORMAL_MAPS\n uniform mediump sampler2DArray normalMap;\n#endif\n\n#ifdef NUM_PBR_MAPS\n uniform mediump sampler2DArray pbrMap;\n#endif\n\nstruct Triangle {\n vec3 p0;\n vec3 p1;\n vec3 p2;\n};\n\nvoid surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) {\n si.hit = true;\n si.faceNormal = faceNormal;\n si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2;\n ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS);\n ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS);\n ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS);\n\n vec3 n0 = texelFetch(normals, i0, 0).xyz;\n vec3 n1 = texelFetch(normals, i1, 0).xyz;\n vec3 n2 = texelFetch(normals, i2, 0).xyz;\n si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2);\n\n si.color = materials.colorAndMaterialType[materialIndex].xyz;\n si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x;\n si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y;\n\n si.materialType = int(materials.colorAndMaterialType[materialIndex].w);\n\n // TODO: meshId should be the actual mesh id instead of the material id, which can be shared amoung meshes.\n // This will involve storing the mesh id AND the material id in the BVH texture\n si.meshId = materialIndex + 1; // +1 so that the mesh id is never 0\n\n #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS)\n vec2 uv0 = texelFetch(uvs, i0, 0).xy;\n vec2 uv1 = texelFetch(uvs, i1, 0).xy;\n vec2 uv2 = texelFetch(uvs, i2, 0).xy;\n vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2);\n #endif\n\n #ifdef NUM_DIFFUSE_MAPS\n int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x;\n if (diffuseMapIndex >= 0) {\n si.color *= texture(diffuseMap, 
vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb;\n }\n #endif\n\n #ifdef NUM_NORMAL_MAPS\n int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y;\n if (normalMapIndex >= 0) {\n vec2 duv02 = uv0 - uv2;\n vec2 duv12 = uv1 - uv2;\n vec3 dp02 = tri.p0 - tri.p2;\n vec3 dp12 = tri.p1 - tri.p2;\n\n // Method One\n // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0\n // Compute tangent vectors relative to the face normal. These vectors won't necessarily be orthogonal to the smoothed normal\n // This means the TBN matrix won't be orthogonal which is technically incorrect.\n // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js)\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale);\n\n // Method Two\n // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal\n // This might inadvertently flip coordinate space orientation\n // --------------\n // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x);\n // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale);\n // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process\n // vec3 dpdv = cross(si.normal, dpdu) * scale;\n\n // Method Three\n // http://www.thetenthplanet.de/archives/1180\n // Compute co-tangent and co-bitangent vectors\n // These vectors are orthongal and maintain a consistent coordinate space\n // --------------\n vec3 dp12perp = cross(dp12, si.normal);\n vec3 dp02perp = cross(si.normal, dp02);\n vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x;\n vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y;\n float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv)));\n dpdu *= invmax;\n dpdv *= invmax;\n\n vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0;\n n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw;\n\n mat3 tbn = mat3(dpdu, dpdv, si.normal);\n\n si.normal = normalize(tbn * n);\n }\n #endif\n\n #ifdef NUM_PBR_MAPS\n int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z;\n int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w;\n if (roughnessMapIndex >= 0) {\n si.roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g;\n }\n if (metalnessMapIndex >= 0) {\n si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b;\n }\n #endif\n}\n\nstruct TriangleIntersect {\n float t;\n vec3 barycentric;\n};\n\n// Triangle-ray intersection\n// Faster than the classic M\xF6ller\u2013Trumbore intersection algorithm\n// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection\nTriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) {\n TriangleIntersect ti;\n vec3 d = r.d;\n\n // translate vertices based on ray origin\n vec3 p0t = tri.p0 - r.o;\n vec3 p1t = tri.p1 - r.o;\n vec3 p2t = tri.p2 - r.o;\n\n // permute components of triangle vertices\n if (maxDim == 0) {\n p0t = p0t.yzx;\n p1t = p1t.yzx;\n p2t = p2t.yzx;\n } else if (maxDim == 1) {\n p0t = p0t.zxy;\n p1t = 
p1t.zxy;\n p2t = p2t.zxy;\n }\n\n // apply shear transformation to translated vertex positions\n p0t.xy += shear.xy * p0t.z;\n p1t.xy += shear.xy * p1t.z;\n p2t.xy += shear.xy * p2t.z;\n\n // compute edge function coefficients\n vec3 e = vec3(\n p1t.x * p2t.y - p1t.y * p2t.x,\n p2t.x * p0t.y - p2t.y * p0t.x,\n p0t.x * p1t.y - p0t.y * p1t.x\n );\n\n // check if intersection is inside triangle\n if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) {\n return ti;\n }\n\n float det = e.x + e.y + e.z;\n\n // not needed?\n // if (det == 0.) {\n // return ti;\n // }\n\n p0t.z *= shear.z;\n p1t.z *= shear.z;\n p2t.z *= shear.z;\n float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z);\n\n // not needed?\n // if (sign(det) != sign(tScaled)) {\n // return ti;\n // }\n\n // check if closer intersection already exists\n if (abs(tScaled) > abs(r.tMax * det)) {\n return ti;\n }\n\n float invDet = 1. / det;\n ti.t = tScaled * invDet;\n ti.barycentric = e * invDet;\n\n return ti;\n}\n\nstruct Box {\n vec3 min;\n vec3 max;\n};\n\n// Branchless ray/box intersection\n// https://tavianator.com/fast-branchless-raybounding-box-intersections/\nfloat intersectBox(Ray r, Box b) {\n vec3 tBot = (b.min - r.o) * r.invD;\n vec3 tTop = (b.max - r.o) * r.invD;\n vec3 tNear = min(tBot, tTop);\n vec3 tFar = max(tBot, tTop);\n float t0 = max(tNear.x, max(tNear.y, tNear.z));\n float t1 = min(tFar.x, min(tFar.y, tFar.z));\n\n return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);\n}\n\nint maxDimension(vec3 v) {\n return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 1 : 2);\n}\n\n// Traverse BVH, find closest triangle intersection, and return surface information\nSurfaceInteraction intersectScene(inout Ray ray) {\n SurfaceInteraction si;\n\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n // Intersection is a bounding box. 
Test for box intersection and keep traversing BVH\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n // traverse near node to ray first, and far node to ray last\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear);\n\n if (hit.t > 0.0) {\n ray.tMax = hit.t;\n int materialIndex = floatBitsToInt(r2.w);\n vec3 faceNormal = r2.xyz;\n surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex);\n }\n }\n }\n\n // Values must be clamped outside of intersection loop. Clamping inside the loop produces incorrect numbers on some devices.\n si.roughness = clamp(si.roughness, 0.03, 1.0);\n si.metalness = clamp(si.metalness, 0.0, 1.0);\n\n return si;\n}\n\nbool intersectSceneShadow(inout Ray ray) {\n int maxDim = maxDimension(abs(ray.d));\n\n // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest.\n // Then create a shear transformation that aligns ray direction with the +z axis\n vec3 shear;\n if (maxDim == 0) {\n shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x;\n } else if (maxDim == 1) {\n shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y;\n } else {\n shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z;\n }\n\n int nodesToVisit[STACK_SIZE];\n int stack = 0;\n\n nodesToVisit[0] = 0;\n\n while(stack >= 0) {\n int i = nodesToVisit[stack--];\n\n vec4 r1 = fetchData(bvh, i, BVH_COLUMNS);\n vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS);\n\n int splitAxisOrNumPrimitives = floatBitsToInt(r1.w);\n\n if (splitAxisOrNumPrimitives >= 0) {\n int splitAxis = splitAxisOrNumPrimitives;\n\n Box bbox = Box(r1.xyz, r2.xyz);\n\n if (intersectBox(ray, bbox) > 0.0) {\n if (ray.d[splitAxis] > 0.0) {\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n nodesToVisit[++stack] = i + 2;\n } else {\n nodesToVisit[++stack] = i + 2;\n nodesToVisit[++stack] = floatBitsToInt(r2.w);\n }\n }\n } else {\n ivec3 index = floatBitsToInt(r1.xyz);\n Triangle tri = Triangle(\n fetchData(positions, index.x, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.y, VERTEX_COLUMNS).xyz,\n fetchData(positions, index.z, VERTEX_COLUMNS).xyz\n );\n\n if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) {\n return true;\n }\n }\n }\n\n return false;\n}\n"); + } + + function random (defines) { + return "\n\n// Noise texture used to generate a different random number for each pixel.\n// We use blue noise in particular, but any type of noise will work.\nuniform sampler2D noise;\n\nuniform float stratifiedSamples[SAMPLING_DIMENSIONS];\nuniform float strataSize;\n\n// Every time we call randomSample() in the shader, and for every call to render,\n// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples\n// This allows us to use stratified sampling for each random variable in our path tracing\nint sampleIndex = 0;\n\nconst highp float maxUint = 1.0 / 4294967295.0;\n\nfloat pixelSeed;\n\nvoid initRandom() {\n vec2 noiseSize = vec2(textureSize(noise, 0));\n\n // tile the small noise 
texture across the entire screen\n pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r;\n}\n\nfloat randomSample() {\n float stratifiedSample = stratifiedSamples[sampleIndex++];\n\n float random = fract((stratifiedSample + pixelSeed) * strataSize); // blue noise + stratified samples\n\n // transform random number between [0, 1] to (0, 1)\n return EPS + (1.0 - 2.0 * EPS) * random;\n}\n\nvec2 randomSampleVec2() {\n return vec2(randomSample(), randomSample());\n}\n"; + } + + // Sample the environment map using a cumulative distribution function as described in + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights + function envmap (defines) { + return "\n\nuniform sampler2D envmap;\nuniform sampler2D envmapDistribution;\nuniform sampler2D backgroundMap;\n\nvec2 cartesianToEquirect(vec3 pointOnSphere) {\n float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI);\n float theta = acos(pointOnSphere.y);\n return vec2(phi * 0.5 * INVPI, theta * INVPI);\n}\n\nfloat getEnvmapV(float u, out int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.y + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n vOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy;\n\n pdf = s0.y;\n\n return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y);\n}\n\nfloat getEnvmapU(float u, int vOffset, out float pdf) {\n ivec2 size = textureSize(envmap, 0);\n\n int left = 0;\n int right = size.x + 1; // cdf length is the length of the envmap + 1\n while (left < right) {\n int mid = (left + right) >> 1;\n float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x;\n if (s <= u) {\n left = mid + 1;\n } else {\n right = mid;\n }\n }\n int uOffset = left - 1;\n\n // x channel is cumulative distribution of envmap luminance\n // y channel is partial probability density of envmap luminance\n vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy;\n vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy;\n\n pdf = s0.y;\n\n return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x);\n}\n\n// Perform two binary searches to find light direction.\nvec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) {\n vec2 partialPdf;\n int vOffset;\n\n uv.y = getEnvmapV(random.x, vOffset, partialPdf.y);\n uv.x = getEnvmapU(random.y, vOffset, partialPdf.x);\n\n float phi = uv.x * TWOPI;\n float theta = uv.y * PI;\n float cosTheta = cos(theta);\n float sinTheta = sin(theta);\n float cosPhi = cos(phi);\n float sinPhi = sin(phi);\n\n vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi);\n\n pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta);\n\n return dir;\n}\n\nfloat envmapPdf(vec2 uv) {\n vec2 size = vec2(textureSize(envmap, 0));\n\n float sinTheta = sin(uv.y * PI);\n\n uv *= size;\n\n float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y;\n float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y;\n\n return partialX * partialY * INVPI2 / (2.0 * 
sinTheta);\n}\n\nvec3 sampleEnvmapFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(envmap, uv).rgb;\n}\n\nvec3 sampleBackgroundFromDirection(vec3 d) {\n vec2 uv = cartesianToEquirect(d);\n return textureLinear(backgroundMap, uv).rgb;\n}\n\n"; + } + + function bsdf (defines) { + return "\n\n// Computes the exact value of the Fresnel factor\n// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/\nfloat fresnel(float cosTheta, float eta, float invEta) {\n eta = cosTheta > 0.0 ? eta : invEta;\n cosTheta = abs(cosTheta);\n\n float gSquared = eta * eta + cosTheta * cosTheta - 1.0;\n\n if (gSquared < 0.0) {\n return 1.0;\n }\n\n float g = sqrt(gSquared);\n\n float a = (g - cosTheta) / (g + cosTheta);\n float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0);\n\n return 0.5 * a * a * (1.0 + b * b);\n}\n\nfloat fresnelSchlickWeight(float cosTheta) {\n float w = 1.0 - cosTheta;\n return (w * w) * (w * w) * w;\n}\n\n// Computes Schlick's approximation of the Fresnel factor\n// Assumes ray is moving from a less dense to a more dense medium\nfloat fresnelSchlick(float cosTheta, float r0) {\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\n// Computes Schlick's approximation of Fresnel factor\n// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium\nfloat fresnelSchlickTIR(float cosTheta, float r0, float ni) {\n\n // moving from a more dense to a less dense medium\n if (cosTheta < 0.0) {\n float inv_eta = ni;\n float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta);\n if (SinT2 > 1.0) {\n return 1.0; // total internal reflection\n }\n cosTheta = sqrt(1.0f - SinT2);\n }\n\n return mix(fresnelSchlickWeight(cosTheta), 1.0, r0);\n}\n\nfloat trowbridgeReitzD(float cosTheta, float alpha2) {\n float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0;\n return alpha2 / (PI * e * e);\n}\n\nfloat trowbridgeReitzLambda(float cosTheta, float alpha2) {\n float cos2Theta = cosTheta * cosTheta;\n float tan2Theta = (1.0 - cos2Theta) / cos2Theta;\n return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta));\n}\n\n// An implementation of Disney's principled BRDF\n// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf\nvec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) {\n vec3 halfVector = normalize(viewDir + lightDir);\n\n cosThetaL = abs(cosThetaL);\n float cosThetaV = abs(dot(si.normal, viewDir));\n float cosThetaH = abs(dot(si.normal, halfVector));\n float cosThetaD = abs(dot(lightDir, halfVector));\n\n float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness);\n\n float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness));\n float D = trowbridgeReitzD(cosThetaH, alpha2);\n\n float roughnessRemapped = 0.5 + 0.5 * si.roughness;\n float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped);\n\n float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped));\n\n float specular = F * D * G / (4.0 * cosThetaV * cosThetaL);\n float specularPdf = D * cosThetaH / (4.0 * cosThetaD);\n\n float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness;\n float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV));\n float diffusePdf = cosThetaL * INVPI;\n\n pdf = mix(0.5 * (specularPdf + diffusePdf), 
specularPdf, si.metalness);\n\n return mix(si.color * diffuse + specular, si.color * specular, si.metalness);\n}\n\n"; + } + + function sample (defines) { + return "\n\n// https://graphics.pixar.com/library/OrthonormalB/paper.pdf\nmat3 orthonormalBasis(vec3 n) {\n float zsign = n.z >= 0.0 ? 1.0 : -1.0;\n float a = -1.0 / (zsign + n.z);\n float b = n.x * n.y * a;\n vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x);\n vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y);\n return mat3(s, t, n);\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk\nvec2 sampleCircle(vec2 p) {\n p = 2.0 * p - 1.0;\n\n bool greater = abs(p.x) > abs(p.y);\n\n float r = greater ? p.x : p.y;\n float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y);\n\n return r * vec2(cos(theta), sin(theta));\n}\n\n// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling\nvec3 cosineSampleHemisphere(vec2 p) {\n vec2 h = sampleCircle(p);\n float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y));\n return vec3(h, z);\n}\n\n\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs\n// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper\nvec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) {\n float phi = TWOPI * random.y;\n float alpha = roughness * roughness;\n float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x));\n float sinTheta = sqrt(1.0 - cosTheta * cosTheta);\n\n vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta);\n\n vec3 lightDir = reflect(-viewDir, halfVector);\n\n return lightDir;\n}\n\nvec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) {\n return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random);\n}\n\nfloat powerHeuristic(float f, float g) {\n return (f * f) / (f * f + g * g);\n}\n\n"; + } + + // Estimate the direct lighting integral using multiple importance sampling + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral + function sampleMaterial (defines) { + return "\n\nvec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) {\n vec3 li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec3 irr = textureLinear(envmap, uv).xyz;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n li = brdf * irr * abs(cosThetaL) * weight / lightPdf;\n\n return li;\n}\n\nvec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) {\n vec3 li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * 
cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float diffuseWeight = 1.0;\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n if (lastBounce) {\n diffuseWeight = 0.0;\n } else {\n return li;\n }\n }\n\n vec2 uv = cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n vec3 irr = textureLinear(envmap, uv).rgb;\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf);\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf;\n\n return li;\n}\n\nvoid sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n\n vec2 diffuseOrSpecular = randomSampleVec2();\n\n vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n bool lastBounce = bounce == BOUNCES;\n\n // Add path contribution\n path.li += path.beta * (\n importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) +\n importanceSampleMaterial(si, viewDir, lastBounce, lightDir)\n );\n\n // Get new path direction\n\n lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ?\n lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) :\n lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float scatteringPdf;\n vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf);\n\n path.beta *= abs(cosThetaL) * brdf / scatteringPdf;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n}\n\n"; + } + + function sampleShadowCatcher (defines) { + return "\n\n#ifdef USE_SHADOW_CATCHER\n\nfloat importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) {\n float li;\n\n float lightPdf;\n vec2 uv;\n vec3 lightDir = sampleEnvmap(random, uv, lightPdf);\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(lightPdf, scatteringPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nfloat importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) {\n float li;\n\n float cosThetaL = dot(si.normal, lightDir);\n\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n if (orientation < 0.0) {\n return li;\n }\n\n float occluded = 1.0;\n\n Ray ray;\n initRay(ray, si.position + EPS * lightDir, lightDir);\n if (intersectSceneShadow(ray)) {\n occluded = 0.0;\n }\n\n vec2 uv = 
cartesianToEquirect(lightDir);\n\n float lightPdf = envmapPdf(uv);\n\n float irr = dot(luminance, textureLinear(envmap, uv).rgb);\n\n // lambertian BRDF\n float brdf = INVPI;\n float scatteringPdf = abs(cosThetaL) * INVPI;\n\n float weight = powerHeuristic(scatteringPdf, lightPdf);\n\n float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf;\n\n alpha += lightEq;\n li += occluded * lightEq;\n\n return li;\n}\n\nvoid sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) {\n mat3 basis = orthonormalBasis(si.normal);\n vec3 viewDir = -path.ray.d;\n vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir);\n\n vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float alphaBounce = 0.0;\n\n vec3 li = path.beta * color * (\n importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) +\n importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce)\n );\n\n // alphaBounce contains the lighting of the shadow catcher *without* shadows\n alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce;\n\n // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher\n path.alpha *= alphaBounce;\n\n // we only want the alpha division to affect the shadow catcher\n // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution\n path.li *= alphaBounce;\n\n // add path contribution\n path.li += li;\n\n // Get new path direction\n\n lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2());\n\n float cosThetaL = dot(si.normal, lightDir);\n\n // lambertian brdf with terms cancelled\n path.beta *= color;\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // If new ray direction is pointing into the surface,\n // the light path is physically impossible and we terminate the path.\n float orientation = dot(si.faceNormal, viewDir) * cosThetaL;\n path.abort = orientation < 0.0;\n\n path.specularBounce = false;\n\n // advance dimension index by unused stratified samples\n const int usedSamples = 6;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n}\n\n#endif\n"; + } + + function sampleGlass (defines) { + return "\n\n#ifdef USE_GLASS\n\nvoid sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) {\n vec3 viewDir = -path.ray.d;\n float cosTheta = dot(si.normal, viewDir);\n\n float F = si.materialType == THIN_GLASS ?\n fresnelSchlick(abs(cosTheta), R0) : // thin glass\n fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass\n\n vec3 lightDir;\n\n float reflectionOrRefraction = randomSample();\n\n if (reflectionOrRefraction < F) {\n lightDir = reflect(-viewDir, si.normal);\n } else {\n lightDir = si.materialType == THIN_GLASS ?\n refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass\n refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass\n path.beta *= si.color;\n }\n\n initRay(path.ray, si.position + EPS * lightDir, lightDir);\n\n // advance sample index by unused stratified samples\n const int usedSamples = 1;\n sampleIndex += SAMPLES_PER_MATERIAL - usedSamples;\n\n path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0);\n}\n\n#endif\n\n"; + } + + function unrollLoop(indexName, start, limit, step, code) { + var unrolled = "int ".concat(indexName, ";\n"); + + for (var i = start; step > 0 && i < limit || step < 0 && i > limit; i += step) { + unrolled += "".concat(indexName, " = ").concat(i, ";\n"); + unrolled += code; + } + + return unrolled; + } + function addDefines(params) { + var defines = ''; + + for (var _i = 0, _Object$entries = Object.entries(params); _i < _Object$entries.length; _i++) { + var _Object$entries$_i = _slicedToArray(_Object$entries[_i], 2), + name = _Object$entries$_i[0], + value = _Object$entries$_i[1]; + + // don't define falsy values such as false, 0, and ''. + // this adds support for #ifdef on falsy values + if (value) { + defines += "#define ".concat(name, " ").concat(value, "\n"); + } + } + + return defines; + } + + function fragString (_ref) { + var rayTracingRenderTargets = _ref.rayTracingRenderTargets, + defines = _ref.defines; + return "#version 300 es\n\nprecision mediump float;\nprecision mediump int;\n\n".concat(addDefines(defines), "\n\n").concat(rayTracingRenderTargets.set(), "\n\n#define PI 3.14159265359\n#define TWOPI 6.28318530718\n#define INVPI 0.31830988618\n#define INVPI2 0.10132118364\n#define EPS 0.0005\n#define INF 1.0e999\n#define RAY_MAX_DISTANCE 9999.0\n\n#define STANDARD 0\n#define THIN_GLASS 1\n#define THICK_GLASS 2\n#define SHADOW_CATCHER 3\n\n#define SAMPLES_PER_MATERIAL 8\n\nconst float IOR = 1.5;\nconst float INV_IOR = 1.0 / IOR;\n\nconst float IOR_THIN = 1.015;\nconst float INV_IOR_THIN = 1.0 / IOR_THIN;\n\nconst float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR));\n\n// https://www.w3.org/WAI/GL/wiki/Relative_luminance\nconst vec3 luminance = vec3(0.2126, 0.7152, 0.0722);\n\nstruct Ray {\n vec3 o;\n vec3 d;\n vec3 invD;\n float tMax;\n};\n\nstruct SurfaceInteraction {\n bool hit;\n vec3 position;\n vec3 normal; // smoothed normal from the three triangle vertices\n vec3 faceNormal; // normal of the triangle\n vec3 color;\n float roughness;\n float metalness;\n int materialType;\n int meshId;\n};\n\nstruct Camera {\n mat4 transform;\n float aspect;\n float fov;\n float focus;\n float aperture;\n};\n\nuniform Camera camera;\nuniform vec2 pixelSize; // 1 / screenResolution\nuniform vec2 jitter;\n\nin vec2 vCoord;\n\nvoid initRay(inout Ray ray, vec3 origin, vec3 direction) {\n ray.o = origin;\n ray.d = direction;\n ray.invD = 1.0 / ray.d;\n ray.tMax = RAY_MAX_DISTANCE;\n}\n\n// given the index from a 1D array, retrieve corresponding position from packed 2D texture\nivec2 unpackTexel(int i, int columnsLog2) {\n ivec2 u;\n u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2)\n u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2)\n return u;\n}\n\nvec4 fetchData(sampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n}\n\nivec4 fetchData(isampler2D s, int i, int columnsLog2) {\n return texelFetch(s, unpackTexel(i, columnsLog2), 0);\n}\n\nstruct Path {\n Ray ray;\n vec3 li;\n vec3 albedo;\n float alpha;\n vec3 beta;\n bool specularBounce;\n bool abort;\n};\n\n").concat(textureLinear(), "\n").concat(intersect(defines), "\n").concat(random(), "\n").concat(envmap(), "\n").concat(bsdf(), "\n").concat(sample(), "\n").concat(sampleMaterial(), "\n").concat(sampleGlass(), "\n").concat(sampleShadowCatcher(), "\n\nvoid bounce(inout Path path, int i, inout SurfaceInteraction si) {\n if (path.abort) {\n return;\n }\n\n si = 
intersectScene(path.ray);\n\n if (!si.hit) {\n if (path.specularBounce) {\n path.li += path.beta * sampleBackgroundFromDirection(path.ray.d);\n }\n\n path.abort = true;\n } else {\n #ifdef USE_GLASS\n if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) {\n sampleGlassSpecular(si, i, path);\n }\n #endif\n #ifdef USE_SHADOW_CATCHER\n if (si.materialType == SHADOW_CATCHER) {\n sampleShadowCatcher(si, i, path);\n }\n #endif\n if (si.materialType == STANDARD) {\n sampleMaterial(si, i, path);\n }\n\n // Russian Roulette sampling\n if (i >= 2) {\n float q = 1.0 - dot(path.beta, luminance);\n if (randomSample() < q) {\n path.abort = true;\n }\n path.beta /= 1.0 - q;\n }\n }\n}\n\n// Path tracing integrator as described in\n// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html#\nvec4 integrator(inout Ray ray, inout SurfaceInteraction si) {\n Path path;\n path.ray = ray;\n path.li = vec3(0);\n path.alpha = 1.0;\n path.beta = vec3(1.0);\n path.specularBounce = true;\n path.abort = false;\n\n bounce(path, 1, si);\n\n SurfaceInteraction indirectSi;\n\n // Manually unroll for loop.\n // Some hardware fails to interate over a GLSL loop, so we provide this workaround\n // for (int i = 1; i < defines.bounces + 1, i += 1)\n // equivelant to\n ").concat(unrollLoop('i', 2, defines.BOUNCES + 1, 1, "\n bounce(path, i, indirectSi);\n "), "\n\n return vec4(path.li, path.alpha);\n}\n\nvoid main() {\n initRandom();\n\n vec2 vCoordAntiAlias = vCoord + jitter;\n\n vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov));\n\n // Thin lens model with depth-of-field\n // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField\n // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2());\n // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane\n\n // vec3 origin = vec3(lensPoint, 0.0);\n // direction = normalize(focusPoint - origin);\n\n // origin = vec3(camera.transform * vec4(origin, 1.0));\n // direction = mat3(camera.transform) * direction;\n\n vec3 origin = camera.transform[3].xyz;\n direction = mat3(camera.transform) * direction;\n\n Ray cam;\n initRay(cam, origin, direction);\n\n SurfaceInteraction si;\n\n vec4 liAndAlpha = integrator(cam, si);\n\n if (dot(si.position, si.position) == 0.0) {\n si.position = origin + direction * RAY_MAX_DISTANCE;\n }\n\n if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) {\n liAndAlpha = vec4(0, 0, 0, 1);\n }\n\n out_light = liAndAlpha;\n out_position = vec4(si.position, si.meshId);\n\n // Stratified Sampling Sample Count Test\n // ---------------\n // Uncomment the following code\n // Then observe the colors of the image\n // If:\n // * The resulting image is pure black\n // Extra samples are being passed to the shader that aren't being used.\n // * The resulting image contains red\n // Not enough samples are being passed to the shader\n // * The resulting image contains only white with some black\n // All samples are used by the shader. 
Correct result!\n\n // fragColor = vec4(0, 0, 0, 1);\n // if (sampleIndex == SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 1, 1, 1);\n // } else if (sampleIndex > SAMPLING_DIMENSIONS) {\n // fragColor = vec4(1, 0, 0, 1);\n // }\n}\n"); + } + + function mergeMeshesToGeometry(meshes) { + var vertexCount = 0; + var indexCount = 0; + var geometryAndMaterialIndex = []; + var materialIndexMap = new Map(); var _iteratorNormalCompletion = true; var _didIteratorError = false; var _iteratorError = undefined; try { - for (var _iterator = textureNames[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var name = _step.value; - var textures = []; - textureMap[name] = { - indices: texturesFromMaterials(meshes, name, textures), - textures: textures - }; + for (var _iterator = meshes[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var mesh = _step.value; + + var _geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); + + var index = _geometry.getIndex(); + + if (!index) { + addFlatGeometryIndices(_geometry); + } + + _geometry.applyMatrix(mesh.matrixWorld); + + if (!_geometry.getAttribute('normal')) { + _geometry.computeVertexNormals(); + } else { + _geometry.normalizeNormals(); + } + + vertexCount += _geometry.getAttribute('position').count; + indexCount += _geometry.getIndex().count; + var material = mesh.material; + var materialIndex = materialIndexMap.get(material); + + if (materialIndex === undefined) { + materialIndex = materialIndexMap.size; + materialIndexMap.set(material, materialIndex); + } + + geometryAndMaterialIndex.push({ + geometry: _geometry, + materialIndex: materialIndex + }); } } catch (err) { _didIteratorError = true; @@ -1404,22 +1300,55 @@ } } - return textureMap; - } // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties + var _mergeGeometry = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount), + geometry = _mergeGeometry.geometry, + materialIndices = _mergeGeometry.materialIndices; - function mergeTexturesFromMaterials(meshes, textureNames) { - var textureMap = { - textures: [], - indices: {} + return { + geometry: geometry, + materialIndices: materialIndices, + materials: Array.from(materialIndexMap.keys()) }; + } + + function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { + var position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + var normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + var uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); + var index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); + var materialIndices = []; + var bg = new THREE$1.BufferGeometry(); + bg.addAttribute('position', position); + bg.addAttribute('normal', normal); + bg.addAttribute('uv', uv); + bg.setIndex(index); + var currentVertex = 0; + var currentIndex = 0; var _iteratorNormalCompletion2 = true; var _didIteratorError2 = false; var _iteratorError2 = undefined; try { - for (var _iterator2 = textureNames[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var name = _step2.value; - textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + for (var _iterator2 = geometryAndMaterialIndex[Symbol.iterator](), _step2; 
!(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var _step2$value = _step2.value, + geometry = _step2$value.geometry, + materialIndex = _step2$value.materialIndex; + var _vertexCount = geometry.getAttribute('position').count; + bg.merge(geometry, currentVertex); + var meshIndex = geometry.getIndex(); + + for (var i = 0; i < meshIndex.count; i++) { + index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); + } + + var triangleCount = meshIndex.count / 3; + + for (var _i = 0; _i < triangleCount; _i++) { + materialIndices.push(materialIndex); + } + + currentVertex += _vertexCount; + currentIndex += meshIndex.count; } } catch (err) { _didIteratorError2 = true; @@ -1436,38 +1365,27 @@ } } - return textureMap; - } + return { + geometry: bg, + materialIndices: materialIndices + }; + } // Similar to buffergeometry.clone(), except we only copy + // specific attributes instead of everything - function texturesFromMaterials(materials, textureName, textures) { - var indices = []; + + function cloneBufferGeometry(bufferGeometry, attributes) { + var newGeometry = new THREE$1.BufferGeometry(); var _iteratorNormalCompletion3 = true; var _didIteratorError3 = false; var _iteratorError3 = undefined; try { - for (var _iterator3 = materials[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { - var material = _step3.value; - - if (!material[textureName]) { - indices.push(-1); - } else { - var index = textures.length; - - for (var i = 0; i < textures.length; i++) { - if (textures[i] === material[textureName]) { - // Reuse existing duplicate texture. - index = i; - break; - } - } - - if (index === textures.length) { - // New texture. Add texture to list. - textures.push(material[textureName]); - } + for (var _iterator3 = attributes[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { + var name = _step3.value; + var attrib = bufferGeometry.getAttribute(name); - indices.push(index); + if (attrib) { + newGeometry.addAttribute(name, attrib.clone()); } } } catch (err) { @@ -1485,7 +1403,173 @@ } } - return indices; + var index = bufferGeometry.getIndex(); + + if (index) { + newGeometry.setIndex(index); + } + + return newGeometry; + } + + function addFlatGeometryIndices(geometry) { + var position = geometry.getAttribute('position'); + + if (!position) { + console.warn('No position attribute'); + return; + } + + var index = new Uint32Array(position.count); + + for (var i = 0; i < index.length; i++) { + index[i] = i; + } + + geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + return geometry; + } + + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + + Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, + instead of being evenly spaced across the domain. + This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. + + We can reduce the amount of clustering of random numbers by using stratified sampling. + Stratification divides the [0, 1) range into partitions, or stratum, of equal size. + Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. 
+ When every stratum has been sampled once, this sequence is shuffled again and the process repeats. + + The returned sample ranges between [0, numberOfStratum). + The integer part ideintifies the stratum (the first stratum being 0). + The fractional part is the random number. + + To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. + */ + function makeStratifiedSampler(strataCount, dimensions) { + var strata = []; + var l = Math.pow(strataCount, dimensions); + + for (var i = 0; i < l; i++) { + strata[i] = i; + } + + var index = strata.length; + var sample = []; + + function restart() { + index = 0; + } + + function next() { + if (index >= strata.length) { + shuffle(strata); + restart(); + } + + var stratum = strata[index++]; + + for (var _i = 0; _i < dimensions; _i++) { + sample[_i] = stratum % strataCount + Math.random(); + stratum = Math.floor(stratum / strataCount); + } + + return sample; + } + + return { + next: next, + restart: restart, + strataCount: strataCount + }; + } + + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + + It is computationally unfeasible to compute stratified sampling for large dimensions (>2) + Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension + e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. + This reaps many benefits of stratification while still allowing for small strata sizes. + */ + function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { + var strataObjs = []; + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = listOfDimensions[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var dim = _step.value; + strataObjs.push(makeStratifiedSampler(strataCount, dim)); + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator["return"] != null) { + _iterator["return"](); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + var combined = []; + + function next() { + var i = 0; + + for (var _i = 0, _strataObjs = strataObjs; _i < _strataObjs.length; _i++) { + var strata = _strataObjs[_i]; + var nums = strata.next(); + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; + + try { + for (var _iterator2 = nums[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var num = _step2.value; + combined[i++] = num; + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } + } + } + + return combined; + } + + function restart() { + for (var _i2 = 0, _strataObjs2 = strataObjs; _i2 < _strataObjs2.length; _i2++) { + var strata = _strataObjs2[_i2]; + strata.restart(); + } + } + + return { + next: next, + restart: restart, + strataCount: strataCount + }; } function makeTexture(gl, params) { @@ -1495,6 +1579,8 @@ height = _params$height === void 0 ? null : _params$height, _params$data = params.data, data = _params$data === void 0 ? 
null : _params$data, + _params$length = params.length, + length = _params$length === void 0 ? 1 : _params$length, _params$channels = params.channels, channels = _params$channels === void 0 ? null : _params$channels, _params$storage = params.storage, @@ -1520,11 +1606,9 @@ if (Array.isArray(data)) { dataArray = data; data = dataArray[0]; - target = gl.TEXTURE_2D_ARRAY; - } else { - target = gl.TEXTURE_2D; } + target = dataArray || length > 1 ? gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; gl.activeTexture(gl.TEXTURE0); gl.bindTexture(target, texture); gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); @@ -1564,9 +1648,16 @@ gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, Array.isArray(flipY) ? flipY[i] : flipY); gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); } + } else if (length > 1) { + // create empty array texture + gl.texStorage3D(target, 1, internalFormat, width, height, length); } else { gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); - gl.texImage2D(target, 0, internalFormat, width, height, 0, format, type, data); + gl.texStorage2D(target, 1, internalFormat, width, height); + + if (data) { + gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); + } } // return state to default @@ -1577,6 +1668,121 @@ }; } + // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property + function getTexturesFromMaterials(meshes, textureNames) { + var textureMap = {}; + var _iteratorNormalCompletion = true; + var _didIteratorError = false; + var _iteratorError = undefined; + + try { + for (var _iterator = textureNames[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { + var name = _step.value; + var textures = []; + textureMap[name] = { + indices: texturesFromMaterials(meshes, name, textures), + textures: textures + }; + } + } catch (err) { + _didIteratorError = true; + _iteratorError = err; + } finally { + try { + if (!_iteratorNormalCompletion && _iterator["return"] != null) { + _iterator["return"](); + } + } finally { + if (_didIteratorError) { + throw _iteratorError; + } + } + } + + return textureMap; + } // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties + + function mergeTexturesFromMaterials(meshes, textureNames) { + var textureMap = { + textures: [], + indices: {} + }; + var _iteratorNormalCompletion2 = true; + var _didIteratorError2 = false; + var _iteratorError2 = undefined; + + try { + for (var _iterator2 = textureNames[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { + var name = _step2.value; + textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + } + } catch (err) { + _didIteratorError2 = true; + _iteratorError2 = err; + } finally { + try { + if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { + _iterator2["return"](); + } + } finally { + if (_didIteratorError2) { + throw _iteratorError2; + } + } + } + + return textureMap; + } + + function texturesFromMaterials(materials, textureName, textures) { + var indices = []; + var _iteratorNormalCompletion3 = true; + var _didIteratorError3 = false; + var _iteratorError3 = undefined; + + try { + for (var _iterator3 = materials[Symbol.iterator](), _step3; !(_iteratorNormalCompletion3 = (_step3 = _iterator3.next()).done); _iteratorNormalCompletion3 = true) { + var material = _step3.value; + + if 
(!material[textureName]) { + indices.push(-1); + } else { + var index = textures.length; + + for (var i = 0; i < textures.length; i++) { + if (textures[i] === material[textureName]) { + // Reuse existing duplicate texture. + index = i; + break; + } + } + + if (index === textures.length) { + // New texture. Add texture to list. + textures.push(material[textureName]); + } + + indices.push(index); + } + } + } catch (err) { + _didIteratorError3 = true; + _iteratorError3 = err; + } finally { + try { + if (!_iteratorNormalCompletion3 && _iterator3["return"] != null) { + _iterator3["return"](); + } + } finally { + if (_didIteratorError3) { + throw _iteratorError3; + } + } + } + + return indices; + } + // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout function uploadBuffers(gl, program, bufferData) { @@ -1655,170 +1861,69 @@ function interleave() { for (var _len = arguments.length, arrays = new Array(_len), _key = 0; _key < _len; _key++) { arrays[_key] = arguments[_key]; - } - - var maxLength = arrays.reduce(function (m, a) { - return Math.max(m, a.data.length / a.channels); - }, 0); - var interleaved = []; - - for (var i = 0; i < maxLength; i++) { - for (var j = 0; j < arrays.length; j++) { - var _arrays$j = arrays[j], - data = _arrays$j.data, - channels = _arrays$j.channels; - - for (var c = 0; c < channels; c++) { - interleaved.push(data[i * channels + c]); - } - } - } - - return interleaved; - } - - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - - Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, - instead of being evenly spaced across the domain. - This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. - - We can reduce the amount of clustering of random numbers by using stratified sampling. - Stratification divides the [0, 1) range into partitions, or stratum, of equal size. - Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. - When every stratum has been sampled once, this sequence is shuffled again and the process repeats. - - The returned sample ranges between [0, numberOfStratum). - The integer part ideintifies the stratum (the first stratum being 0). - The fractional part is the random number. - - To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. 
- */ - function makeStratifiedSampler(strataCount, dimensions) { - var strata = []; - var l = Math.pow(strataCount, dimensions); - - for (var i = 0; i < l; i++) { - strata[i] = i; - } - - var index = strata.length; - var sample = []; - - function restart() { - index = 0; - } - - function next() { - if (index >= strata.length) { - shuffle(strata); - restart(); - } - - var stratum = strata[index++]; - - for (var _i = 0; _i < dimensions; _i++) { - sample[_i] = stratum % strataCount + Math.random(); - stratum = Math.floor(stratum / strataCount); - } - - return sample; - } - - return { - next: next, - restart: restart, - strataCount: strataCount - }; - } + } - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html + var maxLength = arrays.reduce(function (m, a) { + return Math.max(m, a.data.length / a.channels); + }, 0); + var interleaved = []; - It is computationally unfeasible to compute stratified sampling for large dimensions (>2) - Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension - e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. - This reaps many benefits of stratification while still allowing for small strata sizes. - */ - function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { - var strataObjs = []; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; + for (var i = 0; i < maxLength; i++) { + for (var j = 0; j < arrays.length; j++) { + var _arrays$j = arrays[j], + data = _arrays$j.data, + channels = _arrays$j.channels; - try { - for (var _iterator = listOfDimensions[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var dim = _step.value; - strataObjs.push(makeStratifiedSampler(strataCount, dim)); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; + for (var c = 0; c < channels; c++) { + interleaved.push(data[i * channels + c]); } } } - var combined = []; + return interleaved; + } - function next() { - var i = 0; + // targets is array of { name: string, storage: 'byte' | 'float'} + function makeRenderTargets(_ref) { + var storage = _ref.storage, + names = _ref.names; + var location = {}; - for (var _i = 0, _strataObjs = strataObjs; _i < _strataObjs.length; _i++) { - var strata = _strataObjs[_i]; - var nums = strata.next(); - var _iteratorNormalCompletion2 = true; - var _didIteratorError2 = false; - var _iteratorError2 = undefined; + for (var i = 0; i < names.length; i++) { + location[names[i]] = i; + } - try { - for (var _iterator2 = nums[Symbol.iterator](), _step2; !(_iteratorNormalCompletion2 = (_step2 = _iterator2.next()).done); _iteratorNormalCompletion2 = true) { - var num = _step2.value; - combined[i++] = num; - } - } catch (err) { - _didIteratorError2 = true; - _iteratorError2 = err; - } finally { - try { - if (!_iteratorNormalCompletion2 && _iterator2["return"] != null) { - _iterator2["return"](); - } - } finally { - if (_didIteratorError2) { - throw _iteratorError2; - } - } + return { + isRenderTargets: true, + storage: storage, + names: names, + location: location, + get: function get(textureName) { + var inputs = ''; + inputs += "uniform mediump sampler2DArray ".concat(textureName, ";\n"); + + 
for (var _i = 0; _i < names.length; _i++) { + inputs += "#define ".concat(textureName, "_").concat(names[_i], " ").concat(_i, "\n"); } - } - return combined; - } + return inputs; + }, + set: function set() { + var outputs = ''; - function restart() { - for (var _i2 = 0, _strataObjs2 = strataObjs; _i2 < _strataObjs2.length; _i2++) { - var strata = _strataObjs2[_i2]; - strata.restart(); - } - } + for (var _i2 = 0; _i2 < names.length; _i2++) { + outputs += "layout(location = ".concat(_i2, ") out vec4 out_").concat(names[_i2], ";\n"); + } - return { - next: next, - restart: restart, - strataCount: strataCount + return outputs; + } }; } + var rayTracingRenderTargets = makeRenderTargets({ + storage: 'float', + names: ['light', 'position'] + }); function makeRayTracingShader(_ref) { var bounces = _ref.bounces, fullscreenQuad = _ref.fullscreenQuad, @@ -1828,19 +1933,20 @@ textureAllocator = _ref.textureAllocator; bounces = clamp(bounces, 1, 6); var samplingDimensions = []; - samplingDimensions.push(2, 2); // anti aliasing, depth of field - for (var i = 0; i < bounces; i++) { + for (var i = 1; i <= bounces; i++) { // specular or diffuse reflection, light importance sampling, material sampling, next path direction samplingDimensions.push(2, 2, 2, 2); - if (i >= 1) { + if (i >= 2) { // russian roulette sampling // this step is skipped on the first bounce samplingDimensions.push(1); } } + var samples; + var _makeProgramFromScene = makeProgramFromScene({ bounces: bounces, fullscreenQuad: fullscreenQuad, @@ -1877,7 +1983,10 @@ gl.uniform1f(uniforms['camera.aperture'], camera.aperture || 0); } - var samples; + function setJitter(x, y) { + gl.useProgram(program); + gl.uniform2f(uniforms.jitter, x, y); + } function nextSeed() { gl.useProgram(program); @@ -1900,11 +2009,6 @@ nextSeed(); } - function useStratifiedSampling(stratifiedSampling) { - gl.useProgram(program); - gl.uniform1f(uniforms.useStratifiedSampling, stratifiedSampling ? 
1.0 : 0.0); - } - function draw() { gl.useProgram(program); fullscreenQuad.draw(); @@ -1915,10 +2019,10 @@ draw: draw, nextSeed: nextSeed, setCamera: setCamera, + setJitter: setJitter, setNoise: setNoise, setSize: setSize, - setStrataCount: setStrataCount, - useStratifiedSampling: useStratifiedSampling + setStrataCount: setStrataCount }; } @@ -1935,6 +2039,7 @@ var _decomposeScene = decomposeScene(scene), meshes = _decomposeScene.meshes, directionalLights = _decomposeScene.directionalLights, + ambientLights = _decomposeScene.ambientLights, environmentLights = _decomposeScene.environmentLights; if (meshes.length === 0) { @@ -1961,22 +2066,25 @@ return m.shadowCatcher; }); var fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString({ - OES_texture_float_linear: OES_texture_float_linear, - BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, - INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, - VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, - STACK_SIZE: flattenedBvh.maxDepth, - NUM_TRIS: numTris, - NUM_MATERIALS: materials.length, - NUM_DIFFUSE_MAPS: maps.map.textures.length, - NUM_NORMAL_MAPS: maps.normalMap.textures.length, - NUM_PBR_MAPS: pbrMap.textures.length, - BOUNCES: bounces, - USE_GLASS: useGlass, - USE_SHADOW_CATCHER: useShadowCatcher, - SAMPLING_DIMENSIONS: samplingDimensions.reduce(function (a, b) { - return a + b; - }) + rayTracingRenderTargets: rayTracingRenderTargets, + defines: { + OES_texture_float_linear: OES_texture_float_linear, + BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, + INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, + VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, + STACK_SIZE: flattenedBvh.maxDepth, + NUM_TRIS: numTris, + NUM_MATERIALS: materials.length, + NUM_DIFFUSE_MAPS: maps.map.textures.length, + NUM_NORMAL_MAPS: maps.normalMap.textures.length, + NUM_PBR_MAPS: pbrMap.textures.length, + BOUNCES: bounces, + USE_GLASS: useGlass, + USE_SHADOW_CATCHER: useShadowCatcher, + SAMPLING_DIMENSIONS: samplingDimensions.reduce(function (a, b) { + return a + b; + }) + } })); var program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); gl.useProgram(program); @@ -2040,14 +2148,31 @@ textureAllocator.bind(uniforms.normals, makeDataTexture(gl, geometry.getAttribute('normal').array, 3)); textureAllocator.bind(uniforms.uvs, makeDataTexture(gl, geometry.getAttribute('uv').array, 2)); textureAllocator.bind(uniforms.bvh, makeDataTexture(gl, flattenedBvh.buffer, 4)); - var envImage = generateEnvMapFromSceneComponents(directionalLights, environmentLights); - textureAllocator.bind(uniforms.envmap, makeTexture(gl, { + var envImage = generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights); + var envImageTextureObject = makeTexture(gl, { data: envImage.data, minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, width: envImage.width, height: envImage.height - })); + }); + textureAllocator.bind(uniforms.envmap, envImageTextureObject); + var backgroundImageTextureObject; + + if (scene.background) { + var backgroundImage = generateBackgroundMapFromSceneBackground(scene.background); + backgroundImageTextureObject = makeTexture(gl, { + data: backgroundImage.data, + minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, + magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, + width: backgroundImage.width, + height: backgroundImage.height + }); + } else { + backgroundImageTextureObject = envImageTextureObject; + } + + textureAllocator.bind(uniforms.backgroundMap, backgroundImageTextureObject); var distribution = envmapDistribution(envImage); textureAllocator.bind(uniforms.envmapDistribution, makeTexture(gl, { data: distribution.data, @@ -2065,6 +2190,7 @@ function decomposeScene(scene) { var meshes = []; var directionalLights = []; + var ambientLights = []; var environmentLights = []; scene.traverse(function (child) { if (child.isMesh) { @@ -2081,19 +2207,27 @@ directionalLights.push(child); } + if (child.isAmbientLight) { + ambientLights.push(child); + } + if (child.isEnvironmentLight) { if (environmentLights.length > 1) { console.warn(environmentLights, 'only one environment light can be used per scene'); - } else if (isHDRTexture(child)) { + } // Valid lights have HDR texture map in RGBEEncoding + + + if (isHDRTexture(child)) { environmentLights.push(child); } else { - console.warn(child, 'environment light does not use THREE.RGBEEncoding'); + console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); } } }); return { meshes: meshes, directionalLights: directionalLights, + ambientLights: ambientLights, environmentLights: environmentLights }; } @@ -2222,8 +2356,10 @@ return texture.map && texture.map.image && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); } - function fragString$1 (defines) { - return "#version 300 es\n\nprecision mediump float;\nprecision mediump int;\n\nin vec2 vCoord;\n\nout vec4 fragColor;\n\nuniform sampler2D image;\n\n".concat(textureLinear(), "\n\n// Tonemapping functions from THREE.js\n\nvec3 linear(vec3 color) {\n return color;\n}\n// https://www.cs.utah.edu/~reinhard/cdrom/\nvec3 reinhard(vec3 color) {\n return clamp(color / (vec3(1.0) + color), vec3(0.0), vec3(1.0));\n}\n// http://filmicworlds.com/blog/filmic-tonemapping-operators/\n#define uncharted2Helper(x) max(((x * (0.15 * x + 0.10 * 0.50) + 0.20 * 0.02) / (x * (0.15 * x + 0.50) + 0.20 * 0.30)) - 0.02 / 0.30, vec3(0.0))\nconst vec3 uncharted2WhitePoint = 1.0 / uncharted2Helper(vec3(").concat(defines.whitePoint, "));\nvec3 uncharted2( vec3 color ) {\n // John Hable's filmic operator from Uncharted 2 video game\n return clamp(uncharted2Helper(color) * uncharted2WhitePoint, vec3(0.0), vec3(1.0));\n}\n// http://filmicworlds.com/blog/filmic-tonemapping-operators/\nvec3 cineon( vec3 color ) {\n // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson\n color = max(vec3( 0.0 ), color - 0.004);\n return pow((color * (6.2 * color + 0.5)) / (color * (6.2 * color + 1.7) + 0.06), vec3(2.2));\n}\n// https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/\nvec3 acesFilmic( vec3 color ) {\n return clamp((color * (2.51 * color + 0.03)) / (color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0));\n}\n\nvoid main() {\n vec4 tex = textureLinear(image, vCoord);\n\n // alpha channel stores the number of samples progressively rendered\n // divide the sum of light by alpha to obtain average contribution of light\n\n // in addition, alpha contains a scale factor for the shadow catcher material\n // dividing by alpha normalizes the brightness of the shadow catcher to match the background envmap.\n vec3 light = tex.rgb / tex.a;\n\n light *= ").concat(defines.exposure, "; // exposure\n\n light = ").concat(defines.toneMapping, "(light); // tone 
mapping\n\n light = pow(light, vec3(1.0 / 2.2)); // gamma correction\n\n fragColor = vec4(light, 1.0);\n}\n\n"); + function fragString$1 (_ref) { + var rayTracingRenderTargets = _ref.rayTracingRenderTargets, + defines = _ref.defines; + return "#version 300 es\n\nprecision mediump float;\nprecision mediump int;\n\nin vec2 vCoord;\n\nout vec4 fragColor;\n\n".concat(rayTracingRenderTargets.get('hdrBuffer'), "\n\n").concat(textureLinear(), "\n\n// Tonemapping functions from THREE.js\n\nvec3 linear(vec3 color) {\n return color;\n}\n// https://www.cs.utah.edu/~reinhard/cdrom/\nvec3 reinhard(vec3 color) {\n return clamp(color / (vec3(1.0) + color), vec3(0.0), vec3(1.0));\n}\n// http://filmicworlds.com/blog/filmic-tonemapping-operators/\n#define uncharted2Helper(x) max(((x * (0.15 * x + 0.10 * 0.50) + 0.20 * 0.02) / (x * (0.15 * x + 0.50) + 0.20 * 0.30)) - 0.02 / 0.30, vec3(0.0))\nconst vec3 uncharted2WhitePoint = 1.0 / uncharted2Helper(vec3(").concat(defines.whitePoint, "));\nvec3 uncharted2( vec3 color ) {\n // John Hable's filmic operator from Uncharted 2 video game\n return clamp(uncharted2Helper(color) * uncharted2WhitePoint, vec3(0.0), vec3(1.0));\n}\n// http://filmicworlds.com/blog/filmic-tonemapping-operators/\nvec3 cineon( vec3 color ) {\n // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson\n color = max(vec3( 0.0 ), color - 0.004);\n return pow((color * (6.2 * color + 0.5)) / (color * (6.2 * color + 1.7) + 0.06), vec3(2.2));\n}\n// https://knarkowicz.wordpress.com/2016/01/06/aces-filmic-tone-mapping-curve/\nvec3 acesFilmic( vec3 color ) {\n return clamp((color * (2.51 * color + 0.03)) / (color * (2.43 * color + 0.59) + 0.14), vec3(0.0), vec3(1.0));\n}\n\nvoid main() {\n vec4 tex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_light));\n\n // alpha channel stores the number of samples progressively rendered\n // divide the sum of light by alpha to obtain average contribution of light\n\n // in addition, alpha contains a scale factor for the shadow catcher material\n // dividing by alpha normalizes the brightness of the shadow catcher to match the background envmap.\n vec3 light = tex.rgb / tex.a;\n\n light *= ").concat(defines.exposure, "; // exposure\n\n light = ").concat(defines.toneMapping, "(light); // tone mapping\n\n light = pow(light, vec3(1.0 / 2.2)); // gamma correction\n\n fragColor = vec4(light, 1.0);\n}\n\n"); } var _toneMapFunctions; @@ -2239,20 +2375,22 @@ whitePoint = toneMappingParams.whitePoint, exposure = toneMappingParams.exposure; var fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString$1({ - OES_texture_float_linear: OES_texture_float_linear, - toneMapping: toneMapFunctions[toneMapping] || 'linear', - whitePoint: whitePoint.toExponential(), - // toExponential allows integers to be represented as GLSL floats - exposure: exposure.toExponential() + rayTracingRenderTargets: rayTracingRenderTargets, + defines: { + OES_texture_float_linear: OES_texture_float_linear, + toneMapping: toneMapFunctions[toneMapping] || 'linear', + whitePoint: whitePoint.toExponential(), + // toExponential allows integers to be represented as GLSL floats + exposure: exposure.toExponential() + } })); var program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); var uniforms = getUniforms(gl, program); - var image = textureAllocator.reserveSlot(); + var hdrBufferLocation = textureAllocator.reserveSlot(); - function draw(_ref) { - var texture = _ref.texture; + function draw(texture) { gl.useProgram(program); - image.bind(uniforms.image, texture); + 
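
// --- Editor's sketch: why the tone-map pass divides rgb by alpha ---
// The fragment shader above averages the accumulated light by dividing
// tex.rgb by tex.a, because the HDR buffer stores the running sum of samples
// in rgb and (nominally) the sample count in alpha; the real shader also
// folds a shadow-catcher scale into alpha. A plain-JS stand-in for one pixel,
// with tone mapping omitted for brevity:
function makeAccumulator() {
  return { r: 0, g: 0, b: 0, a: 0 };
}

function addSample(pixel, r, g, b) {
  // additive blending (gl.ONE, gl.ONE), each sample contributing 1 to alpha
  pixel.r += r;
  pixel.g += g;
  pixel.b += b;
  pixel.a += 1;
}

function resolve(pixel, exposure = 1) {
  const scale = exposure / pixel.a;          // divide the sum by the count
  const gamma = (x) => Math.pow(x * scale, 1 / 2.2); // matches pow(light, 1/2.2)
  return [gamma(pixel.r), gamma(pixel.g), gamma(pixel.b)];
}

const px = makeAccumulator();
addSample(px, 0.9, 0.5, 0.2);
addSample(px, 1.1, 0.7, 0.4);
console.log(resolve(px)); // gamma-encoded average of the two samples
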
hdrBufferLocation.bind(uniforms.hdrBuffer, texture); fullscreenQuad.draw(); } @@ -2284,10 +2422,12 @@ width = Math.floor(w); height = Math.floor(h); - if (Array.isArray(renderTarget)) { - texture = initMultipleTextures(gl, width, height, linearFiltering, renderTarget); + if (renderTarget.isRenderTargets) { + // RenderTargets object + texture = initArrayTexture(gl, width, height, linearFiltering, renderTarget); } else { - texture = initSingleTexture(gl, width, height, linearFiltering, renderTarget); + // single render target in the form { storage } + texture = initTexture(gl, width, height, linearFiltering, renderTarget); } this.unbind(); @@ -2322,7 +2462,7 @@ }; } - function initSingleTexture(gl, width, height, linearFiltering, _ref) { + function initTexture(gl, width, height, linearFiltering, _ref) { var storage = _ref.storage; var texture = makeTexture(gl, { width: width, @@ -2336,44 +2476,23 @@ return texture; } - function initMultipleTextures(gl, width, height, linearFiltering, renderTargets) { - var texture = {}; + function initArrayTexture(gl, width, height, linearFiltering, _ref2) { + var storage = _ref2.storage, + names = _ref2.names; var drawBuffers = []; - var _iteratorNormalCompletion = true; - var _didIteratorError = false; - var _iteratorError = undefined; + var texture = makeTexture(gl, { + width: width, + height: height, + length: names.length, + storage: storage, + minFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, + magFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, + channels: 4 + }); - try { - for (var _iterator = renderTargets.targets[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) { - var _step$value = _step.value, - name = _step$value.name, - storage = _step$value.storage, - index = _step$value.index; - var t = makeTexture(gl, { - width: width, - height: height, - storage: storage, - minFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, - magFilter: linearFiltering ? 
gl.LINEAR : gl.NEAREST, - channels: 4 - }); - gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + index, t.target, t.texture, 0); - texture[name] = t; - drawBuffers.push(gl.COLOR_ATTACHMENT0 + index); - } - } catch (err) { - _didIteratorError = true; - _iteratorError = err; - } finally { - try { - if (!_iteratorNormalCompletion && _iterator["return"] != null) { - _iterator["return"](); - } - } finally { - if (_didIteratorError) { - throw _iteratorError; - } - } + for (var i = 0; i < names.length; i++) { + gl.framebufferTextureLayer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i, texture.texture, 0, i); + drawBuffers.push(gl.COLOR_ATTACHMENT0 + i); } gl.drawBuffers(drawBuffers); @@ -2402,31 +2521,16 @@ var pixelsPerTile = pixelsPerTileEstimate(gl); var pixelsPerTileQuantized = pixelsPerTile; - var desiredTimePerTile = 22; // 45 fps - - var timePerPixelSum = desiredTimePerTile / pixelsPerTile; - var samples = 1; - var resetSum = true; - - function addToTimePerPixel(t) { - if (resetSum) { - timePerPixelSum = 0; - samples = 0; - resetSum = false; - } + var desiredTimePerTile = 20; + var timePerPixel = desiredTimePerTile / pixelsPerTile; - timePerPixelSum += t; - samples++; - } - - function getTimePerPixel() { - return timePerPixelSum / samples; + function restartTimer() { + firstTileTime = 0; } function reset() { currentTile = -1; firstTileTime = 0; - resetSum = true; } function setSize(w, h) { @@ -2450,12 +2554,11 @@ if (firstTileTime) { var timeElapsed = Date.now() - firstTileTime; var timePerTile = timeElapsed / numTiles; - var error = desiredTimePerTile - timePerTile; // higher number means framerate converges to targetRenderTime faster - // if set too high, the framerate fluctuates rapidly with small variations in frame-by-frame performance - - var convergenceStrength = 1000; - pixelsPerTile = pixelsPerTile + convergenceStrength * error; - addToTimePerPixel(timePerTile / pixelsPerTileQuantized); + var expAvg = 0.5; + var newPixelsPerTile = pixelsPerTile * desiredTimePerTile / timePerTile; + pixelsPerTile = expAvg * pixelsPerTile + (1 - expAvg) * newPixelsPerTile; + var newTimePerPixel = timePerTile / pixelsPerTileQuantized; + timePerPixel = expAvg * timePerPixel + (1 - expAvg) * newTimePerPixel; } firstTileTime = Date.now(); @@ -2484,33 +2587,25 @@ } return { - setSize: setSize, - reset: reset, - nextTile: nextTile, - getTimePerPixel: getTimePerPixel, - restartTimer: function restartTimer() { - firstTileTime = 0; + getTimePerPixel: function getTimePerPixel() { + return timePerPixel; }, - setRenderTime: function setRenderTime(time) { - desiredTimePerTile = time; - } + nextTile: nextTile, + reset: reset, + restartTimer: restartTimer, + setSize: setSize }; } function pixelsPerTileEstimate(gl) { var maxRenderbufferSize = gl.getParameter(gl.MAX_RENDERBUFFER_SIZE); - var maxViewportDims = gl.getParameter(gl.MAX_VIEWPORT_DIMS); if (maxRenderbufferSize <= 8192) { - return 25000; - } else if (maxRenderbufferSize === 16384 && maxViewportDims[0] <= 16384) { - return 50000; - } else if (maxRenderbufferSize === 16384 && maxViewportDims[0] >= 32768) { - return 100000; - } else if (maxRenderbufferSize >= 32768) { return 200000; - } else { - return 50000; + } else if (maxRenderbufferSize === 16384) { + return 400000; + } else if (maxRenderbufferSize >= 32768) { + return 600000; } } @@ -2551,6 +2646,60 @@ }; } + function fragString$2 (_ref) { + var rayTracingRenderTargets = _ref.rayTracingRenderTargets, + defines = _ref.defines; + return "#version 300 es\n\nprecision mediump float;\nprecision mediump 
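
// --- Editor's sketch: adaptive tile sizing with an exponential moving average ---
// The tile renderer above now scales pixelsPerTile toward a target time per
// tile and smooths the result with an exponential moving average, so a single
// slow frame does not cause a large jump. A hedged sketch of that update rule
// (makeTileSizer is a name chosen here; the 20 ms budget and 0.5 smoothing
// factor mirror the constants in the build):
function makeTileSizer(initialPixelsPerTile, desiredTimePerTile = 20, expAvg = 0.5) {
  let pixelsPerTile = initialPixelsPerTile;

  return {
    update(measuredTimePerTile) {
      // scale so the next pass should hit the budget, then blend with history
      const target = pixelsPerTile * desiredTimePerTile / measuredTimePerTile;
      pixelsPerTile = expAvg * pixelsPerTile + (1 - expAvg) * target;
      return Math.round(pixelsPerTile);
    }
  };
}

const sizer = makeTileSizer(200000);
console.log(sizer.update(40)); // tiles took 40 ms: 150000, moving toward half
console.log(sizer.update(20)); // on budget: the estimate settles
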
int;\n\nin vec2 vCoord;\n\n".concat(rayTracingRenderTargets.get('historyBuffer'), "\n").concat(rayTracingRenderTargets.get('hdrBuffer'), "\n").concat(rayTracingRenderTargets.set(), "\n\n").concat(addDefines(defines), "\n\nuniform mat4 historyCamera;\nuniform float blendAmount;\nuniform vec2 jitter;\n\nvec2 reproject(vec3 position) {\n vec4 historyCoord = historyCamera * vec4(position, 1.0);\n return 0.5 * historyCoord.xy / historyCoord.w + 0.5;\n}\n\nvoid main() {\n vec4 positionTex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_position));\n vec4 lightTex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_light));\n\n vec3 currentPosition = positionTex.xyz;\n float currentMeshId = positionTex.w;\n\n vec2 hCoord = reproject(currentPosition) - jitter;\n\n ivec2 hSize = textureSize(historyBuffer, 0).xy;\n vec2 hSizef = vec2(hSize);\n\n vec2 hTexelf = hCoord * hSizef - 0.5;\n ivec2 hTexel = ivec2(hTexelf);\n vec2 f = fract(hTexelf);\n\n ivec2 texel[] = ivec2[](\n hTexel + ivec2(0, 0),\n hTexel + ivec2(1, 0),\n hTexel + ivec2(0, 1),\n hTexel + ivec2(1, 1)\n );\n\n float weights[] = float[](\n (1.0 - f.x) * (1.0 - f.y),\n f.x * (1.0 - f.y),\n (1.0 - f.x) * f.y,\n f.x * f.y\n );\n\n vec4 history;\n float sum;\n\n // bilinear sampling, rejecting samples that don't have a matching mesh id\n for (int i = 0; i < 4; i++) {\n float histMeshId = texelFetch(historyBuffer, ivec3(texel[i], historyBuffer_position), 0).w;\n\n float isValid = histMeshId != currentMeshId ? 0.0 : 1.0;\n\n float weight = isValid * weights[i];\n history += weight * texelFetch(historyBuffer, ivec3(texel[i], historyBuffer_light), 0);\n sum += weight;\n }\n\n if (sum > 0.0) {\n history /= sum;\n } else {\n // If all samples of bilinear fail, try a 3x3 box filter\n hTexel = ivec2(hTexelf + 0.5);\n\n for (int x = -1; x <= 1; x++) {\n for (int y = -1; y <= 1; y++) {\n ivec2 texel = hTexel + ivec2(x, y);\n\n float histMeshId = texelFetch(historyBuffer, ivec3(texel, historyBuffer_position), 0).w;\n\n float isValid = histMeshId != currentMeshId ? 0.0 : 1.0;\n\n float weight = isValid;\n vec4 h = texelFetch(historyBuffer, ivec3(texel, historyBuffer_light), 0);\n history += weight * h;\n sum += weight;\n }\n }\n history = sum > 0.0 ? 
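
// --- Editor's sketch: reprojecting a hit point into last frame's screen space ---
// The reproject shader above transforms the current hit position by the
// previous frame's camera and maps clip space to [0, 1] texture coordinates.
// A CPU-side sketch of the same math, assuming three.js; reprojectToHistoryUv
// is a name chosen here for illustration.
import * as THREE from 'three';

function reprojectToHistoryUv(worldPosition, previousCamera) {
  // the same product the renderer uploads as `historyCamera`
  const viewProjection = new THREE.Matrix4().multiplyMatrices(
    previousCamera.projectionMatrix,
    previousCamera.matrixWorldInverse
  );

  const clip = new THREE.Vector4(worldPosition.x, worldPosition.y, worldPosition.z, 1)
    .applyMatrix4(viewProjection);

  // 0.5 * xy / w + 0.5, matching the GLSL reproject() above
  return new THREE.Vector2(
    0.5 * clip.x / clip.w + 0.5,
    0.5 * clip.y / clip.w + 0.5
  );
}

// Usage: a point straight ahead of a default camera lands at the screen center.
const prevCamera = new THREE.PerspectiveCamera(75, 1, 0.1, 100);
prevCamera.updateMatrixWorld();
console.log(reprojectToHistoryUv(new THREE.Vector3(0, 0, -5), prevCamera)); // ~(0.5, 0.5)
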
history / sum : history;\n }\n\n if (history.w > MAX_SAMPLES) {\n history.xyz *= MAX_SAMPLES / history.w;\n history.w = MAX_SAMPLES;\n }\n\n out_light = blendAmount * history + lightTex;\n out_position = positionTex;\n}\n "); + } + + function makeReprojectShader(params) { + var fullscreenQuad = params.fullscreenQuad, + gl = params.gl, + maxReprojectedSamples = params.maxReprojectedSamples, + textureAllocator = params.textureAllocator; + var fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString$2({ + rayTracingRenderTargets: rayTracingRenderTargets, + defines: { + MAX_SAMPLES: maxReprojectedSamples.toFixed(1) + } + })); + var program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); + var uniforms = getUniforms(gl, program); + var hdrBufferLocation = textureAllocator.reserveSlot(); + var historyBufferLocation = textureAllocator.reserveSlot(); + var historyCamera = new THREE$1.Matrix4(); + + function setPreviousCamera(camera) { + gl.useProgram(program); + historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); + gl.uniformMatrix4fv(uniforms.historyCamera, false, historyCamera.elements); + } + + function setBlendAmount(x) { + gl.useProgram(program); + gl.uniform1f(uniforms.blendAmount, x); + } + + function setJitter(x, y) { + gl.useProgram(program); + gl.uniform2f(uniforms.jitter, x, y); + } + + function draw(hdrBuffer, historyBuffer) { + gl.useProgram(program); + hdrBufferLocation.bind(uniforms.hdrBuffer, hdrBuffer); + historyBufferLocation.bind(uniforms.historyBuffer, historyBuffer); + fullscreenQuad.draw(); + } + + return { + draw: draw, + setBlendAmount: setBlendAmount, + setJitter: setJitter, + setPreviousCamera: setPreviousCamera + }; + } + var noiseBase64 = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABAEAAAAADfkvJBAAAbsklEQVR4nA3UhQIIvBoA0E830810M91MN9PNdDPd/ulmupluppvpZrqZbqabe89DHCiDv5GzaossZGYBp2PFIFqKdmMXIKW85edCB/RT11SD3JMQidRlL7n2ufRH1jVkFUNVc3NaZ7DP0T7/112kM1Qc3RDG0K/4uN7CPC7OmtFRZK3Jy3fhSSySKIZXopTsnIhN69JjLHJYYnfpZu44hnV+UkhG/lPd/D+fIVwWtdhhupVPJmtsLFIhjHA7UUqY4fPIQ2qdKxviqH2sugJ2nC+1ZdV0vEF3RGNcMd4KdvIXaJnujdPrKj4ifkeX2f04avjEbqO0ogI/rD7zhmy6GKG/2w32IetIX5vE9DbrS+CNy4sbmgXoiaug48lV4bVKZgluwPujd+Ioa+KjuntypepEEvl/YYCYTq6w4aaReGMShwLkC4nvq7jFKJmLpoepHJTag/h2aMklShou+tyip5wm67P2/CnvH7K6zuq+KGvy2rkkrR4mc4dpUNTEFHDId9TXQiST3RxHO0lHNgNFIA/Ub1kC0pOlNBf77EtyZ0ejxvikzySL8C8hNWyyc1GvcBCusv/otvBO3YSj+KvvRlKgoNaF/GEB64prsx8qFRwVJcRmMk8l5E5swfHMPuhlr9DmtrLeqs7KOrCMQSpeGW/zH5F2dc0AXZhcp9IthLZyuxpHrkNnp0JfnsY+55XkAtgSOvsWzps8uoJ5GtpAXRWZ5TK9cEM1WVRWC81ZUstPZHHkC7GDjZfl7BJ+VcXkI8RfVIMW0Jq95oxE0R+MDQnMX97DPhYjEXzHM0LvUNyODhdDCvJdNmXlfFp0RsbBNclTj8hpXofsCgVYsAnwPRTNTiTLxZkQW43BmK6wHk7Y0iSdXIfyK8/aQULdx1/hJc0JkRE/UgNDc/dGZWanTCs2WQ0W6Xh7PZGuDMXEaLtIRMZcZAM4ieOwO661Qf4xVyhLOOA2mLe0JyvIDrBhUA42ioUiMmrHJ9te6jwtbQ6xWrKf/ED3qKJ0qvzO2of57KkcyMBvNZndbLTX/iWNaWTezm9E8cleKOSEXK1B3LDfeGk4yx/b7L5+uAvp6UVC/UYAhvPLvSwTWm+qqO5saYjh79LadBJaAR90ct9S/GGZ7Q1zhKyTOUJ9MzT85IldVjLLduUOqovEaASJbXeZ37oFv0w/sOGhvMzpVrL/2MeQx8+ldfQU/QBXIqn8NtHAHjCzaTJk+CDS0e6Wk8N7GEDgoR4rG5M/Zig/LD6hEr6VHmxzmijoKu/oZ+p84oEeiwegquE7pBZPYXEoyLeQ66wRicLXmOzWoib6mq6KUoWxuriq62OQh647TUmn0RuuIjtPfuEkcMQtwJ/IaJabRRe9fRX2Q8Z1L2UNlMclpfMFdKYr+XkVEeb6vChZuOBfhNl+l/hly9L0/mzYIxPhBq4oimlnB273mkgwnr+S7Vnp8Fff8/3VC7IJCtqZ9AxZRnujo3wjmQ9n7WtayxwgvUhUNtJ0UjlEU9vPFhePxDLfkl6z43hhdQSW+xbyKooJEEwqTOkL1VHWc1vReFaVxbcnTGM2Uq1XNXRPos0bdtI8VBKXcZdCV1dNpLcL3DE7Cqfmi2w5JGhGFqATTUhzy7sG2+a0II4ZtupikC488mt9abdTvpYXVALXBU6wNzYLXUTPQwTxH/nNttjKDA7pQT47mopOQmxzW/f3GVhXWoguEUl5EHcUoKm8LdpiMoZV9JONpzZa7wa
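
// --- Editor's sketch: bilinear history fetch with mesh-id rejection ---
// The shader above weights its four bilinear taps to zero when the stored
// mesh id does not match the current pixel, then renormalizes by the
// surviving weight. A plain-JS sketch of that filter; fetchHistory(x, y) is a
// stand-in for the shader's texelFetch calls and returns { meshId, rgb }.
function sampleHistory(u, v, width, height, currentMeshId, fetchHistory) {
  const x = u * width - 0.5;
  const y = v * height - 0.5;
  const x0 = Math.floor(x);
  const y0 = Math.floor(y);
  const fx = x - x0;
  const fy = y - y0;

  const taps = [
    { dx: 0, dy: 0, w: (1 - fx) * (1 - fy) },
    { dx: 1, dy: 0, w: fx * (1 - fy) },
    { dx: 0, dy: 1, w: (1 - fx) * fy },
    { dx: 1, dy: 1, w: fx * fy },
  ];

  let sum = 0;
  const light = [0, 0, 0];
  for (const t of taps) {
    const texel = fetchHistory(x0 + t.dx, y0 + t.dy);
    const w = texel.meshId === currentMeshId ? t.w : 0; // reject mismatches
    light[0] += w * texel.rgb[0];
    light[1] += w * texel.rgb[1];
    light[2] += w * texel.rgb[2];
    sum += w;
  }

  // the shader falls back to a 3x3 box search when every tap is rejected
  return sum > 0 ? light.map(c => c / sum) : null;
}
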
7hG4XzxvquHj2s5lsIrFbtrbew3+SKbiK6Ry+whAyXrTBC0kgDfwZHNOMNRnwOjHVVICdOGVo6LuFsn6GTKN6u4IeZqtN7B6vzlegD7ioW8i/u430kbtO2pABrgTPwb+xchSZ7jK/V6KxPEWK+K+oBXFmeuikt+HzrIU66KQsI9bRaGqQfKqSkMNumbnN4/ljkFsPxqnDElSF32L17D8UhxbUI8xnuwk/0znwXXcGGmD4QpPo5n6kTod70Zb2oI8Y6pFJKiuLoab7bXBEj+CXFTOH4A4kV/1JNjNRLrexaEX5Ht0xQ1RRskzmhCd+rmnFi9hLeqHe7svy7Lq+/+Mq6am+A/X8e+iptvqcbIjzqCOfbW6SpKQ22gPt8HgTFUMPd9kWgKd2O45Pr0EuOlK8waXFfriga7sXrLlKZZbrgeaPnmsrurd+n2H8hugjc+i1OCpJj2vYPyQ27+lT6/f4JM0c6sJIHwm/8AJS4tXuuo6g9qOCjvOZIrI9ZpaaauQAjwb9eTG0RMYPr2y5AHv8YhZLHvZl+DdQqrI5Z1L4QawT/FOLoQCOLR+EyTIrjcqb6YtiA4mg0/L27reYYg7JpvSVOM7G+p2uIb1iJ0hE+/DvvLW+qqfL034nLU5GQh02j8aHi/aDLS2b4ncYk/OcE+V+hhNqmF2rs1j4a1qziXYgaaDWQRetSbOwC60J8VhFSIf62k2osy7FXqpdrDAdZbuQxf5ZOCGLy6Reago9xBydmN9HBdUqX9VtUYdIKZOGbGAFxEDXjLxDmeVXsd5WIOmlhN0kqe2r84o1upy+z9KLRjY/ui5qGkhNiqoL5iXN6hPbeyGa+ckKwRM6l51Ao+EG/yKruXNsrWvHkuDPKKctS4bYRnq7eIQX+at4s8lD2ovy+D/xlXUWuf2jsNiNQx9xDRwjLAgJUSd5AvfTD80U0Qk91fP8DTkBfaXx1Qhv7FMXifZRMw0MlxtxVFVNzoOTrnjoK9ObCZy5HOwjbWgTib1kFo3BJa9t7oojdJK5RpGcifO66LQ2xuIHBvxcnMcLdEoUWc0QjVhs0k3f4dnoXvREODRB5KWJ2UFTX60WcXERxFQ7uo9mDz1YVbzQddDBHQ3QxD0MPfBnsdX+p9+xg+Sybmtum4hKoJW+CG0NGSQxP/TC0AulZ1tozfATr9Ld/QfURp1kg2FqaOQ2QBZ9JNyCoeQfO0eS+SOCa0lLshW6hnulWqHi/qrMTj6Z03gzB/LMzuaXmZXJSUm7nSKACjQDVzafbiNTqUayYpjDNpqhqIzf4SfRU/KF6S+vo0MhAS/v36BoolU4JbKQO3S3nmAL88puH0GoN6tF3vg2rCzscLVcUbmKzHS/dFroBdGk8bP4Hx8DRotKtJdMa4YZKhvR2OgbnULv+lzYUfjhFusD6KaLR8aHFSSPjYmT2MP6tU1L76u4uqJYrqawEqqpW+Onm4G6KIw2CU0Z29/EIc9gKVwjH3wxNV5v8fmxVunIGB94PxYBV+I3RRM4IO8x7Ab6ZXi3aoEeoUXmtzqHVrGCsrUYpOvIFXSMgX4YQp1Qmp6xf/Ae8gR1U19NUzEdSOjApK9nPuoItqt5HE7TXPIm3sff2fm+SbioN9GcPLltyTLKeeGBjGr668sYsfuymdjM8uHjYqL5BLn4SFqRdjbnZJKgyFHIA51lEjEebtEMfqN7LlORlgreiM3B26G2g82iqssbZBQq6k+rGn5J+MMvsVRus95vMpFR9K9K4errLmJFSMO/iepoBu6CfptR4QzqxpOYH6ERP4xmqS4uKzz3V2RS0SnMNwnYKvdW5Bd16FdS0kWlDeQ2VIMEJtgeVJ7GZIdDYQldWQ6UVK2mM1l000/MRyn5GpGZDkRbQ1RUCs/HLcMDV4hV1/OkEZFpRX+f5zfSHGQR7W2obdeiMnK3qQarTK7wEiq5vTqWXayqhyF4By5l6+HDPKK4AZtVRnoHjVBv8Syd1VocyY2UP9g8c15PpXBNVIET8MnVd8/oNlaGcnZJBZoQ7uAe4SjJAWNdX3AkNrQTQ+ClmMxO23i4nXseStC+4agkPDYeChdcOzLRJ2f/2S+ukJqsW/tvKoN4bP5/sOpHxuN5qC3p5VbaizIefWBKkKWkCc+DO5paPAHAP7wQj+VFRVp/zhPy3Ufw+8I4VsE1QVPtS1ZLf6eJ5Qr3Se3GxfURld71EhvEHJXVbLdJzUL/2nk6nX1mGcxdXUpvIg2gt7rADrkoYq0ogKbYXyK1pOwljuEO0rykAh5k2pMp6hR7rVO7h3IY2Y6gOYpsBqhWfp/sQcbbZa6m7uge0dx8pUgjd9GY5CyUldNEXX3L5JRLaHP2G5UhDtfnn8Qk3sak8Y1dUR5BatyTnyTR2PWwnCVCZe09NdwLG8tpvl3nJCd8dfzPNFMp1Wb4YuuihKIPWkP2k5I0o4OVJB96wDby2Oy2TAwv9VAxh8dFJ9EvU1S390Pdekx8d0jrxgik35GaLDoeZR7ZhH4IqyzO+/WiNzkkGNrOm8MvN4dmom9kbtuCzgy14K097SrhJuoeDEMJ7CI5Tjwn+3AmfjkUQpXUTR+DzdDPKVRgh23w1c0MUoI1EYchky6st4hefmS4bhZhr5vJ9/QYfUpbywukv9iib4S8msMqOE6iqH86px6L3oubJike6fJBB1ODDTZb6V+fAvapLL6DTGQ+2hm2k1svL8litoeKxZaRIXq2/U3HsDb6ghQBJqP4OB29iP4Lv/FaVZlctV9QM5tC1UGRbCWRBSfQs/UOFAGtlhX8VJJMLTD7VQY6HRU23ehdXAYlJHN5FlkRvXQHdDzx2I8Lx1A3sxTd8MXdOjVKH4BCOp2pIx6zrHwar6qO6uYB3FaXXdYNycNXCUNlY9TFLwq5SFuemg60UdhieVa8hml4v/2sHOsDNV1JGM5zmx/U2qKhk/lq+7jXaCuuYxaTPba1OuMHhY16GiuJVonzKBUtjEDVtwPxJP+cXUaRfD/1w5zS0Ulr9DXcQPnIK39Xdgkn+WJahGzGkI1cda/xFhfNn6KP1R7c2Y4JZSBnWK26kkJhs51E/tGk8m5oInvSjOI5risjuorqlI8X0oZh+JmKQeuhn7KLjKmvmd6iCVnIKtMH5KOM6zGu5nP5hmixMLo8Ge0P6jWyD0ukR7F0lqIPEMc/gv0OIsqZvCSug8eZ964gnYXr+LsqPmojHrG0apiIzg6TtkyHc7BHIDzTXuL/yQ38Dhsnm5OPfCorYK/LFTKPOU4xr+m/6WzydVCmPWwM5+UuN9e1Ce/8TRbfdJVzbCrWQJTUO+R8V5Ouh6m6T2jpqllYDfew5Ylcb1teraRxUFb8xxp6zFWH+eqtbIhzomc+DRunqvv3doVoKfOEJGoRKilzmAt4B69k+0FyN0m2ED5ss6NkNLTbn1LDAmHU/QDBj5oU8j9cxLxi2dUd+z5E8RfNT9NUHvApzRU/Bv1R0MEPlER9Nzuhpb/lhmsLxUJfP8EkYWdUCbyW3QzlbTco4AfhKEDNUfeY7pLt8U/a063mUaGD+4wtofwtmo0L2WWqlS
xHErH0aDltYsbwqHqNq2CnuJ3qdKjJh/hlYYrsKLKwwTy2eOnzyrIMB1A0rmhiNc3Iz9tkvJt44ZqhJQ70F+jhW8CIgNQuO49/Q8bcJ5NxWlaVj6Yx/VVIZWeY2uK+zuw3hSEhIu2hE5NLfiC9p//I7vq6i6+fioJwF2Uyf2lzHoGt521FPlUJrH+AioQzvJtcJnaGEwHewSXxGFExyX7y81hVsQGng6shr9lG74TM5KdX/LyLIevpKyin6sz/Qj/0MjTQh2g594Yct6NVPL5QNUC3QlX/RR3hOXE9th5Nhf2hBswWfdVZVJsvMQNoGnOVfvNx6Qudgo9Ra/hMVJV8wdF1XQwFSYqwzgxjkVQ9kS+cZjHEhzAK6qMKYlZIjg+ZGqIvykCWBy4T0dlkBykCq33WsIAOAoJaQjH/V5w1uekes5plQOPRfBuTFmGvWRueVX9VW2V7GcccoE90CTSW7cXzaU+9hdflUeUTkk001/PDCAnbTRXb2h4jPeCZ2O0Gh1JuOu2M97PnZjBd6QrJDuqBL60+kuH4BK+Fo8uzLjmaoO4Z4DvsCpZM9DJtlWKvUEnVmTVVj/SOUFmOxBHCZV7CJJETIKA8rIuZKavxzKaxvQSlxD/exg9g130ifoH20pBJPKAz2F+bwyVUq2Qrd98mshdVNhVTtjJXSFx4wzegSfhAKECfcY1u4Wamu3pPqogO+Fu4bifDU1MZRfepxAh8EeLYn0i4Ey6NWwYD4Yhp6hfK8uiGimFPubcsYXiI/nO58QmN5V4+zm1kpdl3AtoeFLF0MT0Wbqk5KJ37rmqFTWYR+4vLsGN4BM3uGoYUJgLv5irINGiw+upKhA3qOIxkiQjVGfR+uo7dRAv4B1WLbqApcD472903Hz2T6/0jmR6G0xWmEWz2g3U7uYZF1FNgKX7PK5p85lXoGMBAMzzA17Kb+EnZmFfk/eghNI4W9r1pGjGZ14YvbIHcHQbYy/Cbb0FTcW61x83ySGRGjc0SOC/qqKE+p28MfV0hfJhNV0P4VdGQdICcYrKPz/Lb306IfSKl+66z83LiKPokGeuq4pI5oqFMzY6FSQC50RXxgifnnckXEUfkZS9kFNJCn0b38Q4aWXRRt2Rl/pLMkll4fdwuPNaRXW11xT1lBdE2KfBblwAdDz/dNhIJtSZZzFtdWq+BqHZPKB8ukbZwCkf0Ne19X1hMFAvsLZIWFyPGnTe36TC9Ej8U5Tkk8J/0Ai9JpnCJ7iLz+VWzFqqEdyaXGqSWk8I4vYovWonifKW2Iok7p8boFaozGsinis86MpknWoeJoazD4OW5UEXvcxNoUvdDdDdP5Ag7V2xypbHy/eGcjY56yF2qGQwUz1xSaE2jit++h9mpYZpqYwuYyrAGT+QlXDsjVSrUXcwiiaCxfsYOm2lmszyrh4tY/LbrY9+GQqK8+SdSyYO2qsmqbvEi+old7nrCaL1Ed7Gx8B05gJ82C1FGFds3FM9tDvUJa9E4vNJVZTLzy89i2dg4sLQmFMGZ8TkH61lUf4Q94D1xRPTYMZst/IK9vjhskJdJeTdKfXNMdOfvVR5eDS3STUlGczIYHEvdhxZ2LR1ud/NYpqYIMqEs7P6yTbIpz8eru61QjH4mg1AybF17mgESqAN4PRnl8uvTsBpT9SlsJ4tgBKtjIZXua36TRmirSIo+iqX8FIol7pKx5CNEox1EdpGC3WWR5C4/Qf+wm3Rc9Z+fhdraPGi8KsWdT0Y7idMylzVwldSXGf1MeGZSiFGe+1tin67kr6ixag26TYYaSi771i5ueEjr+U4+neqPY6H37KaEFzBGFqfpuZIXUEsyIJST01xd2walDwvtGd0Xr7al/ALSXKbRNHSh1/xe9cHVDs+1hv7ul6xPX5ppZAjlZm446vuIsuiiW+rf8Yhmil+Bc0N3Ej3UxAXcTzWdZxEhaN3HRJaX5VMyyR3jLXxZDTnkbrsM3cA1eD52UGL2imx3xA7FB2wN+c9Opo3UG3rZDeIn9Wz2kCfTRVwEesH2oCn0MRHFzZWZcHm4y8GmVp/4BBzd7pXZbBd+3Kehjfw/N0duh2e4hTmuouCuvjrbo4uZaX5DqOyT+PxsJXTBMIOfstFd2/BF/8fnyximG1rFk/Bb6AWOywqHHSYhPhjy0zjuOWSndcUAMwVVtGtDZrFT1FCF+Bboxaz+wYujXVBNPSRt3TBel3xHhVk/9xASyFLqjEhr+/FFxMh7YiKktkftn5CDNDW7xTd7kcU1MJRWMm9Vb55YbVIl5D36BxqFk6osFmqjl8GTjLp7qCnHWMPa24NoufkdWuo7+j/zxUx0N+hbaBqQW6VGia52kcsnkb1p1/I5vgo26CIertrZgMfT8jqxrkeJfAMtwmAWX95Uo/g814vXll5BStHMzzG50EN8RE4g1WgWNNwtUpG10jl8S1zZvvfT7Urzi5eCKOEtweoMJWKejoFKoTY0TliqpCCU+WsqI7ywhpzipVFyeKKikfE+o63t11qguWAP/Wau6OEQE52l5dkq3BGeqwimFMnktyn4J4uoS3aNakAj8XbqStjpC/nXpL354q/zo3SxATjjuEtpr7H5uiodjVHoivbLhvoxnCDdMdZn/RMz0x/k0UIz3lv/EdN0K3pYdrO72VeeH24La2aqJ7wjWeFLhjlus/jC89FaKC05oN6biWqpgGjYshGQTpdTP8ggEQ9mkuTmgqglsFkrE4UBUNreIbnEMHcE9xRN8P2wlZTjr0xKv1HOEvn531ApJFLt1WdXRk/UKSyjmdxIkke903Ftc7EEC1PVDiaNfToRT/c2j0km6I6mKqcW44GqobuOOyp4goU26hWewpfxE/QZaoo2+L50vx5N8rmG/IefiDeJeuqDiAUFwjqeWX3VU11fdoFn04N9PVhNJoSdZoDMztbZ42YhfaMvueW4Irkmp+sS+hlJLmL5y6aI2KYvhGr6kG1kopid1vuiNlY4aXO5KhJmmTo8AWmF8/qUugcq5rLxb7gCiunu2jnQhZ2C2CGD6gw71CMzw13kQ0xEVogsZdVtHHjLD4j7LiIvxpxswLwYRguoCG6H7isSi/qwwQ0Rp8U4/IeuNq/oSDsDfto8dJx9ExJJyVqwX3S9Hi2TazjLCsNtu1984NXMdnbPLbaTdCv1Xpf02+UTqMZe8QWquBlDKoeEtp3e6+qTa7gV+SnG+VIhOeWop/0g56o0EFf+QC1wOdwRPyJH1U/AvgPJYffZMqEtzo4jhfoiKdOyrT7uqqA1NIvricqK3ei1gBW8DwE5zM8Jl3CCUC8MRpH0EbscEoihOptLBntDP+/CH5RWLkfvQhn1TCahR/w201XcYEvUGZbJbnajXRWyh/Xgt/TqkIBOcEXkPBsZHtiaaKlMbWbDSdGf7ab3aSl51fe3qf3nMM3e9vF5W5/BwQT/21ZQ611W2YGPtb8hHbuuiBP+nG6Op6HVqJUlEMUexs1YH5qbTBILRCY2nORVUeh0V1X/hwrwJuy5u2KWupx0Bj1NXtBsuKkezra58+Ez9NGN1R3x0VRindg7mRGZM
A8XNOd4jXCIL+IfXYMAN3RSbVUT+oTFdmfMOl1R72SvPQtpwl95zZUxn+g9MtnVMOvDbXVcRnOd+Hr6iDcWH0g6/xRvD99FYtwJR/YlbD05AmFUneyl71x3W17k8xNRMrnJR1djaUGxlsThY6ARjgBPUSc7kkeH/GQIKilgG+8KRCv8mVLcW+Z300I7NBzNJ0XZZhSR1OPSLmHdMOJF8Wf5HzD9K5zFFXG/sFIewu1RPFSOrULH1JTwUR1UMdUvNQAv5jHwTb3KxuWt8StXkuz3mfklNIcc0z3DPyhn9opkrClsVI/xqRBbwytYQq7gQTYNXi4bmGPyjk+CYuiHfj8fp3vDMZ+QZSRvzW6Yq7OilGQHFMfx3GyZXBa2DMa7S2YeuWeHyMy6p3lo29LNtDR3rq5Ljf+RI2guPkcHy9rkF2mJEvvqNI+4jRUs50FfgWy+u5uDaynIAq15dF4tPIB9KIp8L7PDUv1NVoWWJht6iQrIdfgcLu05vsbHBkGc5mECeyC2spv8F4rG++C80ICkoNXwOlIwXEOJzSyX23UIU0h/mklVoY9lfNdVL/E36VD20u4QbVxm6GeKyfGkEvrFUqPR/H9s/XjiBWp1EAAAAABJRU5ErkJggg=='; function makeRenderingPipeline(_ref) { @@ -2560,20 +2709,28 @@ toneMappingParams = _ref.toneMappingParams, bounces = _ref.bounces; var ready = false; + var reprojectDecay = 0.975; + var maxReprojectedSamples = Math.round(reprojectDecay / (1 - reprojectDecay)); var fullscreenQuad = makeFullscreenQuad(gl); var textureAllocator = makeTextureAllocator(gl); var rayTracingShader = makeRayTracingShader({ + bounces: bounces, + fullscreenQuad: fullscreenQuad, gl: gl, optionalExtensions: optionalExtensions, - fullscreenQuad: fullscreenQuad, - textureAllocator: textureAllocator, scene: scene, - bounces: bounces + textureAllocator: textureAllocator + }); + var reprojectShader = makeReprojectShader({ + fullscreenQuad: fullscreenQuad, + gl: gl, + maxReprojectedSamples: maxReprojectedSamples, + textureAllocator: textureAllocator }); var toneMapShader = makeToneMapShader({ + fullscreenQuad: fullscreenQuad, gl: gl, optionalExtensions: optionalExtensions, - fullscreenQuad: fullscreenQuad, textureAllocator: textureAllocator, toneMappingParams: toneMappingParams }); @@ -2583,77 +2740,86 @@ noiseImage.onload = function () { rayTracingShader.setNoise(noiseImage); ready = true; - }; + }; // full resolution buffer representing the rendered scene with HDR lighting - var useLinearFiltering = optionalExtensions.OES_texture_float_linear; // full resolution buffer representing the rendered scene with HDR lighting var hdrBuffer = makeFramebuffer({ gl: gl, - renderTarget: { - storage: 'float' - } - }); // lower resolution buffer used for the first frame - + renderTarget: rayTracingRenderTargets + }); var hdrPreviewBuffer = makeFramebuffer({ gl: gl, - renderTarget: { - storage: 'float' - }, - useLinearFiltering: useLinearFiltering - }); // used to sample only a portion of the scene to the HDR Buffer to prevent the GPU from locking up from excessive computation + renderTarget: rayTracingRenderTargets + }); + var historyBuffer = makeFramebuffer({ + gl: gl, + renderTarget: rayTracingRenderTargets, + linearFiltering: true + }); + var reprojectBuffer = makeFramebuffer({ + gl: gl, + renderTarget: rayTracingRenderTargets + }); + var reprojectPreviewBuffer = makeFramebuffer({ + gl: gl, + renderTarget: rayTracingRenderTargets, + linearFiltering: true + }); + var lastToneMappedBuffer = reprojectPreviewBuffer; + var clearToBlack = new Float32Array([0, 0, 0, 0]); // used to sample only a portion of the scene to the HDR Buffer to prevent the GPU from locking up from excessive computation var tileRender = makeTileRender(gl); - var lastCamera = new LensCamera(); // how many samples to render with uniform noise before switching to stratified noise + var lastCamera = new THREE$1.PerspectiveCamera(); // how many samples to render with uniform noise before switching to stratified noise var numUniformSamples = 6; // how many partitions of stratified noise should be created // higher number results in faster 
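
// --- Editor's sketch: where maxReprojectedSamples comes from ---
// The pipeline above derives maxReprojectedSamples from reprojectDecay. When
// the reproject pass blends out_light = blendAmount * history + newSample with
// blendAmount equal to the decay, the history converges to a geometric sum
// whose total weight is decay + decay^2 + ... = decay / (1 - decay), so the
// stored sample count is capped at that value.
const reprojectDecay = 0.975;
const maxReprojectedSamples = Math.round(reprojectDecay / (1 - reprojectDecay));
console.log(maxReprojectedSamples); // 39

// the same number, reached numerically
let weight = 0;
for (let i = 1; i <= 1000; i++) {
  weight += Math.pow(reprojectDecay, i);
}
console.log(Math.round(weight)); // 39
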
convergence over time, but with lower quality initial samples var strataCount = 6; - var sampleCount = 0; + var sampleCount = 1; var sampleRenderedCallback = function sampleRenderedCallback() {}; - function clear() { - hdrBuffer.bind(); - gl.clear(gl.COLOR_BUFFER_BIT); - hdrBuffer.unbind(); - sampleCount = 0; + function initFirstSample() { + sampleCount = 1; tileRender.reset(); } - function initFirstSample(camera) { - lastCamera.copy(camera); - rayTracingShader.setCamera(camera); - rayTracingShader.useStratifiedSampling(false); - clear(); - } - function setPreviewBufferDimensions() { - var aspectRatio = hdrBuffer.width / hdrBuffer.height; - var desiredTimeForPreview = 16; // 60 fps - + var desiredTimeForPreview = 10; var numPixelsForPreview = desiredTimeForPreview / tileRender.getTimePerPixel(); - var previewWidth = clamp(Math.sqrt(numPixelsForPreview * aspectRatio), 1, hdrBuffer.width); - var previewHeight = clamp(previewWidth / aspectRatio, 1, hdrBuffer.height); + var aspectRatio = hdrBuffer.width / hdrBuffer.height; + var previewWidth = Math.round(clamp(Math.sqrt(numPixelsForPreview * aspectRatio), 1, hdrBuffer.width)); + var previewHeight = Math.round(clamp(previewWidth / aspectRatio, 1, hdrBuffer.height)); + var diff = Math.abs(previewWidth - hdrPreviewBuffer.width) / previewWidth; - if (previewWidth !== hdrPreviewBuffer.width) { + if (diff > 0.05) { + // don't bother resizing if the buffer size is only slightly different hdrPreviewBuffer.setSize(previewWidth, previewHeight); + reprojectPreviewBuffer.setSize(previewWidth, previewHeight); + historyBuffer.setSize(previewWidth, previewHeight); } } - function camerasEqual(cam1, cam2) { - return numberArraysEqual(cam1.matrixWorld.elements, cam2.matrixWorld.elements) && cam1.aspect === cam2.aspect && cam1.fov === cam2.fov && cam1.focus === cam2.focus && cam1.aperture === cam2.aperture; + function areCamerasEqual(cam1, cam2) { + return numberArraysEqual(cam1.matrixWorld.elements, cam2.matrixWorld.elements) && cam1.aspect === cam2.aspect && cam1.fov === cam2.fov && cam1.focus === cam2.focus; + } + + function clearBuffer(buffer) { + buffer.bind(); + gl.clear(gl.COLOR_BUFFER_BIT); + buffer.unbind(); } function addSampleToBuffer(buffer) { + buffer.bind(); gl.blendEquation(gl.FUNC_ADD); gl.blendFunc(gl.ONE, gl.ONE); gl.enable(gl.BLEND); - buffer.bind(); + gl.clearBufferfv(gl.COLOR, rayTracingRenderTargets.location.position, clearToBlack); gl.viewport(0, 0, buffer.width, buffer.height); rayTracingShader.draw(); - buffer.unbind(); gl.disable(gl.BLEND); + buffer.unbind(); } function newSampleToBuffer(buffer) { @@ -2663,31 +2829,27 @@ buffer.unbind(); } - function renderPreview() { - newSampleToBuffer(hdrPreviewBuffer); + function toneMapToScreen(buffer) { gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); - toneMapShader.draw({ - texture: hdrPreviewBuffer.texture - }); + toneMapShader.draw(buffer.texture); + lastToneMappedBuffer = buffer; } - function renderTile(x, y, width, height) { + function renderTile(buffer, x, y, width, height) { gl.scissor(x, y, width, height); gl.enable(gl.SCISSOR_TEST); - addSampleToBuffer(hdrBuffer); + addSampleToBuffer(buffer); gl.disable(gl.SCISSOR_TEST); } - function hdrBufferToScreen() { - gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight); - toneMapShader.draw({ - texture: hdrBuffer.texture - }); - } + function updateSeed(width, height) { + rayTracingShader.setSize(width, height); + var jitterX = (Math.random() - 0.5) / width; + var jitterY = (Math.random() - 0.5) / height; + 
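
// --- Editor's sketch: sizing the preview buffer from measured performance ---
// setPreviewBufferDimensions above solves for the largest preview resolution
// that should render within a time budget, given the measured time per pixel
// and the full-resolution aspect ratio: width * height = budget / timePerPixel
// and width / height = aspect, so width = sqrt(pixelBudget * aspect).
// previewSize is a name chosen here; the 10 ms default mirrors the build.
function previewSize(fullWidth, fullHeight, timePerPixelMs, budgetMs = 10) {
  const clamp = (x, min, max) => Math.min(Math.max(x, min), max);
  const aspect = fullWidth / fullHeight;
  const pixelBudget = budgetMs / timePerPixelMs;
  const width = Math.round(clamp(Math.sqrt(pixelBudget * aspect), 1, fullWidth));
  const height = Math.round(clamp(width / aspect, 1, fullHeight));
  return { width, height };
}

console.log(previewSize(1920, 1080, 1e-4)); // about 422 x 237 at 0.1 µs per pixel
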
rayTracingShader.setJitter(jitterX, jitterY); + reprojectShader.setJitter(jitterX, jitterY); - function updateSeed() { - if (sampleCount === 2) { - rayTracingShader.useStratifiedSampling(true); + if (sampleCount === 1) { rayTracingShader.setStrataCount(1); } else if (sampleCount === numUniformSamples) { rayTracingShader.setStrataCount(strataCount); @@ -2699,10 +2861,29 @@ function drawTile(camera) { if (!ready) { return; - } else if (!camerasEqual(camera, lastCamera)) { - initFirstSample(camera); + } + + if (sampleCount === 1) { + reprojectShader.setPreviousCamera(lastCamera); + } + + if (!areCamerasEqual(camera, lastCamera)) { + initFirstSample(); setPreviewBufferDimensions(); - renderPreview(); + rayTracingShader.setCamera(camera); + updateSeed(hdrPreviewBuffer.width, hdrPreviewBuffer.height); + newSampleToBuffer(hdrPreviewBuffer); + reprojectShader.setBlendAmount(reprojectDecay); + var temp = historyBuffer; + historyBuffer = reprojectPreviewBuffer; + reprojectPreviewBuffer = temp; + reprojectPreviewBuffer.bind(); + gl.viewport(0, 0, reprojectPreviewBuffer.width, reprojectPreviewBuffer.height); + reprojectShader.draw(hdrPreviewBuffer.texture, lastToneMappedBuffer.texture); + reprojectPreviewBuffer.unbind(); + toneMapToScreen(reprojectPreviewBuffer); + clearBuffer(hdrBuffer); + lastCamera.copy(camera); } else { var _tileRender$nextTile = tileRender.nextTile(), x = _tileRender$nextTile.x, @@ -2714,73 +2895,85 @@ if (isFirstTile) { sampleCount++; - updateSeed(); + updateSeed(hdrBuffer.width, hdrBuffer.height); } - renderTile(x, y, tileWidth, tileHeight); + renderTile(hdrBuffer, x, y, tileWidth, tileHeight); if (isLastTile) { - hdrBufferToScreen(); + var blendAmount = clamp(1.0 - sampleCount / maxReprojectedSamples, 0, 1); + blendAmount *= blendAmount; + + if (blendAmount > 0.0) { + reprojectShader.setBlendAmount(blendAmount); + reprojectBuffer.bind(); + gl.viewport(0, 0, reprojectBuffer.width, reprojectBuffer.height); + reprojectShader.draw(hdrBuffer.texture, reprojectPreviewBuffer.texture); + reprojectBuffer.unbind(); + toneMapToScreen(reprojectBuffer); + } else { + toneMapToScreen(hdrBuffer); + } + sampleRenderedCallback(sampleCount); } } - } + } // debug draw call to measure performance + // use full resolution buffers every frame + // reproject every frame - function drawOffscreenTile(camera) { + + function drawFull(camera) { if (!ready) { return; - } else if (!camerasEqual(camera, lastCamera)) { - initFirstSample(camera); } - var _tileRender$nextTile2 = tileRender.nextTile(), - x = _tileRender$nextTile2.x, - y = _tileRender$nextTile2.y, - tileWidth = _tileRender$nextTile2.tileWidth, - tileHeight = _tileRender$nextTile2.tileHeight, - isFirstTile = _tileRender$nextTile2.isFirstTile, - isLastTile = _tileRender$nextTile2.isLastTile; + if (sampleCount === 1) { + reprojectShader.setPreviousCamera(lastCamera); + } - if (isFirstTile) { + if (!areCamerasEqual(camera, lastCamera)) { + sampleCount = 1; + rayTracingShader.setCamera(camera); + clearBuffer(hdrBuffer); + lastCamera.copy(camera); + } else { sampleCount++; - updateSeed(); } - renderTile(x, y, tileWidth, tileHeight); - - if (isLastTile) { - sampleRenderedCallback(sampleCount); - } - } + updateSeed(hdrBuffer.width, hdrBuffer.height); + addSampleToBuffer(hdrBuffer); + var blendAmount = clamp(1.0 - sampleCount / maxReprojectedSamples, 0, 1); + blendAmount *= blendAmount; + reprojectShader.setBlendAmount(blendAmount); - function drawFull(camera) { - if (!ready) { - return; - } else if (!camerasEqual(camera, lastCamera)) { - 
initFirstSample(camera); + if (historyBuffer.width !== hdrBuffer.width) { + historyBuffer.setSize(hdrBuffer.width, hdrBuffer.height); } - sampleCount++; - updateSeed(); - addSampleToBuffer(hdrBuffer); - hdrBufferToScreen(); + var temp = historyBuffer; + historyBuffer = reprojectBuffer; + reprojectBuffer = temp; + reprojectBuffer.bind(); + gl.viewport(0, 0, reprojectBuffer.width, reprojectBuffer.height); + reprojectShader.draw(hdrBuffer.texture, historyBuffer.texture); + reprojectBuffer.unbind(); + toneMapToScreen(reprojectBuffer); } - function setSize(width, height) { - rayTracingShader.setSize(width, height); - hdrBuffer.setSize(width, height); - tileRender.setSize(width, height); - clear(); + function setSize(w, h) { + rayTracingShader.setSize(w, h); + tileRender.setSize(w, h); + hdrBuffer.setSize(w, h); + reprojectBuffer.setSize(w, h); + initFirstSample(); } return { drawTile: drawTile, - drawOffscreenTile: drawOffscreenTile, drawFull: drawFull, restartTimer: tileRender.restartTimer, - setRenderTime: tileRender.setRenderTime, setSize: setSize, - hdrBufferToScreen: hdrBufferToScreen, getTotalSamplesRendered: function getTotalSamplesRendered() { return sampleCount; }, @@ -2796,7 +2989,8 @@ }; } - var glRequiredExtensions = ['EXT_color_buffer_float']; + var glRequiredExtensions = ['EXT_color_buffer_float', // enables rendering to float buffers + 'EXT_float_blend']; var glOptionalExtensions = ['OES_texture_float_linear']; function RayTracingRenderer() { var params = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; @@ -2813,7 +3007,6 @@ var optionalExtensions = loadExtensions(gl, glOptionalExtensions); var pipeline = null; var size = new THREE$1.Vector2(); - var renderTime = 22; var pixelRatio = 1; var module = { bounces: 3, @@ -2850,7 +3043,6 @@ } }; - module.setRenderTime(renderTime); module.setSize(size.width, size.height); module.needsUpdate = false; } @@ -2898,18 +3090,6 @@ return pixelRatio; }; - module.setRenderTime = function (time) { - renderTime = time; - - if (pipeline) { - pipeline.setRenderTime(time); - } - }; - - module.getRenderTime = function () { - return renderTime; - }; - module.getTotalSamplesRendered = function () { if (pipeline) { return pipeline.getTotalSamplesRendered(); diff --git a/build/RayTracingRenderer.js b/build/RayTracingRenderer.js index 08648c6..ea2e619 100644 --- a/build/RayTracingRenderer.js +++ b/build/RayTracingRenderer.js @@ -256,2056 +256,2158 @@ void main() { }; } - // Manually performs linear filtering if the extension OES_texture_float_linear is not supported - - function textureLinear(defines) { - return ` - - vec4 textureLinear(sampler2D map, vec2 uv) { - #ifdef OES_texture_float_linear - return texture(map, uv); - #else - vec2 size = vec2(textureSize(map, 0)); - vec2 texelSize = 1.0 / size; + // Reorders the elements in the range [first, last) in such a way that + // all elements for which the comparator c returns true + // precede the elements for which comparator c returns false. 
+ function partition(array, compare, left = 0, right = array.length) { + while (left !== right) { + while (compare(array[left])) { + left++; + if (left === right) { + return left; + } + } + do { + right--; + if (left === right) { + return left; + } + } while (!compare(array[right])); - uv = uv * size - 0.5; - vec2 f = fract(uv); - uv = floor(uv) + 0.5; + swap(array, left, right); + left++; + } - vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize); - vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize); - vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize); - vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize); + return left; + } - return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y); - #endif + // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: + // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. + // All of the elements before this new nth element compare to true with elements after the nth element + function nthElement(array, compare, left = 0, right = array.length, k = Math.floor((left + right) / 2)) { + for (let i = left; i <= k; i++) { + let minIndex = i; + let minValue = array[i]; + for (let j = i + 1; j < right; j++) { + if (!compare(minValue, array[j])) { + minIndex = j; + minValue = array[j]; + swap(array, i, minIndex); + } + } + } } -`; + + function swap(array, a, b) { + const x = array[b]; + array[b] = array[a]; + array[a] = x; } - function intersect(defines) { - return ` + // Create a bounding volume hierarchy of scene geometry -uniform highp isampler2D indices; -uniform sampler2D positions; -uniform sampler2D normals; -uniform sampler2D uvs; -uniform sampler2D bvh; + const size = new THREE$1.Vector3(); -uniform Materials { - vec4 colorAndMaterialType[NUM_MATERIALS]; - vec4 roughnessMetalnessNormalScale[NUM_MATERIALS]; + function bvhAccel(geometry, materialIndices) { + const primitiveInfo = makePrimitiveInfo(geometry, materialIndices); + const node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); - #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) - ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS]; - #endif + return node; + } - #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) - vec4 diffuseNormalMapSize[${Math.max(defines.NUM_DIFFUSE_MAPS, defines.NUM_NORMAL_MAPS)}]; - #endif + function flattenBvh(bvh) { + const flat = []; + const isBounds = []; - #if defined(NUM_PBR_MAPS) - vec2 pbrMapSize[NUM_PBR_MAPS]; - #endif -} materials; + const splitAxisMap = { + x: 0, + y: 1, + z: 2 + }; -#ifdef NUM_DIFFUSE_MAPS - uniform mediump sampler2DArray diffuseMap; -#endif + let maxDepth = 1; + const traverse = (node, depth = 1) => { -#ifdef NUM_NORMAL_MAPS - uniform mediump sampler2DArray normalMap; -#endif + maxDepth = Math.max(depth, maxDepth); -#ifdef NUM_PBR_MAPS - uniform mediump sampler2DArray pbrMap; -#endif + if (node.primitives) { + for (let i = 0; i < node.primitives.length; i++) { + const p = node.primitives[i]; + flat.push( + p.indices[0], p.indices[1], p.indices[2], node.primitives.length, + p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex + ); + isBounds.push(false); + } + } else { + const bounds = node.bounds; -struct Triangle { - vec3 p0; - vec3 p1; - vec3 p2; -}; + flat.push( + bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], + bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild + ); -void surfaceInteractionFromIntersection(inout 
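
// --- Editor's sketch: using partition and nthElement ---
// partition and nthElement above are the building blocks of the BVH
// construction. A small usage example on plain numbers, assuming both
// functions from this file are in scope:
const values = [7, 2, 9, 4, 1, 8, 3];

// group elements satisfying the predicate in front; return the boundary index
const mid = partition(values, v => v < 5);
console.log(mid);                  // 4: the first four entries are all < 5
console.log(values.slice(0, mid)); // some ordering of [1, 2, 3, 4]

// place the k-th smallest element (under the comparator) at index k
const k = 3;
nthElement(values, (a, b) => a < b, 0, values.length, k);
console.log(values[k]);            // 4, the fourth-smallest value
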
SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) { - si.hit = true; - si.faceNormal = faceNormal; - si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2; - ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS); - ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS); - ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS); + const i = flat.length - 1; + isBounds.push(true); - vec3 n0 = texelFetch(normals, i0, 0).xyz; - vec3 n1 = texelFetch(normals, i1, 0).xyz; - vec3 n2 = texelFetch(normals, i2, 0).xyz; - si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2); + traverse(node.child0, depth + 1); + flat[i] = flat.length / 4; // pointer to second child + traverse(node.child1, depth + 1); + } + }; - si.color = materials.colorAndMaterialType[materialIndex].xyz; - si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x; - si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y; + traverse(bvh); - si.materialType = int(materials.colorAndMaterialType[materialIndex].w); + const buffer = new ArrayBuffer(4 * flat.length); + const floatView = new Float32Array(buffer); + const intView = new Int32Array(buffer); - #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) - vec2 uv0 = texelFetch(uvs, i0, 0).xy; - vec2 uv1 = texelFetch(uvs, i1, 0).xy; - vec2 uv2 = texelFetch(uvs, i2, 0).xy; - vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2); - #endif + for (let i = 0; i < isBounds.length; i++) { + let k = 8 * i; - #ifdef NUM_DIFFUSE_MAPS - int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x; - if (diffuseMapIndex >= 0) { - si.color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb; + if (isBounds[i]) { + floatView[k] = flat[k]; + floatView[k + 1] = flat[k + 1]; + floatView[k + 2] = flat[k + 2]; + intView[k + 3] = flat[k + 3]; + } else { + intView[k] = flat[k]; + intView[k + 1] = flat[k + 1]; + intView[k + 2] = flat[k + 2]; + intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle + } + + floatView[k + 4] = flat[k + 4]; + floatView[k + 5] = flat[k + 5]; + floatView[k + 6] = flat[k + 6]; + intView[k + 7] = flat[k + 7]; } - #endif - #ifdef NUM_NORMAL_MAPS - int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y; - if (normalMapIndex >= 0) { - vec2 duv02 = uv0 - uv2; - vec2 duv12 = uv1 - uv2; - vec3 dp02 = tri.p0 - tri.p2; - vec3 dp12 = tri.p1 - tri.p2; + return { + maxDepth, + count: flat.length / 4, + buffer: floatView + }; + } - // Method One - // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0 - // Compute tangent vectors relative to the face normal. These vectors won't necessarily be orthogonal to the smoothed normal - // This means the TBN matrix won't be orthogonal which is technically incorrect. 
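
// --- Editor's sketch: mixing ints and floats in one BVH buffer ---
// flattenBvh above aliases a Float32Array and an Int32Array over the same
// ArrayBuffer so each 8-value node row can mix float bounds/normals with
// integer indices, which the shader later reads back with floatBitsToInt.
// A standalone illustration (the values below are made up):
const nodeBuffer = new ArrayBuffer(4 * 8);     // one node: 8 x 32-bit values
const floatView = new Float32Array(nodeBuffer);
const intView = new Int32Array(nodeBuffer);

// first texel: bounds min as floats, split axis as a raw integer bit pattern
floatView[0] = -1.0;  // bounds.min.x
floatView[1] = -1.0;  // bounds.min.y
floatView[2] = -1.0;  // bounds.min.z
intView[3] = 2;       // split axis (z)

// second texel: bounds max as floats, index of the second child as an int
floatView[4] = 1.0;
floatView[5] = 1.0;
floatView[6] = 1.0;
intView[7] = 42;      // pointer to child1 in the flattened array

// both views read the same 32 bytes; the int slots are not meaningful floats
console.log(intView[3], floatView[3]); // 2 and a tiny denormal float
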
- // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js) - // -------------- - // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); - // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); - // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale); + function makePrimitiveInfo(geometry, materialIndices) { + const primitiveInfo = []; + const indices = geometry.getIndex().array; + const position = geometry.getAttribute('position'); + const v0 = new THREE$1.Vector3(); + const v1 = new THREE$1.Vector3(); + const v2 = new THREE$1.Vector3(); + const e0 = new THREE$1.Vector3(); + const e1 = new THREE$1.Vector3(); - // Method Two - // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal - // This might inadvertently flip coordinate space orientation - // -------------- - // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); - // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); - // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process - // vec3 dpdv = cross(si.normal, dpdu) * scale; + for (let i = 0; i < indices.length; i += 3) { + const bounds = new THREE$1.Box3(); - // Method Three - // http://www.thetenthplanet.de/archives/1180 - // Compute co-tangent and co-bitangent vectors - // These vectors are orthongal and maintain a consistent coordinate space - // -------------- - vec3 dp12perp = cross(dp12, si.normal); - vec3 dp02perp = cross(si.normal, dp02); - vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x; - vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y; - float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv))); - dpdu *= invmax; - dpdv *= invmax; + v0.fromBufferAttribute(position, indices[i]); + v1.fromBufferAttribute(position, indices[i + 1]); + v2.fromBufferAttribute(position, indices[i + 2]); + e0.subVectors(v2, v0); + e1.subVectors(v1, v0); - vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0; - n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw; + bounds.expandByPoint(v0); + bounds.expandByPoint(v1); + bounds.expandByPoint(v2); - mat3 tbn = mat3(dpdu, dpdv, si.normal); + const info = { + bounds: bounds, + center: bounds.getCenter(new THREE$1.Vector3()), + indices: [indices[i], indices[i + 1], indices[i + 2]], + faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), + materialIndex: materialIndices[i / 3] + }; - si.normal = normalize(tbn * n); + primitiveInfo.push(info); } - #endif - #ifdef NUM_PBR_MAPS - int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z; - int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w; - if (roughnessMapIndex >= 0) { - si.roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g; - } - if (metalnessMapIndex >= 0) { - si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b; + return primitiveInfo; + } + + function recursiveBuild(primitiveInfo, start, end) { + const bounds = new THREE$1.Box3(); + for (let i = start; i < end; i++) { + bounds.union(primitiveInfo[i].bounds); } - #endif -} -struct TriangleIntersect { - float t; - vec3 barycentric; -}; + const nPrimitives = end - start; -// Triangle-ray intersection -// Faster than the classic Möller–Trumbore 
intersection algorithm -// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection -TriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) { - TriangleIntersect ti; - vec3 d = r.d; + if (nPrimitives === 1) { + return makeLeafNode(primitiveInfo.slice(start, end), bounds); + } else { + const centroidBounds = new THREE$1.Box3(); + for (let i = start; i < end; i++) { + centroidBounds.expandByPoint(primitiveInfo[i].center); + } + const dim = maximumExtent(centroidBounds); + + let mid = Math.floor((start + end) / 2); + + // middle split method + // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; + // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); + + // if (mid === start || mid === end) { + // mid = Math.floor((start + end) / 2); + // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); + // } + + // surface area heuristic method + if (nPrimitives <= 4) { + nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); + } else { + const buckets = []; + for (let i = 0; i < 12; i++) { + buckets.push({ + bounds: new THREE$1.Box3(), + count: 0, + }); + } + + for (let i = start; i < end; i++) { + let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[i].center)); + if (b === buckets.length) { + b = buckets.length - 1; + } + buckets[b].count++; + buckets[b].bounds.union(primitiveInfo[i].bounds); + } + + const cost = []; + + for (let i = 0; i < buckets.length - 1; i++) { + const b0 = new THREE$1.Box3(); + const b1 = new THREE$1.Box3(); + let count0 = 0; + let count1 = 0; + for (let j = 0; j <= i; j++) { + b0.union(buckets[j].bounds); + count0 += buckets[j].count; + } + for (let j = i + 1; j < buckets.length; j++) { + b1.union(buckets[j].bounds); + count1 += buckets[j].count; + } + cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); + } + + let minCost = cost[0]; + let minCostSplitBucket = 0; + for (let i = 1; i < cost.length; i++) { + if (cost[i] < minCost) { + minCost = cost[i]; + minCostSplitBucket = i; + } + } - // translate vertices based on ray origin - vec3 p0t = tri.p0 - r.o; - vec3 p1t = tri.p1 - r.o; - vec3 p2t = tri.p2 - r.o; + mid = partition(primitiveInfo, p => { + let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); + if (b === buckets.length) { + b = buckets.length - 1; + } + return b <= minCostSplitBucket; + }, start, end); + } - // permute components of triangle vertices - if (maxDim == 0) { - p0t = p0t.yzx; - p1t = p1t.yzx; - p2t = p2t.yzx; - } else if (maxDim == 1) { - p0t = p0t.zxy; - p1t = p1t.zxy; - p2t = p2t.zxy; + return makeInteriorNode( + dim, + recursiveBuild(primitiveInfo, start, mid), + recursiveBuild(primitiveInfo, mid, end), + ); + } } - // apply shear transformation to translated vertex positions - p0t.xy += shear.xy * p0t.z; - p1t.xy += shear.xy * p1t.z; - p2t.xy += shear.xy * p2t.z; - - // compute edge function coefficients - vec3 e = vec3( - p1t.x * p2t.y - p1t.y * p2t.x, - p2t.x * p0t.y - p2t.y * p0t.x, - p0t.x * p1t.y - p0t.y * p1t.x - ); + function makeLeafNode(primitives, bounds) { + return { + primitives, + bounds + }; + } - // check if intersection is inside triangle - if (any(lessThan(e, vec3(0))) && any(greaterThan(e, vec3(0)))) { - return ti; + function makeInteriorNode(splitAxis, child0, child1) { + return { + child0, + child1, + bounds: new THREE$1.Box3().union(child0.bounds).union(child1.bounds), + splitAxis, + 
}; } - float det = e.x + e.y + e.z; + function maximumExtent(box3) { + box3.getSize(size); + if (size.x > size.z) { + return size.x > size.y ? 'x' : 'y'; + } else { + return size.z > size.y ? 'z' : 'y'; + } + } - // not needed? - // if (det == 0.) { - // return ti; - // } + function boxOffset(box3, dim, v) { + let offset = v[dim] - box3.min[dim]; - p0t.z *= shear.z; - p1t.z *= shear.z; - p2t.z *= shear.z; - float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z); + if (box3.max[dim] > box3.min[dim]){ + offset /= box3.max[dim] - box3.min[dim]; + } - // not needed? - // if (sign(det) != sign(tScaled)) { - // return ti; - // } + return offset; + } - // check if closer intersection already exists - if (abs(tScaled) > abs(r.tMax * det)) { - return ti; + function surfaceArea(box3) { + box3.getSize(size); + return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); } - float invDet = 1. / det; - ti.t = tScaled * invDet; - ti.barycentric = e * invDet; + // Convert image data from the RGBE format to a 32-bit floating point format + // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format + // Optional multiplier argument for performance optimization + function rgbeToFloat(buffer, intensity = 1) { + const texels = buffer.length / 4; + const floatBuffer = new Float32Array(texels * 3); - return ti; -} + const expTable = []; + for (let i = 0; i < 255; i++) { + expTable[i] = intensity * Math.pow(2, i - 128) / 255; + } -struct Box { - vec3 min; - vec3 max; -}; + for (let i = 0; i < texels; i++) { -// Branchless ray/box intersection -// https://tavianator.com/fast-branchless-raybounding-box-intersections/ -float intersectBox(Ray r, Box b) { - vec3 tBot = (b.min - r.o) * r.invD; - vec3 tTop = (b.max - r.o) * r.invD; - vec3 tNear = min(tBot, tTop); - vec3 tFar = max(tBot, tTop); - float t0 = max(tNear.x, max(tNear.y, tNear.z)); - float t1 = min(tFar.x, min(tFar.y, tFar.z)); + const r = buffer[4 * i]; + const g = buffer[4 * i + 1]; + const b = buffer[4 * i + 2]; + const a = buffer[4 * i + 3]; + const e = expTable[a]; - return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1); -} + floatBuffer[3 * i] = r * e; + floatBuffer[3 * i + 1] = g * e; + floatBuffer[3 * i + 2] = b * e; + } -int maxDimension(vec3 v) { - return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 1 : 2); -} + return floatBuffer; + } -// Traverse BVH, find closest triangle intersection, and return surface information -SurfaceInteraction intersectScene(inout Ray ray) { - SurfaceInteraction si; + function clamp(x, min, max) { + return Math.min(Math.max(x, min), max); + } - int maxDim = maxDimension(abs(ray.d)); + function shuffle(arr) { + for (let i = arr.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + const x = arr[i]; + arr[i] = arr[j]; + arr[j] = x; + } + return arr; + } - // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. 
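
// --- Editor's sketch: the bucketed surface area heuristic cost ---
// recursiveBuild above scores each candidate split as a small traversal
// constant plus the children's surface areas weighted by their primitive
// counts, normalized by the parent's surface area. A compact sketch of that
// cost, assuming three.js for Box3; sahCost is a name chosen here.
import * as THREE from 'three';

function boxSurfaceArea(box) {
  const size = box.getSize(new THREE.Vector3());
  return 2 * (size.x * size.y + size.x * size.z + size.y * size.z);
}

function sahCost(parentBounds, leftBounds, leftCount, rightBounds, rightCount) {
  const traversalCost = 0.1; // same constant as the build above
  return traversalCost +
    (leftCount * boxSurfaceArea(leftBounds) +
     rightCount * boxSurfaceArea(rightBounds)) / boxSurfaceArea(parentBounds);
}

// Example: splitting a 2x1x1 box down the middle into two unit cubes
const parent = new THREE.Box3(new THREE.Vector3(0, 0, 0), new THREE.Vector3(2, 1, 1));
const left = new THREE.Box3(new THREE.Vector3(0, 0, 0), new THREE.Vector3(1, 1, 1));
const right = new THREE.Box3(new THREE.Vector3(1, 0, 0), new THREE.Vector3(2, 1, 1));
console.log(sahCost(parent, left, 4, right, 4)); // 0.1 + (4*6 + 4*6) / 10 = 4.9
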
- // Then create a shear transformation that aligns ray direction with the +z axis - vec3 shear; - if (maxDim == 0) { - shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; - } else if (maxDim == 1) { - shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; - } else { - shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; + function numberArraysEqual(a, b, eps = 1e-4) { + for (let i = 0; i < a.length; i++) { + if (Math.abs(a[i] - b[i]) > eps) { + return false; + } + } + + return true; } - int nodesToVisit[STACK_SIZE]; - int stack = 0; + // Convert image data from the RGBE format to a 32-bit floating point format - nodesToVisit[0] = 0; + const DEFAULT_MAP_RESOLUTION = { + width: 2048, + height: 1024, + }; - while(stack >= 0) { - int i = nodesToVisit[stack--]; + // Tools for generating and modify env maps for lighting from scene component data - vec4 r1 = fetchData(bvh, i, BVH_COLUMNS); - vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS); + function generateBackgroundMapFromSceneBackground(background) { + let backgroundImage; + + if (background.isColor) { + backgroundImage = generateSolidMap(1, 1, background); + } else if (background.encoding === THREE$1.RGBEEncoding) { + backgroundImage = { + width: background.image.width, + height: background.image.height, + data: background.image.data, + }; + backgroundImage.data = rgbeToFloat(backgroundImage.data); + } + return backgroundImage; + } - int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); + function generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights) { + let envImage = initializeEnvMap(environmentLights); + ambientLights.forEach( light => { addAmbientLightToEnvMap(light, envImage); }); + directionalLights.forEach( light => { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); - if (splitAxisOrNumPrimitives >= 0) { - // Intersection is a bounding box. Test for box intersection and keep traversing BVH - int splitAxis = splitAxisOrNumPrimitives; + return envImage; + } - Box bbox = Box(r1.xyz, r2.xyz); + function initializeEnvMap(environmentLights) { + let envImage; - if (intersectBox(ray, bbox) > 0.0) { - // traverse near node to ray first, and far node to ray last - if (ray.d[splitAxis] > 0.0) { - nodesToVisit[++stack] = floatBitsToInt(r2.w); - nodesToVisit[++stack] = i + 2; - } else { - nodesToVisit[++stack] = i + 2; - nodesToVisit[++stack] = floatBitsToInt(r2.w); - } - } + // Initialize map from environment light if present + if (environmentLights.length > 0) { + // TODO: support multiple environment lights (what if they have different resolutions?) 
+ const environmentLight = environmentLights[0]; + envImage = { + width: environmentLight.map.image.width, + height: environmentLight.map.image.height, + data: environmentLight.map.image.data, + }; + envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); } else { - ivec3 index = floatBitsToInt(r1.xyz); - Triangle tri = Triangle( - fetchData(positions, index.x, VERTEX_COLUMNS).xyz, - fetchData(positions, index.y, VERTEX_COLUMNS).xyz, - fetchData(positions, index.z, VERTEX_COLUMNS).xyz - ); - TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear); + // initialize blank map + envImage = generateSolidMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); + } - if (hit.t > 0.0) { - ray.tMax = hit.t; - int materialIndex = floatBitsToInt(r2.w); - vec3 faceNormal = r2.xyz; - surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex); - } + return envImage; + } + + function generateSolidMap(width, height, color, intensity) { + const texels = width * height; + const floatBuffer = new Float32Array(texels * 3); + if (color && color.isColor) { + setBufferToColor(floatBuffer, color, intensity); } + return { + width: width, + height: height, + data: floatBuffer, + }; } - // Values must be clamped outside of intersection loop. Clamping inside the loop produces incorrect numbers on some devices. - si.roughness = clamp(si.roughness, 0.03, 1.0); - si.metalness = clamp(si.metalness, 0.0, 1.0); + function setBufferToColor(buffer, color, intensity = 1) { + buffer.forEach(function(part, index) { + const component = index % 3; + if (component === 0) { + buffer[index] = color.r * intensity; + } + else if (component === 1) { + buffer[index] = color.g * intensity; + } + else if (component === 2) { + buffer[index] = color.b * intensity; + } + }); + return buffer; + } + + function addAmbientLightToEnvMap(light, image) { + const color = light.color; + image.data.forEach(function(part, index) { + const component = index % 3; + if (component === 0) { + image.data[index] += color.r * light.intensity; + } + else if (component === 1) { + image.data[index] += color.g * light.intensity; + } + else if (component === 2) { + image.data[index] += color.b * light.intensity; + } + }); + } - return si; -} + function addDirectionalLightToEnvMap(light, image) { + const sphericalCoords = new THREE$1.Spherical(); + const lightDirection = light.position.clone().sub(light.target.position); -bool intersectSceneShadow(inout Ray ray) { - int maxDim = maxDimension(abs(ray.d)); + sphericalCoords.setFromVector3(lightDirection); + sphericalCoords.theta = (Math.PI * 3 / 2) - sphericalCoords.theta; + sphericalCoords.makeSafe(); - // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. 
- // Then create a shear transformation that aligns ray direction with the +z axis - vec3 shear; - if (maxDim == 0) { - shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; - } else if (maxDim == 1) { - shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; - } else { - shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; + return addLightAtCoordinates(light, image, sphericalCoords); } - int nodesToVisit[STACK_SIZE]; - int stack = 0; + // Perform modifications on env map to match input scene + function addLightAtCoordinates(light, image, originCoords) { + const floatBuffer = image.data; + const width = image.width; + const height = image.height; + const xTexels = floatBuffer.length / (3 * height); + const yTexels = floatBuffer.length / (3 * width); - nodesToVisit[0] = 0; + // default softness for standard directional lights is 0.01, i.e. a hard shadow + const softness = light.softness || 0.01; - while(stack >= 0) { - int i = nodesToVisit[stack--]; + // angle from center of light at which no more contributions are projected + const threshold = findThreshold(softness); - vec4 r1 = fetchData(bvh, i, BVH_COLUMNS); - vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS); + // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it + const useThreshold = threshold < Math.PI / 5; - int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); + // functional trick to keep the conditional check out of the main loop + const intensityFromAngleFunction = useThreshold ? getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; - if (splitAxisOrNumPrimitives >= 0) { - int splitAxis = splitAxisOrNumPrimitives; + let begunAddingContributions = false; + let currentCoords = new THREE$1.Spherical(); - Box bbox = Box(r1.xyz, r2.xyz); + // Iterates over each row from top to bottom + for (let i = 0; i < xTexels; i++) { - if (intersectBox(ray, bbox) > 0.0) { - if (ray.d[splitAxis] > 0.0) { - nodesToVisit[++stack] = floatBitsToInt(r2.w); - nodesToVisit[++stack] = i + 2; - } else { - nodesToVisit[++stack] = i + 2; - nodesToVisit[++stack] = floatBitsToInt(r2.w); + let encounteredInThisRow = false; + + // Iterates over each texel in row + for (let j = 0; j < yTexels; j++) { + const bufferIndex = j * width + i; + currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); + const falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + + if(falloff > 0) { + encounteredInThisRow = true; + begunAddingContributions = true; } + + const intensity = light.intensity * falloff; + + floatBuffer[bufferIndex * 3] += intensity * light.color.r; + floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; + floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; } - } else { - ivec3 index = floatBitsToInt(r1.xyz); - Triangle tri = Triangle( - fetchData(positions, index.x, VERTEX_COLUMNS).xyz, - fetchData(positions, index.y, VERTEX_COLUMNS).xyz, - fetchData(positions, index.z, VERTEX_COLUMNS).xyz - ); - if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) { - return true; + // First row to not add a contribution since adding began + // This means the entire light has been added and we can exit early + if(!encounteredInThisRow && begunAddingContributions) { + return floatBuffer; } } - } - return false; -} -`; + return floatBuffer; } - function random(defines) { - return ` + function findThreshold(softness) { + const step = Math.PI / 128; + const maxSteps = (2.0 * Math.PI) / step; -// Noise texture used to generate a 
different random number for each pixel. -// We use blue noise in particular, but any type of noise will work. -uniform sampler2D noise; + for (let i = 0; i < maxSteps; i++) { + const angle = i * step; + const falloff = getFalloffAtAngle(angle, softness); + if (falloff <= 0.0001) { + return angle; + } + } + } -uniform float stratifiedSamples[SAMPLING_DIMENSIONS]; -uniform float strataSize; -uniform float useStratifiedSampling; + function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { + const deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); + const deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); -// Every time we call randomSample() in the shader, and for every call to render, -// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples -// This allows us to use stratified sampling for each random variable in our path tracing -int sampleIndex = 0; + if(deltaTheta > threshold && deltaPhi > threshold) { + return 0; + } -const highp float maxUint = 1.0 / 4294967295.0; + const angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); + } -float pixelSeed; -highp uint randState; - -// simple integer hashing function -// https://en.wikipedia.org/wiki/Xorshift -uint xorshift(uint x) { - x ^= x << 13u; - x ^= x >> 17u; - x ^= x << 5u; - return x; -} + function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { + const angle = angleBetweenSphericals(originCoords, currentCoords); + return getFalloffAtAngle(angle, softness); + } -void initRandom() { - vec2 noiseSize = vec2(textureSize(noise, 0)); + function getAngleDelta(angleA, angleB) { + const diff = Math.abs(angleA - angleB) % (2 * Math.PI); + return diff > Math.PI ? 
(2 * Math.PI - diff) : diff; + } - // tile the small noise texture across the entire screen - pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r; + const angleBetweenSphericals = function() { + const originVector = new THREE$1.Vector3(); + const currentVector = new THREE$1.Vector3(); - // white noise used if stratified sampling is disabled - // produces more balanced path tracing for 1 sample-per-pixel renders - randState = xorshift(xorshift(floatBitsToUint(vCoord.x)) * xorshift(floatBitsToUint(vCoord.y))); -} + return (originCoords, currentCoords) => { + originVector.setFromSpherical(originCoords); + currentVector.setFromSpherical(currentCoords); + return originVector.angleTo(currentVector); + }; + }(); -float randomSample() { - randState = xorshift(randState); + // TODO: possibly clean this up and optimize it + // + // This function was arrived at through experimentation, it provides good + // looking results with percieved softness that scale relatively linearly with + // the softness value in the 0 - 1 range + // + // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) + // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times + function getFalloffAtAngle(angle, softness) { + const softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); + const falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); + return falloff; + } - float stratifiedSample = stratifiedSamples[sampleIndex++]; + function equirectangularToSpherical(x, y, width, height, target) { + target.phi = (Math.PI * y) / height; + target.theta = (2.0 * Math.PI * x) / width; + return target; + } - float random = mix( - float(randState) * maxUint, // white noise - fract((stratifiedSample + pixelSeed) * strataSize), // blue noise + stratified samples - useStratifiedSampling - ); + // Create a piecewise 2D cumulative distribution function of light intensity from an envmap + // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions - // transform random number between [0, 1] to (0, 1) - return EPS + (1.0 - 2.0 * EPS) * random; -} + function envmapDistribution(image) { + const data = image.data; -vec2 randomSampleVec2() { - return vec2(randomSample(), randomSample()); -} -`; - } + const cdfImage = { + width: image.width + 2, + height: image.height + 1 + }; - // Sample the environment map using a cumulative distribution function as described in - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights + const cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); - function envmap(defines) { - return ` + for (let y = 0; y < image.height; y++) { + const sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); + for (let x = 0; x < image.width; x++) { + const i = 3 * (y * image.width + x); + let r = data[i]; + let g = data[i + 1]; + let b = data[i + 2]; + let luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; + luminance *= sinTheta; + cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); + cdf.set(x + 1, y, 1, luminance); + } -uniform sampler2D envmap; -uniform sampler2D envmapDistribution; + const rowIntegral = cdf.get(cdfImage.width - 1, y, 0); -vec2 cartesianToEquirect(vec3 pointOnSphere) { - float phi = mod(atan(-pointOnSphere.z, 
-pointOnSphere.x), TWOPI); - float theta = acos(pointOnSphere.y); - return vec2(phi * 0.5 * INVPI, theta * INVPI); -} + for (let x = 1; x < cdf.width; x++) { + cdf.set(x, y, 0, cdf.get(x, y, 0) / rowIntegral); + cdf.set(x, y, 1, cdf.get(x, y, 1) / rowIntegral); + } -float getEnvmapV(float u, out int vOffset, out float pdf) { - ivec2 size = textureSize(envmap, 0); + cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); + cdf.set(0, y, 1, rowIntegral); + } - int left = 0; - int right = size.y + 1; // cdf length is the length of the envmap + 1 - while (left < right) { - int mid = (left + right) >> 1; - float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x; - if (s <= u) { - left = mid + 1; - } else { - right = mid; + const integral = cdf.get(0, cdf.height - 1, 0); + + for (let y = 0; y < cdf.height; y++) { + cdf.set(0, y, 0, cdf.get(0, y, 0) / integral); + cdf.set(0, y, 1, cdf.get(0, y, 1) / integral); } + cdfImage.data = cdf.array; + + return cdfImage; } - vOffset = left - 1; - // x channel is cumulative distribution of envmap luminance - // y channel is partial probability density of envmap luminance - vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy; - vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy; - pdf = s0.y; + function makeTextureArray(width, height, channels) { + const array = new Float32Array(channels * width * height); + + return { + set(x, y, channel, val) { + array[channels * (y * width + x) + channel] = val; + }, + get(x, y, channel) { + return array[channels * (y * width + x) + channel]; + }, + width, + height, + channels, + array + }; + } - return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y); -} + // Manually performs linear filtering if the extension OES_texture_float_linear is not supported -float getEnvmapU(float u, int vOffset, out float pdf) { - ivec2 size = textureSize(envmap, 0); + function textureLinear(defines) { + return ` - int left = 0; - int right = size.x + 1; // cdf length is the length of the envmap + 1 - while (left < right) { - int mid = (left + right) >> 1; - float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x; - if (s <= u) { - left = mid + 1; - } else { - right = mid; - } - } - int uOffset = left - 1; + vec4 textureLinear(sampler2D map, vec2 uv) { + #ifdef OES_texture_float_linear + return texture(map, uv); + #else + vec2 size = vec2(textureSize(map, 0)); + vec2 texelSize = 1.0 / size; - // x channel is cumulative distribution of envmap luminance - // y channel is partial probability density of envmap luminance - vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy; - vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy; + uv = uv * size - 0.5; + vec2 f = fract(uv); + uv = floor(uv) + 0.5; - pdf = s0.y; + vec4 s1 = texture(map, (uv + vec2(0, 0)) * texelSize); + vec4 s2 = texture(map, (uv + vec2(1, 0)) * texelSize); + vec4 s3 = texture(map, (uv + vec2(0, 1)) * texelSize); + vec4 s4 = texture(map, (uv + vec2(1, 1)) * texelSize); - return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x); -} + return mix(mix(s1, s2, f.x), mix(s3, s4, f.x), f.y); + #endif + } +`; + } -// Perform two binary searches to find light direction. 
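As a CPU-side illustration of the distribution that envmapDistribution() builds: column 0 of the cdf image holds the marginal CDF over rows, and columns 1 and up hold each row's conditional CDF, which is what the shader's getEnvmapV / getEnvmapU binary searches walk. A rough JavaScript counterpart; the function name and the coarse, non-interpolated return value are assumptions for illustration:

// Draw an approximate (u, v) sample from the cdf image returned by envmapDistribution(),
// given two uniform random numbers in [0, 1).
function sampleEnvmapCdf(cdfImage, r1, r2) {
  const { width, height, data } = cdfImage; // width = envmap width + 2, height = envmap height + 1
  const get = (x, y, channel) => data[2 * (y * width + x) + channel];

  // Binary search the marginal CDF stored in column 0.
  let lo = 0;
  let hi = height;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    if (get(0, mid, 0) <= r1) { lo = mid + 1; } else { hi = mid; }
  }
  const row = lo - 1;

  // Binary search that row's conditional CDF stored in columns 1..width-1.
  lo = 0;
  hi = width - 1;
  while (lo < hi) {
    const mid = (lo + hi) >> 1;
    if (get(1 + mid, row, 0) <= r2) { lo = mid + 1; } else { hi = mid; }
  }
  const col = lo - 1;

  return { u: col / (width - 2), v: row / (height - 1) };
}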
-vec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) { - vec2 partialPdf; - int vOffset; + function intersect(defines) { + return ` - uv.y = getEnvmapV(random.x, vOffset, partialPdf.y); - uv.x = getEnvmapU(random.y, vOffset, partialPdf.x); +uniform highp isampler2D indices; +uniform sampler2D positions; +uniform sampler2D normals; +uniform sampler2D uvs; +uniform sampler2D bvh; - float phi = uv.x * TWOPI; - float theta = uv.y * PI; - float cosTheta = cos(theta); - float sinTheta = sin(theta); - float cosPhi = cos(phi); - float sinPhi = sin(phi); +uniform Materials { + vec4 colorAndMaterialType[NUM_MATERIALS]; + vec4 roughnessMetalnessNormalScale[NUM_MATERIALS]; - vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi); + #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) + ivec4 diffuseNormalRoughnessMetalnessMapIndex[NUM_MATERIALS]; + #endif - pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta); + #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) + vec4 diffuseNormalMapSize[${Math.max(defines.NUM_DIFFUSE_MAPS, defines.NUM_NORMAL_MAPS)}]; + #endif - return dir; -} + #if defined(NUM_PBR_MAPS) + vec2 pbrMapSize[NUM_PBR_MAPS]; + #endif +} materials; -float envmapPdf(vec2 uv) { - vec2 size = vec2(textureSize(envmap, 0)); +#ifdef NUM_DIFFUSE_MAPS + uniform mediump sampler2DArray diffuseMap; +#endif - float sinTheta = sin(uv.y * PI); +#ifdef NUM_NORMAL_MAPS + uniform mediump sampler2DArray normalMap; +#endif - uv *= size; +#ifdef NUM_PBR_MAPS + uniform mediump sampler2DArray pbrMap; +#endif - float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y; - float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y; +struct Triangle { + vec3 p0; + vec3 p1; + vec3 p2; +}; - return partialX * partialY * INVPI2 / (2.0 * sinTheta); -} +void surfaceInteractionFromIntersection(inout SurfaceInteraction si, Triangle tri, vec3 barycentric, ivec3 index, vec3 faceNormal, int materialIndex) { + si.hit = true; + si.faceNormal = faceNormal; + si.position = barycentric.x * tri.p0 + barycentric.y * tri.p1 + barycentric.z * tri.p2; + ivec2 i0 = unpackTexel(index.x, VERTEX_COLUMNS); + ivec2 i1 = unpackTexel(index.y, VERTEX_COLUMNS); + ivec2 i2 = unpackTexel(index.z, VERTEX_COLUMNS); -vec3 sampleEnvmapFromDirection(vec3 d) { - vec2 uv = cartesianToEquirect(d); - return textureLinear(envmap, uv).rgb; -} + vec3 n0 = texelFetch(normals, i0, 0).xyz; + vec3 n1 = texelFetch(normals, i1, 0).xyz; + vec3 n2 = texelFetch(normals, i2, 0).xyz; + si.normal = normalize(barycentric.x * n0 + barycentric.y * n1 + barycentric.z * n2); -`; - } + si.color = materials.colorAndMaterialType[materialIndex].xyz; + si.roughness = materials.roughnessMetalnessNormalScale[materialIndex].x; + si.metalness = materials.roughnessMetalnessNormalScale[materialIndex].y; - function bsdf(defines) { - return ` + si.materialType = int(materials.colorAndMaterialType[materialIndex].w); -// Computes the exact value of the Fresnel factor -// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/ -float fresnel(float cosTheta, float eta, float invEta) { - eta = cosTheta > 0.0 ? eta : invEta; - cosTheta = abs(cosTheta); + // TODO: meshId should be the actual mesh id instead of the material id, which can be shared amoung meshes. 
+ // This will involve storing the mesh id AND the material id in the BVH texture + si.meshId = materialIndex + 1; // +1 so that the mesh id is never 0 - float gSquared = eta * eta + cosTheta * cosTheta - 1.0; + #if defined(NUM_DIFFUSE_MAPS) || defined(NUM_NORMAL_MAPS) || defined(NUM_PBR_MAPS) + vec2 uv0 = texelFetch(uvs, i0, 0).xy; + vec2 uv1 = texelFetch(uvs, i1, 0).xy; + vec2 uv2 = texelFetch(uvs, i2, 0).xy; + vec2 uv = fract(barycentric.x * uv0 + barycentric.y * uv1 + barycentric.z * uv2); + #endif - if (gSquared < 0.0) { - return 1.0; - } + #ifdef NUM_DIFFUSE_MAPS + int diffuseMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].x; + if (diffuseMapIndex >= 0) { + si.color *= texture(diffuseMap, vec3(uv * materials.diffuseNormalMapSize[diffuseMapIndex].xy, diffuseMapIndex)).rgb; + } + #endif - float g = sqrt(gSquared); + #ifdef NUM_NORMAL_MAPS + int normalMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].y; + if (normalMapIndex >= 0) { + vec2 duv02 = uv0 - uv2; + vec2 duv12 = uv1 - uv2; + vec3 dp02 = tri.p0 - tri.p2; + vec3 dp12 = tri.p1 - tri.p2; - float a = (g - cosTheta) / (g + cosTheta); - float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0); + // Method One + // http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#fragment-Computetrianglepartialderivatives-0 + // Compute tangent vectors relative to the face normal. These vectors won't necessarily be orthogonal to the smoothed normal + // This means the TBN matrix won't be orthogonal which is technically incorrect. + // This is Three.js's method (https://github.com/mrdoob/three.js/blob/dev/src/renderers/shaders/ShaderChunk/normalmap_pars_fragment.glsl.js) + // -------------- + // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); + // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); + // vec3 dpdv = normalize((-duv12.x * dp02 + duv02.x * dp12) * scale); - return 0.5 * a * a * (1.0 + b * b); -} + // Method Two + // Compute tangent vectors as in Method One but apply Gram-Schmidt process to make vectors orthogonal to smooth normal + // This might inadvertently flip coordinate space orientation + // -------------- + // float scale = sign(duv02.x * duv12.y - duv02.y * duv12.x); + // vec3 dpdu = normalize((duv12.y * dp02 - duv02.y * dp12) * scale); + // dpdu = (dpdu - dot(dpdu, si.normal) * si.normal); // Gram-Schmidt process + // vec3 dpdv = cross(si.normal, dpdu) * scale; -float fresnelSchlickWeight(float cosTheta) { - float w = 1.0 - cosTheta; - return (w * w) * (w * w) * w; -} + // Method Three + // http://www.thetenthplanet.de/archives/1180 + // Compute co-tangent and co-bitangent vectors + // These vectors are orthongal and maintain a consistent coordinate space + // -------------- + vec3 dp12perp = cross(dp12, si.normal); + vec3 dp02perp = cross(si.normal, dp02); + vec3 dpdu = dp12perp * duv02.x + dp02perp * duv12.x; + vec3 dpdv = dp12perp * duv02.y + dp02perp * duv12.y; + float invmax = inversesqrt(max(dot(dpdu, dpdu), dot(dpdv, dpdv))); + dpdu *= invmax; + dpdv *= invmax; -// Computes Schlick's approximation of the Fresnel factor -// Assumes ray is moving from a less dense to a more dense medium -float fresnelSchlick(float cosTheta, float r0) { - return mix(fresnelSchlickWeight(cosTheta), 1.0, r0); -} + vec3 n = 2.0 * texture(normalMap, vec3(uv * materials.diffuseNormalMapSize[normalMapIndex].zw, normalMapIndex)).rgb - 1.0; + n.xy *= materials.roughnessMetalnessNormalScale[materialIndex].zw; -// Computes Schlick's 
approximation of Fresnel factor -// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium -float fresnelSchlickTIR(float cosTheta, float r0, float ni) { + mat3 tbn = mat3(dpdu, dpdv, si.normal); - // moving from a more dense to a less dense medium - if (cosTheta < 0.0) { - float inv_eta = ni; - float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta); - if (SinT2 > 1.0) { - return 1.0; // total internal reflection + si.normal = normalize(tbn * n); } - cosTheta = sqrt(1.0f - SinT2); - } - - return mix(fresnelSchlickWeight(cosTheta), 1.0, r0); -} + #endif -float trowbridgeReitzD(float cosTheta, float alpha2) { - float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0; - return alpha2 / (PI * e * e); + #ifdef NUM_PBR_MAPS + int roughnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].z; + int metalnessMapIndex = materials.diffuseNormalRoughnessMetalnessMapIndex[materialIndex].w; + if (roughnessMapIndex >= 0) { + si.roughness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[roughnessMapIndex].xy, roughnessMapIndex)).g; + } + if (metalnessMapIndex >= 0) { + si.metalness *= texture(pbrMap, vec3(uv * materials.pbrMapSize[metalnessMapIndex].xy, metalnessMapIndex)).b; + } + #endif } -float trowbridgeReitzLambda(float cosTheta, float alpha2) { - float cos2Theta = cosTheta * cosTheta; - float tan2Theta = (1.0 - cos2Theta) / cos2Theta; - return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta)); -} +struct TriangleIntersect { + float t; + vec3 barycentric; +}; -// An implementation of Disney's principled BRDF -// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf -vec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) { - vec3 halfVector = normalize(viewDir + lightDir); +// Triangle-ray intersection +// Faster than the classic Möller–Trumbore intersection algorithm +// http://www.pbr-book.org/3ed-2018/Shapes/Triangle_Meshes.html#TriangleIntersection +TriangleIntersect intersectTriangle(Ray r, Triangle tri, int maxDim, vec3 shear) { + TriangleIntersect ti; + vec3 d = r.d; - cosThetaL = abs(cosThetaL); - float cosThetaV = abs(dot(si.normal, viewDir)); - float cosThetaH = abs(dot(si.normal, halfVector)); - float cosThetaD = abs(dot(lightDir, halfVector)); + // translate vertices based on ray origin + vec3 p0t = tri.p0 - r.o; + vec3 p1t = tri.p1 - r.o; + vec3 p2t = tri.p2 - r.o; - float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness); + // permute components of triangle vertices + if (maxDim == 0) { + p0t = p0t.yzx; + p1t = p1t.yzx; + p2t = p2t.yzx; + } else if (maxDim == 1) { + p0t = p0t.zxy; + p1t = p1t.zxy; + p2t = p2t.zxy; + } - float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness)); - float D = trowbridgeReitzD(cosThetaH, alpha2); + // apply shear transformation to translated vertex positions + p0t.xy += shear.xy * p0t.z; + p1t.xy += shear.xy * p1t.z; + p2t.xy += shear.xy * p2t.z; - float roughnessRemapped = 0.5 + 0.5 * si.roughness; - float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped); + // compute edge function coefficients + vec3 e = vec3( + p1t.x * p2t.y - p1t.y * p2t.x, + p2t.x * p0t.y - p2t.y * p0t.x, + p0t.x * p1t.y - p0t.y * p1t.x + ); - float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped)); + // check if intersection is inside triangle + if (any(lessThan(e, vec3(0))) && 
any(greaterThan(e, vec3(0)))) { + return ti; + } - float specular = F * D * G / (4.0 * cosThetaV * cosThetaL); - float specularPdf = D * cosThetaH / (4.0 * cosThetaD); + float det = e.x + e.y + e.z; - float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness; - float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV)); - float diffusePdf = cosThetaL * INVPI; + // not needed? + // if (det == 0.) { + // return ti; + // } - pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness); + p0t.z *= shear.z; + p1t.z *= shear.z; + p2t.z *= shear.z; + float tScaled = (e.x * p0t.z + e.y * p1t.z + e.z * p2t.z); - return mix(si.color * diffuse + specular, si.color * specular, si.metalness); -} + // not needed? + // if (sign(det) != sign(tScaled)) { + // return ti; + // } -`; + // check if closer intersection already exists + if (abs(tScaled) > abs(r.tMax * det)) { + return ti; } - function sample(defines) { - return ` + float invDet = 1. / det; + ti.t = tScaled * invDet; + ti.barycentric = e * invDet; -// https://graphics.pixar.com/library/OrthonormalB/paper.pdf -mat3 orthonormalBasis(vec3 n) { - float zsign = n.z >= 0.0 ? 1.0 : -1.0; - float a = -1.0 / (zsign + n.z); - float b = n.x * n.y * a; - vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x); - vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y); - return mat3(s, t, n); + return ti; } -// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk -vec2 sampleCircle(vec2 p) { - p = 2.0 * p - 1.0; - - bool greater = abs(p.x) > abs(p.y); +struct Box { + vec3 min; + vec3 max; +}; - float r = greater ? p.x : p.y; - float theta = greater ? 0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y); +// Branchless ray/box intersection +// https://tavianator.com/fast-branchless-raybounding-box-intersections/ +float intersectBox(Ray r, Box b) { + vec3 tBot = (b.min - r.o) * r.invD; + vec3 tTop = (b.max - r.o) * r.invD; + vec3 tNear = min(tBot, tTop); + vec3 tFar = max(tBot, tTop); + float t0 = max(tNear.x, max(tNear.y, tNear.z)); + float t1 = min(tFar.x, min(tFar.y, tFar.z)); - return r * vec2(cos(theta), sin(theta)); + return (t0 > t1 || t0 > r.tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1); } -// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling -vec3 cosineSampleHemisphere(vec2 p) { - vec2 h = sampleCircle(p); - float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y)); - return vec3(h, z); +int maxDimension(vec3 v) { + return v.x > v.y ? (v.x > v.z ? 0 : 2) : (v.y > v.z ? 
1 : 2); } +// Traverse BVH, find closest triangle intersection, and return surface information +SurfaceInteraction intersectScene(inout Ray ray) { + SurfaceInteraction si; -// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs -// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper -vec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) { - float phi = TWOPI * random.y; - float alpha = roughness * roughness; - float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x)); - float sinTheta = sqrt(1.0 - cosTheta * cosTheta); + int maxDim = maxDimension(abs(ray.d)); - vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta); + // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. + // Then create a shear transformation that aligns ray direction with the +z axis + vec3 shear; + if (maxDim == 0) { + shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; + } else if (maxDim == 1) { + shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; + } else { + shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; + } - vec3 lightDir = reflect(-viewDir, halfVector); + int nodesToVisit[STACK_SIZE]; + int stack = 0; - return lightDir; -} + nodesToVisit[0] = 0; -vec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) { - return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random); -} + while(stack >= 0) { + int i = nodesToVisit[stack--]; -float powerHeuristic(float f, float g) { - return (f * f) / (f * f + g * g); -} + vec4 r1 = fetchData(bvh, i, BVH_COLUMNS); + vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS); -`; - } + int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); - // Estimate the direct lighting integral using multiple importance sampling - // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral + if (splitAxisOrNumPrimitives >= 0) { + // Intersection is a bounding box. Test for box intersection and keep traversing BVH + int splitAxis = splitAxisOrNumPrimitives; - function sampleMaterial(defines) { - return ` + Box bbox = Box(r1.xyz, r2.xyz); -vec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) { - vec3 li; + if (intersectBox(ray, bbox) > 0.0) { + // traverse near node to ray first, and far node to ray last + if (ray.d[splitAxis] > 0.0) { + nodesToVisit[++stack] = floatBitsToInt(r2.w); + nodesToVisit[++stack] = i + 2; + } else { + nodesToVisit[++stack] = i + 2; + nodesToVisit[++stack] = floatBitsToInt(r2.w); + } + } + } else { + ivec3 index = floatBitsToInt(r1.xyz); + Triangle tri = Triangle( + fetchData(positions, index.x, VERTEX_COLUMNS).xyz, + fetchData(positions, index.y, VERTEX_COLUMNS).xyz, + fetchData(positions, index.z, VERTEX_COLUMNS).xyz + ); + TriangleIntersect hit = intersectTriangle(ray, tri, maxDim, shear); - float lightPdf; - vec2 uv; - vec3 lightDir = sampleEnvmap(random, uv, lightPdf); + if (hit.t > 0.0) { + ray.tMax = hit.t; + int materialIndex = floatBitsToInt(r2.w); + vec3 faceNormal = r2.xyz; + surfaceInteractionFromIntersection(si, tri, hit.barycentric, index, faceNormal, materialIndex); + } + } + } - float cosThetaL = dot(si.normal, lightDir); + // Values must be clamped outside of intersection loop. 
Clamping inside the loop produces incorrect numbers on some devices. + si.roughness = clamp(si.roughness, 0.03, 1.0); + si.metalness = clamp(si.metalness, 0.0, 1.0); - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; - } + return si; +} - float diffuseWeight = 1.0; - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - if (lastBounce) { - diffuseWeight = 0.0; - } else { - return li; - } - } +bool intersectSceneShadow(inout Ray ray) { + int maxDim = maxDimension(abs(ray.d)); - vec3 irr = textureLinear(envmap, uv).xyz; + // Permute space so that the z dimension is the one where the absolute value of the ray's direction is largest. + // Then create a shear transformation that aligns ray direction with the +z axis + vec3 shear; + if (maxDim == 0) { + shear = vec3(-ray.d.y, -ray.d.z, 1.0) * ray.invD.x; + } else if (maxDim == 1) { + shear = vec3(-ray.d.z, -ray.d.x, 1.0) * ray.invD.y; + } else { + shear = vec3(-ray.d.x, -ray.d.y, 1.0) * ray.invD.z; + } - float scatteringPdf; - vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf); + int nodesToVisit[STACK_SIZE]; + int stack = 0; - float weight = powerHeuristic(lightPdf, scatteringPdf); + nodesToVisit[0] = 0; - li = brdf * irr * abs(cosThetaL) * weight / lightPdf; + while(stack >= 0) { + int i = nodesToVisit[stack--]; - return li; -} + vec4 r1 = fetchData(bvh, i, BVH_COLUMNS); + vec4 r2 = fetchData(bvh, i + 1, BVH_COLUMNS); -vec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) { - vec3 li; + int splitAxisOrNumPrimitives = floatBitsToInt(r1.w); - float cosThetaL = dot(si.normal, lightDir); + if (splitAxisOrNumPrimitives >= 0) { + int splitAxis = splitAxisOrNumPrimitives; - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; - } + Box bbox = Box(r1.xyz, r2.xyz); - float diffuseWeight = 1.0; - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - if (lastBounce) { - diffuseWeight = 0.0; + if (intersectBox(ray, bbox) > 0.0) { + if (ray.d[splitAxis] > 0.0) { + nodesToVisit[++stack] = floatBitsToInt(r2.w); + nodesToVisit[++stack] = i + 2; + } else { + nodesToVisit[++stack] = i + 2; + nodesToVisit[++stack] = floatBitsToInt(r2.w); + } + } } else { - return li; + ivec3 index = floatBitsToInt(r1.xyz); + Triangle tri = Triangle( + fetchData(positions, index.x, VERTEX_COLUMNS).xyz, + fetchData(positions, index.y, VERTEX_COLUMNS).xyz, + fetchData(positions, index.z, VERTEX_COLUMNS).xyz + ); + + if (intersectTriangle(ray, tri, maxDim, shear).t > 0.0) { + return true; + } } } - vec2 uv = cartesianToEquirect(lightDir); - - float lightPdf = envmapPdf(uv); - - vec3 irr = textureLinear(envmap, uv).rgb; - - float scatteringPdf; - vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf); - - float weight = powerHeuristic(scatteringPdf, lightPdf); - - li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf; - - return li; + return false; } +`; + } -vec3 sampleMaterial(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta, inout bool abort) { - mat3 basis = orthonormalBasis(si.normal); - vec3 viewDir = -ray.d; - - vec2 diffuseOrSpecular = randomSampleVec2(); - - vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? 
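The branchless slab test used by intersectBox() above ports directly to JavaScript, which can be handy when debugging BVH bounds on the CPU. A minimal sketch, assuming array-based vectors; the function name and parameter layout are illustrative:

// Direct port of the shader's branchless ray/box intersection.
// rayOrigin, rayInvDir, boxMin, boxMax are [x, y, z] arrays; returns -1.0 on a miss.
function intersectBoxJS(rayOrigin, rayInvDir, boxMin, boxMax, tMax) {
  let t0 = -Infinity;
  let t1 = Infinity;

  for (let axis = 0; axis < 3; axis++) {
    const tBot = (boxMin[axis] - rayOrigin[axis]) * rayInvDir[axis];
    const tTop = (boxMax[axis] - rayOrigin[axis]) * rayInvDir[axis];
    t0 = Math.max(t0, Math.min(tBot, tTop)); // farthest entry across the three slabs
    t1 = Math.min(t1, Math.max(tBot, tTop)); // nearest exit across the three slabs
  }

  return (t0 > t1 || t0 > tMax) ? -1.0 : (t0 > 0.0 ? t0 : t1);
}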
- lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : - lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); + function random(defines) { + return ` - bool lastBounce = bounce == BOUNCES; +// Noise texture used to generate a different random number for each pixel. +// We use blue noise in particular, but any type of noise will work. +uniform sampler2D noise; - // Add path contribution - vec3 li = beta * ( - importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) + - importanceSampleMaterial(si, viewDir, lastBounce, lightDir) - ); +uniform float stratifiedSamples[SAMPLING_DIMENSIONS]; +uniform float strataSize; - // Get new path direction +// Every time we call randomSample() in the shader, and for every call to render, +// we want that specific bit of the shader to fetch a sample from the same position in stratifiedSamples +// This allows us to use stratified sampling for each random variable in our path tracing +int sampleIndex = 0; - lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ? - lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : - lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); +const highp float maxUint = 1.0 / 4294967295.0; - float cosThetaL = dot(si.normal, lightDir); +float pixelSeed; - float scatteringPdf; - vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); +void initRandom() { + vec2 noiseSize = vec2(textureSize(noise, 0)); - beta *= abs(cosThetaL) * brdf / scatteringPdf; + // tile the small noise texture across the entire screen + pixelSeed = texture(noise, vCoord / (pixelSize * noiseSize)).r; +} - initRay(ray, si.position + EPS * lightDir, lightDir); +float randomSample() { + float stratifiedSample = stratifiedSamples[sampleIndex++]; - // If new ray direction is pointing into the surface, - // the light path is physically impossible and we terminate the path. 
- float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - abort = orientation < 0.0; + float random = fract((stratifiedSample + pixelSeed) * strataSize); // blue noise + stratified samples - return li; + // transform random number between [0, 1] to (0, 1) + return EPS + (1.0 - 2.0 * EPS) * random; } +vec2 randomSampleVec2() { + return vec2(randomSample(), randomSample()); +} `; } - function sampleShadowCatcher (defines) { - return ` + // Sample the environment map using a cumulative distribution function as described in + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Light_Sources.html#InfiniteAreaLights -#ifdef USE_SHADOW_CATCHER + function envmap(defines) { + return ` -float importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) { - float li; +uniform sampler2D envmap; +uniform sampler2D envmapDistribution; +uniform sampler2D backgroundMap; - float lightPdf; - vec2 uv; - vec3 lightDir = sampleEnvmap(random, uv, lightPdf); +vec2 cartesianToEquirect(vec3 pointOnSphere) { + float phi = mod(atan(-pointOnSphere.z, -pointOnSphere.x), TWOPI); + float theta = acos(pointOnSphere.y); + return vec2(phi * 0.5 * INVPI, theta * INVPI); +} - float cosThetaL = dot(si.normal, lightDir); +float getEnvmapV(float u, out int vOffset, out float pdf) { + ivec2 size = textureSize(envmap, 0); - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; + int left = 0; + int right = size.y + 1; // cdf length is the length of the envmap + 1 + while (left < right) { + int mid = (left + right) >> 1; + float s = texelFetch(envmapDistribution, ivec2(0, mid), 0).x; + if (s <= u) { + left = mid + 1; + } else { + right = mid; + } } + vOffset = left - 1; - float occluded = 1.0; + // x channel is cumulative distribution of envmap luminance + // y channel is partial probability density of envmap luminance + vec2 s0 = texelFetch(envmapDistribution, ivec2(0, vOffset), 0).xy; + vec2 s1 = texelFetch(envmapDistribution, ivec2(0, vOffset + 1), 0).xy; - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - occluded = 0.0; - } + pdf = s0.y; - float irr = dot(luminance, textureLinear(envmap, uv).rgb); + return (float(vOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.y); +} - // lambertian BRDF - float brdf = INVPI; - float scatteringPdf = abs(cosThetaL) * INVPI; +float getEnvmapU(float u, int vOffset, out float pdf) { + ivec2 size = textureSize(envmap, 0); - float weight = powerHeuristic(lightPdf, scatteringPdf); + int left = 0; + int right = size.x + 1; // cdf length is the length of the envmap + 1 + while (left < right) { + int mid = (left + right) >> 1; + float s = texelFetch(envmapDistribution, ivec2(1 + mid, vOffset), 0).x; + if (s <= u) { + left = mid + 1; + } else { + right = mid; + } + } + int uOffset = left - 1; - float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf; + // x channel is cumulative distribution of envmap luminance + // y channel is partial probability density of envmap luminance + vec2 s0 = texelFetch(envmapDistribution, ivec2(1 + uOffset, vOffset), 0).xy; + vec2 s1 = texelFetch(envmapDistribution, ivec2(1 + uOffset + 1, vOffset), 0).xy; - alpha += lightEq; - li += occluded * lightEq; + pdf = s0.y; - return li; + return (float(uOffset) + (u - s0.x) / (s1.x - s0.x)) / float(size.x); } -float importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) { - float li; - - 
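The equirectangular mapping in the envmap chunk above is also useful on the CPU, for example when spot-checking env-map lookups. A small JavaScript counterpart of cartesianToEquirect, assuming a unit-length {x, y, z} direction; the JS name is illustrative:

// Maps a unit direction to [0, 1] x [0, 1] equirectangular uv, matching the GLSL convention.
function cartesianToEquirectJS(dir) {
  // atan2 returns [-PI, PI]; shift into [0, 2*PI) like the shader's mod(..., TWOPI)
  const phi = (Math.atan2(-dir.z, -dir.x) + 2 * Math.PI) % (2 * Math.PI);
  const theta = Math.acos(dir.y);

  return { u: phi / (2 * Math.PI), v: theta / Math.PI };
}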
float cosThetaL = dot(si.normal, lightDir); +// Perform two binary searches to find light direction. +vec3 sampleEnvmap(vec2 random, out vec2 uv, out float pdf) { + vec2 partialPdf; + int vOffset; - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - if (orientation < 0.0) { - return li; - } + uv.y = getEnvmapV(random.x, vOffset, partialPdf.y); + uv.x = getEnvmapU(random.y, vOffset, partialPdf.x); - float occluded = 1.0; + float phi = uv.x * TWOPI; + float theta = uv.y * PI; + float cosTheta = cos(theta); + float sinTheta = sin(theta); + float cosPhi = cos(phi); + float sinPhi = sin(phi); - Ray ray; - initRay(ray, si.position + EPS * lightDir, lightDir); - if (intersectSceneShadow(ray)) { - occluded = 0.0; - } + vec3 dir = vec3(-sinTheta * cosPhi, cosTheta, -sinTheta * sinPhi); - vec2 uv = cartesianToEquirect(lightDir); + pdf = partialPdf.x * partialPdf.y * INVPI2 / (2.0 * sinTheta); - float lightPdf = envmapPdf(uv); + return dir; +} - float irr = dot(luminance, textureLinear(envmap, uv).rgb); +float envmapPdf(vec2 uv) { + vec2 size = vec2(textureSize(envmap, 0)); - // lambertian BRDF - float brdf = INVPI; - float scatteringPdf = abs(cosThetaL) * INVPI; + float sinTheta = sin(uv.y * PI); - float weight = powerHeuristic(scatteringPdf, lightPdf); + uv *= size; - float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf; + float partialX = texelFetch(envmapDistribution, ivec2(1.0 + uv.x, uv.y), 0).y; + float partialY = texelFetch(envmapDistribution, ivec2(0, uv.y), 0).y; - alpha += lightEq; - li += occluded * lightEq; + return partialX * partialY * INVPI2 / (2.0 * sinTheta); +} - return li; +vec3 sampleEnvmapFromDirection(vec3 d) { + vec2 uv = cartesianToEquirect(d); + return textureLinear(envmap, uv).rgb; } -vec3 sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta, inout float alpha, inout vec3 prevLi, inout bool abort) { - mat3 basis = orthonormalBasis(si.normal); - vec3 viewDir = -ray.d; - vec3 color = sampleEnvmapFromDirection(-viewDir); +vec3 sampleBackgroundFromDirection(vec3 d) { + vec2 uv = cartesianToEquirect(d); + return textureLinear(backgroundMap, uv).rgb; +} - vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); +`; + } - float alphaBounce = 0.0; + function bsdf(defines) { + return ` - // Add path contribution - vec3 li = beta * color * ( - importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) + - importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce) - ); +// Computes the exact value of the Fresnel factor +// https://seblagarde.wordpress.com/2013/04/29/memo-on-fresnel-equations/ +float fresnel(float cosTheta, float eta, float invEta) { + eta = cosTheta > 0.0 ? eta : invEta; + cosTheta = abs(cosTheta); - // alphaBounce contains the lighting of the shadow catcher *without* shadows - alphaBounce = alphaBounce == 0.0 ? 
1.0 : alphaBounce; + float gSquared = eta * eta + cosTheta * cosTheta - 1.0; - // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher - alpha *= alphaBounce; + if (gSquared < 0.0) { + return 1.0; + } - // we only want the alpha division to affect the shadow catcher - // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution - prevLi *= alphaBounce; + float g = sqrt(gSquared); - // Get new path direction + float a = (g - cosTheta) / (g + cosTheta); + float b = (cosTheta * (g + cosTheta) - 1.0) / (cosTheta * (g - cosTheta) + 1.0); - lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); + return 0.5 * a * a * (1.0 + b * b); +} - float cosThetaL = dot(si.normal, lightDir); +float fresnelSchlickWeight(float cosTheta) { + float w = 1.0 - cosTheta; + return (w * w) * (w * w) * w; +} - // lambertian brdf with terms cancelled - beta *= color; +// Computes Schlick's approximation of the Fresnel factor +// Assumes ray is moving from a less dense to a more dense medium +float fresnelSchlick(float cosTheta, float r0) { + return mix(fresnelSchlickWeight(cosTheta), 1.0, r0); +} - initRay(ray, si.position + EPS * lightDir, lightDir); +// Computes Schlick's approximation of Fresnel factor +// Accounts for total internal reflection if ray is moving from a more dense to a less dense medium +float fresnelSchlickTIR(float cosTheta, float r0, float ni) { - // If new ray direction is pointing into the surface, - // the light path is physically impossible and we terminate the path. - float orientation = dot(si.faceNormal, viewDir) * cosThetaL; - abort = orientation < 0.0; + // moving from a more dense to a less dense medium + if (cosTheta < 0.0) { + float inv_eta = ni; + float SinT2 = inv_eta * inv_eta * (1.0f - cosTheta * cosTheta); + if (SinT2 > 1.0) { + return 1.0; // total internal reflection + } + cosTheta = sqrt(1.0f - SinT2); + } - // advance dimension index by unused stratified samples - const int usedSamples = 6; - sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; + return mix(fresnelSchlickWeight(cosTheta), 1.0, r0); +} - return li; +float trowbridgeReitzD(float cosTheta, float alpha2) { + float e = cosTheta * cosTheta * (alpha2 - 1.0) + 1.0; + return alpha2 / (PI * e * e); } -#endif -`; - } +float trowbridgeReitzLambda(float cosTheta, float alpha2) { + float cos2Theta = cosTheta * cosTheta; + float tan2Theta = (1.0 - cos2Theta) / cos2Theta; + return 0.5 * (-1.0 + sqrt(1.0 + alpha2 * tan2Theta)); +} - function sampleGlass (defines) { - return ` +// An implementation of Disney's principled BRDF +// https://disney-animation.s3.amazonaws.com/library/s2012_pbs_disney_brdf_notes_v2.pdf +vec3 materialBrdf(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, float cosThetaL, float diffuseWeight, out float pdf) { + vec3 halfVector = normalize(viewDir + lightDir); -#ifdef USE_GLASS + cosThetaL = abs(cosThetaL); + float cosThetaV = abs(dot(si.normal, viewDir)); + float cosThetaH = abs(dot(si.normal, halfVector)); + float cosThetaD = abs(dot(lightDir, halfVector)); -vec3 sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Ray ray, inout vec3 beta) { - vec3 viewDir = -ray.d; - float cosTheta = dot(si.normal, viewDir); + float alpha2 = (si.roughness * si.roughness) * (si.roughness * si.roughness); - float F = si.materialType == THIN_GLASS ? 
- fresnelSchlick(abs(cosTheta), R0) : // thin glass - fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass + float F = fresnelSchlick(cosThetaD, mix(R0, 0.6, si.metalness)); + float D = trowbridgeReitzD(cosThetaH, alpha2); - vec3 lightDir; + float roughnessRemapped = 0.5 + 0.5 * si.roughness; + float alpha2Remapped = (roughnessRemapped * roughnessRemapped) * (roughnessRemapped * roughnessRemapped); - float reflectionOrRefraction = randomSample(); + float G = 1.0 / (1.0 + trowbridgeReitzLambda(cosThetaV, alpha2Remapped) + trowbridgeReitzLambda(cosThetaL, alpha2Remapped)); - if (reflectionOrRefraction < F) { - lightDir = reflect(-viewDir, si.normal); - } else { - lightDir = si.materialType == THIN_GLASS ? - refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass - refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass - beta *= si.color; - } + float specular = F * D * G / (4.0 * cosThetaV * cosThetaL); + float specularPdf = D * cosThetaH / (4.0 * cosThetaD); - initRay(ray, si.position + EPS * lightDir, lightDir); + float f = -0.5 + 2.0 * cosThetaD * cosThetaD * si.roughness; + float diffuse = diffuseWeight * INVPI * (1.0 + f * fresnelSchlickWeight(cosThetaL)) * (1.0 + f * fresnelSchlickWeight(cosThetaV)); + float diffusePdf = cosThetaL * INVPI; - // advance sample index by unused stratified samples - const int usedSamples = 1; - sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; + pdf = mix(0.5 * (specularPdf + diffusePdf), specularPdf, si.metalness); - return bounce == BOUNCES ? beta * sampleEnvmapFromDirection(lightDir) : vec3(0.0); + return mix(si.color * diffuse + specular, si.color * specular, si.metalness); } -#endif - `; } - function unrollLoop(indexName, start, limit, step, code) { - let unrolled = `int ${indexName};\n`; + function sample(defines) { + return ` - for (let i = start; (step > 0 && i < limit) || (step < 0 && i > limit); i += step) { - unrolled += `${indexName} = ${i};\n`; - unrolled += code; - } +// https://graphics.pixar.com/library/OrthonormalB/paper.pdf +mat3 orthonormalBasis(vec3 n) { + float zsign = n.z >= 0.0 ? 1.0 : -1.0; + float a = -1.0 / (zsign + n.z); + float b = n.x * n.y * a; + vec3 s = vec3(1.0 + zsign * n.x * n.x * a, zsign * b, -zsign * n.x); + vec3 t = vec3(b, zsign + n.y * n.y * a, -n.y); + return mat3(s, t, n); +} - return unrolled; - } +// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#SamplingaUnitDisk +vec2 sampleCircle(vec2 p) { + p = 2.0 * p - 1.0; - function addDefines(params) { - let defines = ''; + bool greater = abs(p.x) > abs(p.y); - for (let [name, value] of Object.entries(params)) { - // don't define falsy values such as false, 0, and ''. - // this adds support for #ifdef on falsy values - if (value) { - defines += `#define ${name} ${value}\n`; - } - } + float r = greater ? p.x : p.y; + float theta = greater ? 
0.25 * PI * p.y / p.x : PI * (0.5 - 0.25 * p.x / p.y); - return defines; - } + return r * vec2(cos(theta), sin(theta)); +} - function fragString(defines) { - return `#version 300 es +// http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Cosine-WeightedHemisphereSampling +vec3 cosineSampleHemisphere(vec2 p) { + vec2 h = sampleCircle(p); + float z = sqrt(max(0.0, 1.0 - h.x * h.x - h.y * h.y)); + return vec3(h, z); +} -precision mediump float; -precision mediump int; -${addDefines(defines)} +// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Sampling_Reflection_Functions.html#MicrofacetBxDFs +// Instead of Beckmann distrubtion, we use the GTR2 (GGX) distrubtion as covered in Disney's Principled BRDF paper +vec3 lightDirSpecular(vec3 faceNormal, vec3 viewDir, mat3 basis, float roughness, vec2 random) { + float phi = TWOPI * random.y; + float alpha = roughness * roughness; + float cosTheta = sqrt((1.0 - random.x) / (1.0 + (alpha * alpha - 1.0) * random.x)); + float sinTheta = sqrt(1.0 - cosTheta * cosTheta); -#define PI 3.14159265359 -#define TWOPI 6.28318530718 -#define INVPI 0.31830988618 -#define INVPI2 0.10132118364 -#define EPS 0.0005 -#define INF 1.0e999 -#define RAY_MAX_DISTANCE 9999.0 + vec3 halfVector = basis * sign(dot(faceNormal, viewDir)) * vec3(sinTheta * cos(phi), sinTheta * sin(phi), cosTheta); -#define STANDARD 0 -#define THIN_GLASS 1 -#define THICK_GLASS 2 -#define SHADOW_CATCHER 3 + vec3 lightDir = reflect(-viewDir, halfVector); -#define SAMPLES_PER_MATERIAL 8 + return lightDir; +} -const float IOR = 1.5; -const float INV_IOR = 1.0 / IOR; +vec3 lightDirDiffuse(vec3 faceNormal, vec3 viewDir, mat3 basis, vec2 random) { + return basis * sign(dot(faceNormal, viewDir)) * cosineSampleHemisphere(random); +} -const float IOR_THIN = 1.015; -const float INV_IOR_THIN = 1.0 / IOR_THIN; +float powerHeuristic(float f, float g) { + return (f * f) / (f * f + g * g); +} -const float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR)); +`; + } -// https://www.w3.org/WAI/GL/wiki/Relative_luminance -const vec3 luminance = vec3(0.2126, 0.7152, 0.0722); + // Estimate the direct lighting integral using multiple importance sampling + // http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Direct_Lighting.html#EstimatingtheDirectLightingIntegral -struct Ray { - vec3 o; - vec3 d; - vec3 invD; - float tMax; -}; + function sampleMaterial(defines) { + return ` -struct SurfaceInteraction { - bool hit; - vec3 position; - vec3 normal; // smoothed normal from the three triangle vertices - vec3 faceNormal; // normal of the triangle - vec3 color; - float roughness; - float metalness; - int materialType; -}; +vec3 importanceSampleLight(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec2 random) { + vec3 li; -struct Camera { - mat4 transform; - float aspect; - float fov; - float focus; - float aperture; -}; + float lightPdf; + vec2 uv; + vec3 lightDir = sampleEnvmap(random, uv, lightPdf); -uniform Camera camera; -uniform vec2 pixelSize; // 1 / screenResolution + float cosThetaL = dot(si.normal, lightDir); -in vec2 vCoord; + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; + } -out vec4 fragColor; + float diffuseWeight = 1.0; + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + if (lastBounce) { + diffuseWeight = 0.0; + } else { + return li; + } + } -void initRay(inout Ray ray, vec3 origin, vec3 
direction) { - ray.o = origin; - ray.d = direction; - ray.invD = 1.0 / ray.d; - ray.tMax = RAY_MAX_DISTANCE; -} + vec3 irr = textureLinear(envmap, uv).xyz; -// given the index from a 1D array, retrieve corresponding position from packed 2D texture -ivec2 unpackTexel(int i, int columnsLog2) { - ivec2 u; - u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2) - u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2) - return u; -} + float scatteringPdf; + vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf); -vec4 fetchData(sampler2D s, int i, int columnsLog2) { - return texelFetch(s, unpackTexel(i, columnsLog2), 0); -} + float weight = powerHeuristic(lightPdf, scatteringPdf); -ivec4 fetchData(isampler2D s, int i, int columnsLog2) { - return texelFetch(s, unpackTexel(i, columnsLog2), 0); -} + li = brdf * irr * abs(cosThetaL) * weight / lightPdf; -${textureLinear()} -${intersect(defines)} -${random()} -${envmap()} -${bsdf()} -${sample()} -${sampleMaterial()} -${sampleGlass()} -${sampleShadowCatcher()} + return li; +} -struct Path { - Ray ray; +vec3 importanceSampleMaterial(SurfaceInteraction si, vec3 viewDir, bool lastBounce, vec3 lightDir) { vec3 li; - float alpha; - vec3 beta; - bool specularBounce; - bool abort; -}; -void bounce(inout Path path, int i) { - if (path.abort) { - return; - } + float cosThetaL = dot(si.normal, lightDir); - SurfaceInteraction si = intersectScene(path.ray); + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; + } - if (!si.hit) { - if (path.specularBounce) { - path.li += path.beta * sampleEnvmapFromDirection(path.ray.d); + float diffuseWeight = 1.0; + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + if (lastBounce) { + diffuseWeight = 0.0; + } else { + return li; } + } - path.abort = true; - } else { - #ifdef USE_GLASS - if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) { - path.li += sampleGlassSpecular(si, i, path.ray, path.beta); - path.specularBounce = true; - } - #endif - #ifdef USE_SHADOW_CATCHER - if (si.materialType == SHADOW_CATCHER) { - path.li += sampleShadowCatcher(si, i, path.ray, path.beta, path.alpha, path.li, path.abort); - path.specularBounce = false; - } - #endif - if (si.materialType == STANDARD) { - path.li += sampleMaterial(si, i, path.ray, path.beta, path.abort); - path.specularBounce = false; - } + vec2 uv = cartesianToEquirect(lightDir); - // Russian Roulette sampling - if (i >= 2) { - float q = 1.0 - dot(path.beta, luminance); - if (randomSample() < q) { - path.abort = true; - } - path.beta /= 1.0 - q; - } - } -} + float lightPdf = envmapPdf(uv); -// Path tracing integrator as described in -// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html# -vec4 integrator(inout Ray ray) { - Path path; - path.ray = ray; - path.li = vec3(0); - path.alpha = 1.0; - path.beta = vec3(1.0); - path.specularBounce = true; - path.abort = false; + vec3 irr = textureLinear(envmap, uv).rgb; - // Manually unroll for loop. 
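To keep the bounce loop working on GPUs that mishandle dynamic GLSL loops, the integrator is assembled with the unrollLoop string helper shown earlier. As a quick illustration of what that expansion produces (output shown as a comment, assuming BOUNCES = 2):

// unrollLoop('i', 1, 3, 1, 'bounce(path, i);\n') returns the string:
//
//   int i;
//   i = 1;
//   bounce(path, i);
//   i = 2;
//   bounce(path, i);
//
// i.e. the GLSL for-loop is flattened into straight-line code before compilation.
const unrolledBounces = unrollLoop('i', 1, 3, 1, 'bounce(path, i);\n');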
- // Some hardware fails to interate over a GLSL loop, so we provide this workaround + float scatteringPdf; + vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, diffuseWeight, scatteringPdf); - ${unrollLoop('i', 1, defines.BOUNCES + 1, 1, ` - // equivelant to - // for (int i = 1; i < defines.bounces + 1, i += 1) - bounce(path, i); - `)} + float weight = powerHeuristic(scatteringPdf, lightPdf); - return vec4(path.li, path.alpha); + li += brdf * irr * abs(cosThetaL) * weight / scatteringPdf; + + return li; } -void main() { - initRandom(); +void sampleMaterial(SurfaceInteraction si, int bounce, inout Path path) { + mat3 basis = orthonormalBasis(si.normal); + vec3 viewDir = -path.ray.d; - vec2 vCoordAntiAlias = vCoord + pixelSize * (randomSampleVec2() - 0.5); + vec2 diffuseOrSpecular = randomSampleVec2(); - vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov)); + vec3 lightDir = diffuseOrSpecular.x < mix(0.5, 0.0, si.metalness) ? + lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : + lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - // Thin lens model with depth-of-field - // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField - vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2()); - vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane + bool lastBounce = bounce == BOUNCES; - vec3 origin = vec3(lensPoint, 0.0); - direction = normalize(focusPoint - origin); + // Add path contribution + path.li += path.beta * ( + importanceSampleLight(si, viewDir, lastBounce, randomSampleVec2()) + + importanceSampleMaterial(si, viewDir, lastBounce, lightDir) + ); - origin = vec3(camera.transform * vec4(origin, 1.0)); - direction = mat3(camera.transform) * direction; + // Get new path direction - Ray cam; - initRay(cam, origin, direction); + lightDir = diffuseOrSpecular.y < mix(0.5, 0.0, si.metalness) ? + lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()) : + lightDirSpecular(si.faceNormal, viewDir, basis, si.roughness, randomSampleVec2()); - vec4 liAndAlpha = integrator(cam); + float cosThetaL = dot(si.normal, lightDir); - if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) { - liAndAlpha = vec4(0, 0, 0, 1); - } + float scatteringPdf; + vec3 brdf = materialBrdf(si, viewDir, lightDir, cosThetaL, 1.0, scatteringPdf); - fragColor = liAndAlpha; + path.beta *= abs(cosThetaL) * brdf / scatteringPdf; - // Stratified Sampling Sample Count Test - // --------------- - // Uncomment the following code - // Then observe the colors of the image - // If: - // * The resulting image is pure black - // Extra samples are being passed to the shader that aren't being used. - // * The resulting image contains red - // Not enough samples are being passed to the shader - // * The resulting image contains only white with some black - // All samples are used by the shader. Correct result! + initRay(path.ray, si.position + EPS * lightDir, lightDir); - // fragColor = vec4(0, 0, 0, 1); - // if (sampleIndex == SAMPLING_DIMENSIONS) { - // fragColor = vec4(1, 1, 1, 1); - // } else if (sampleIndex > SAMPLING_DIMENSIONS) { - // fragColor = vec4(1, 0, 0, 1); - // } + // If new ray direction is pointing into the surface, + // the light path is physically impossible and we terminate the path. 
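The direct-light estimate above combines one light sample and one BRDF sample with the power heuristic. A small numeric sketch of that weighting; the pdf values are illustrative only:

// Power heuristic (exponent 2), as defined in the sample() chunk above.
function powerHeuristic(f, g) {
  return (f * f) / (f * f + g * g);
}

// Example: a light sample with pdf 0.8 against a BRDF sample with pdf 0.1.
// The strategy with the higher sampling density keeps most of the weight,
// and for any pair of pdfs the two weights sum to 1.
const wLight = powerHeuristic(0.8, 0.1); // ~0.985
const wBrdf = powerHeuristic(0.1, 0.8);  // ~0.015
console.log(wLight + wBrdf);             // 1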
+ float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + path.abort = orientation < 0.0; + + path.specularBounce = false; } + `; } - function mergeMeshesToGeometry(meshes) { - - let vertexCount = 0; - let indexCount = 0; - - const geometryAndMaterialIndex = []; - const materialIndexMap = new Map(); - - for (const mesh of meshes) { - const geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); - - const index = geometry.getIndex(); - if (!index) { - addFlatGeometryIndices(geometry); - } + function sampleShadowCatcher (defines) { + return ` - geometry.applyMatrix(mesh.matrixWorld); +#ifdef USE_SHADOW_CATCHER - if (!geometry.getAttribute('normal')) { - geometry.computeVertexNormals(); - } else { - geometry.normalizeNormals(); - } +float importanceSampleLightShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec2 random, inout float alpha) { + float li; - vertexCount += geometry.getAttribute('position').count; - indexCount += geometry.getIndex().count; + float lightPdf; + vec2 uv; + vec3 lightDir = sampleEnvmap(random, uv, lightPdf); - const material = mesh.material; - let materialIndex = materialIndexMap.get(material); - if (materialIndex === undefined) { - materialIndex = materialIndexMap.size; - materialIndexMap.set(material, materialIndex); - } + float cosThetaL = dot(si.normal, lightDir); - geometryAndMaterialIndex.push({ - geometry, - materialIndex - }); - } + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; + } - const { geometry, materialIndices } = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); + float occluded = 1.0; - return { - geometry, - materialIndices, - materials: Array.from(materialIndexMap.keys()) - }; + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + occluded = 0.0; } - function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { - const position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - const normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); - const uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); - const index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); + float irr = dot(luminance, textureLinear(envmap, uv).rgb); - const materialIndices = []; + // lambertian BRDF + float brdf = INVPI; + float scatteringPdf = abs(cosThetaL) * INVPI; - const bg = new THREE$1.BufferGeometry(); - bg.addAttribute('position', position); - bg.addAttribute('normal', normal); - bg.addAttribute('uv', uv); - bg.setIndex(index); + float weight = powerHeuristic(lightPdf, scatteringPdf); - let currentVertex = 0; - let currentIndex = 0; + float lightEq = irr * brdf * abs(cosThetaL) * weight / lightPdf; - for (const { geometry, materialIndex } of geometryAndMaterialIndex) { - const vertexCount = geometry.getAttribute('position').count; - bg.merge(geometry, currentVertex); + alpha += lightEq; + li += occluded * lightEq; - const meshIndex = geometry.getIndex(); - for (let i = 0; i < meshIndex.count; i++) { - index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); - } + return li; +} - const triangleCount = meshIndex.count / 3; - for (let i = 0; i < triangleCount; i++) { - materialIndices.push(materialIndex); - } +float importanceSampleMaterialShadowCatcher(SurfaceInteraction si, vec3 viewDir, vec3 lightDir, inout float alpha) { + float li; - currentVertex += vertexCount; - currentIndex += meshIndex.count; - 
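      // Worked example of the index/vertex offsetting in this loop: if the first
      // merged mesh contributed 100 vertices and 150 indices, then index value 7
      // of the second mesh is written as 100 + 7 at slot 150 + i of the merged
      // index buffer, and one material index is pushed per triangle (count / 3).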
} + float cosThetaL = dot(si.normal, lightDir); - return { geometry: bg, materialIndices }; + float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + if (orientation < 0.0) { + return li; } - // Similar to buffergeometry.clone(), except we only copy - // specific attributes instead of everything - function cloneBufferGeometry(bufferGeometry, attributes) { - const newGeometry = new THREE$1.BufferGeometry(); + float occluded = 1.0; - for (const name of attributes) { - const attrib = bufferGeometry.getAttribute(name); - if (attrib) { - newGeometry.addAttribute(name, attrib.clone()); - } - } + Ray ray; + initRay(ray, si.position + EPS * lightDir, lightDir); + if (intersectSceneShadow(ray)) { + occluded = 0.0; + } - const index = bufferGeometry.getIndex(); - if (index) { - newGeometry.setIndex(index); - } + vec2 uv = cartesianToEquirect(lightDir); - return newGeometry; - } + float lightPdf = envmapPdf(uv); - function addFlatGeometryIndices(geometry) { - const position = geometry.getAttribute('position'); + float irr = dot(luminance, textureLinear(envmap, uv).rgb); - if (!position) { - console.warn('No position attribute'); - return; - } + // lambertian BRDF + float brdf = INVPI; + float scatteringPdf = abs(cosThetaL) * INVPI; - const index = new Uint32Array(position.count); + float weight = powerHeuristic(scatteringPdf, lightPdf); - for (let i = 0; i < index.length; i++) { - index[i] = i; - } + float lightEq = irr * brdf * abs(cosThetaL) * weight / scatteringPdf; - geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + alpha += lightEq; + li += occluded * lightEq; - return geometry; - } + return li; +} - // Reorders the elements in the range [first, last) in such a way that - // all elements for which the comparator c returns true - // precede the elements for which comparator c returns false. - function partition(array, compare, left = 0, right = array.length) { - while (left !== right) { - while (compare(array[left])) { - left++; - if (left === right) { - return left; - } - } - do { - right--; - if (left === right) { - return left; - } - } while (!compare(array[right])); +void sampleShadowCatcher(SurfaceInteraction si, int bounce, inout Path path) { + mat3 basis = orthonormalBasis(si.normal); + vec3 viewDir = -path.ray.d; + vec3 color = bounce > 1 && !path.specularBounce ? sampleEnvmapFromDirection(-viewDir) : sampleBackgroundFromDirection(-viewDir); - swap(array, left, right); - left++; - } + vec3 lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - return left; - } + float alphaBounce = 0.0; - // nth_element is a partial sorting algorithm that rearranges elements in [first, last) such that: - // The element pointed at by nth is changed to whatever element would occur in that position if [first, last) were sorted. 
- // All of the elements before this new nth element compare to true with elements after the nth element - function nthElement(array, compare, left = 0, right = array.length, k = Math.floor((left + right) / 2)) { - for (let i = left; i <= k; i++) { - let minIndex = i; - let minValue = array[i]; - for (let j = i + 1; j < right; j++) { - if (!compare(minValue, array[j])) { - minIndex = j; - minValue = array[j]; - swap(array, i, minIndex); - } - } - } - } + vec3 li = path.beta * color * ( + importanceSampleLightShadowCatcher(si, viewDir, randomSampleVec2(), alphaBounce) + + importanceSampleMaterialShadowCatcher(si, viewDir, lightDir, alphaBounce) + ); - function swap(array, a, b) { - const x = array[b]; - array[b] = array[a]; - array[a] = x; - } + // alphaBounce contains the lighting of the shadow catcher *without* shadows + alphaBounce = alphaBounce == 0.0 ? 1.0 : alphaBounce; - // Create a bounding volume hierarchy of scene geometry + // in post processing step, we divide by alpha to obtain the percentage of light relative to shadow for the shadow catcher + path.alpha *= alphaBounce; - const size = new THREE$1.Vector3(); + // we only want the alpha division to affect the shadow catcher + // factor in alpha to the previous light, so that dividing by alpha with the previous light cancels out this contribution + path.li *= alphaBounce; - function bvhAccel(geometry, materialIndices) { - const primitiveInfo = makePrimitiveInfo(geometry, materialIndices); - const node = recursiveBuild(primitiveInfo, 0, primitiveInfo.length); + // add path contribution + path.li += li; - return node; - } + // Get new path direction - function flattenBvh(bvh) { - const flat = []; - const isBounds = []; + lightDir = lightDirDiffuse(si.faceNormal, viewDir, basis, randomSampleVec2()); - const splitAxisMap = { - x: 0, - y: 1, - z: 2 - }; + float cosThetaL = dot(si.normal, lightDir); - let maxDepth = 1; - const traverse = (node, depth = 1) => { + // lambertian brdf with terms cancelled + path.beta *= color; - maxDepth = Math.max(depth, maxDepth); + initRay(path.ray, si.position + EPS * lightDir, lightDir); - if (node.primitives) { - for (let i = 0; i < node.primitives.length; i++) { - const p = node.primitives[i]; - flat.push( - p.indices[0], p.indices[1], p.indices[2], node.primitives.length, - p.faceNormal.x, p.faceNormal.y, p.faceNormal.z, p.materialIndex - ); - isBounds.push(false); - } - } else { - const bounds = node.bounds; + // If new ray direction is pointing into the surface, + // the light path is physically impossible and we terminate the path. 
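  // In JS terms, the check that follows multiplies dot(faceNormal, viewDir) by
  // dot(normal, lightDir); a negative product means the view and sampled light
  // directions lie on opposite sides of the surface, so the path is aborted.
  // Minimal sketch (the vector helper is assumed, not part of the renderer):
  const dot3 = (a, b) => a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
  function isImpossiblePath(faceNormal, smoothNormal, viewDir, lightDir) {
    return dot3(faceNormal, viewDir) * dot3(smoothNormal, lightDir) < 0;
  }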
+ float orientation = dot(si.faceNormal, viewDir) * cosThetaL; + path.abort = orientation < 0.0; - flat.push( - bounds.min.x, bounds.min.y, bounds.min.z, splitAxisMap[node.splitAxis], - bounds.max.x, bounds.max.y, bounds.max.z, null // pointer to second shild - ); + path.specularBounce = false; - const i = flat.length - 1; - isBounds.push(true); + // advance dimension index by unused stratified samples + const int usedSamples = 6; + sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; +} - traverse(node.child0, depth + 1); - flat[i] = flat.length / 4; // pointer to second child - traverse(node.child1, depth + 1); - } - }; +#endif +`; + } - traverse(bvh); + function sampleGlass (defines) { + return ` - const buffer = new ArrayBuffer(4 * flat.length); - const floatView = new Float32Array(buffer); - const intView = new Int32Array(buffer); +#ifdef USE_GLASS - for (let i = 0; i < isBounds.length; i++) { - let k = 8 * i; +void sampleGlassSpecular(SurfaceInteraction si, int bounce, inout Path path) { + vec3 viewDir = -path.ray.d; + float cosTheta = dot(si.normal, viewDir); - if (isBounds[i]) { - floatView[k] = flat[k]; - floatView[k + 1] = flat[k + 1]; - floatView[k + 2] = flat[k + 2]; - intView[k + 3] = flat[k + 3]; - } else { - intView[k] = flat[k]; - intView[k + 1] = flat[k + 1]; - intView[k + 2] = flat[k + 2]; - intView[k + 3] = -flat[k + 3]; // negative signals to shader that this node is a triangle - } + float F = si.materialType == THIN_GLASS ? + fresnelSchlick(abs(cosTheta), R0) : // thin glass + fresnelSchlickTIR(cosTheta, R0, IOR); // thick glass - floatView[k + 4] = flat[k + 4]; - floatView[k + 5] = flat[k + 5]; - floatView[k + 6] = flat[k + 6]; - intView[k + 7] = flat[k + 7]; - } + vec3 lightDir; - return { - maxDepth, - count: flat.length / 4, - buffer: floatView - }; + float reflectionOrRefraction = randomSample(); + + if (reflectionOrRefraction < F) { + lightDir = reflect(-viewDir, si.normal); + } else { + lightDir = si.materialType == THIN_GLASS ? + refract(-viewDir, sign(cosTheta) * si.normal, INV_IOR_THIN) : // thin glass + refract(-viewDir, sign(cosTheta) * si.normal, cosTheta < 0.0 ? IOR : INV_IOR); // thick glass + path.beta *= si.color; } - function makePrimitiveInfo(geometry, materialIndices) { - const primitiveInfo = []; - const indices = geometry.getIndex().array; - const position = geometry.getAttribute('position'); - const v0 = new THREE$1.Vector3(); - const v1 = new THREE$1.Vector3(); - const v2 = new THREE$1.Vector3(); - const e0 = new THREE$1.Vector3(); - const e1 = new THREE$1.Vector3(); + initRay(path.ray, si.position + EPS * lightDir, lightDir); - for (let i = 0; i < indices.length; i += 3) { - const bounds = new THREE$1.Box3(); + // advance sample index by unused stratified samples + const int usedSamples = 1; + sampleIndex += SAMPLES_PER_MATERIAL - usedSamples; - v0.fromBufferAttribute(position, indices[i]); - v1.fromBufferAttribute(position, indices[i + 1]); - v2.fromBufferAttribute(position, indices[i + 2]); - e0.subVectors(v2, v0); - e1.subVectors(v1, v0); + path.li += bounce == BOUNCES ? 
path.beta * sampleBackgroundFromDirection(lightDir) : vec3(0.0); +} - bounds.expandByPoint(v0); - bounds.expandByPoint(v1); - bounds.expandByPoint(v2); +#endif - const info = { - bounds: bounds, - center: bounds.getCenter(new THREE$1.Vector3()), - indices: [indices[i], indices[i + 1], indices[i + 2]], - faceNormal: new THREE$1.Vector3().crossVectors(e1, e0).normalize(), - materialIndex: materialIndices[i / 3] - }; +`; + } - primitiveInfo.push(info); + function unrollLoop(indexName, start, limit, step, code) { + let unrolled = `int ${indexName};\n`; + + for (let i = start; (step > 0 && i < limit) || (step < 0 && i > limit); i += step) { + unrolled += `${indexName} = ${i};\n`; + unrolled += code; } - return primitiveInfo; + return unrolled; } - function recursiveBuild(primitiveInfo, start, end) { - const bounds = new THREE$1.Box3(); - for (let i = start; i < end; i++) { - bounds.union(primitiveInfo[i].bounds); + function addDefines(params) { + let defines = ''; + + for (let [name, value] of Object.entries(params)) { + // don't define falsy values such as false, 0, and ''. + // this adds support for #ifdef on falsy values + if (value) { + defines += `#define ${name} ${value}\n`; + } } - const nPrimitives = end - start; + return defines; + } - if (nPrimitives === 1) { - return makeLeafNode(primitiveInfo.slice(start, end), bounds); - } else { - const centroidBounds = new THREE$1.Box3(); - for (let i = start; i < end; i++) { - centroidBounds.expandByPoint(primitiveInfo[i].center); - } - const dim = maximumExtent(centroidBounds); + function fragString({ rayTracingRenderTargets, defines }) { + return `#version 300 es - let mid = Math.floor((start + end) / 2); +precision mediump float; +precision mediump int; - // middle split method - // const dimMid = (centroidBounds.max[dim] + centroidBounds.min[dim]) / 2; - // mid = partition(primitiveInfo, p => p.center[dim] < dimMid, start, end); +${addDefines(defines)} - // if (mid === start || mid === end) { - // mid = Math.floor((start + end) / 2); - // nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); - // } +${rayTracingRenderTargets.set()} - // surface area heuristic method - if (nPrimitives <= 4) { - nthElement(primitiveInfo, (a, b) => a.center[dim] < b.center[dim], start, end, mid); - } else { - const buckets = []; - for (let i = 0; i < 12; i++) { - buckets.push({ - bounds: new THREE$1.Box3(), - count: 0, - }); - } +#define PI 3.14159265359 +#define TWOPI 6.28318530718 +#define INVPI 0.31830988618 +#define INVPI2 0.10132118364 +#define EPS 0.0005 +#define INF 1.0e999 +#define RAY_MAX_DISTANCE 9999.0 - for (let i = start; i < end; i++) { - let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, primitiveInfo[i].center)); - if (b === buckets.length) { - b = buckets.length - 1; - } - buckets[b].count++; - buckets[b].bounds.union(primitiveInfo[i].bounds); - } +#define STANDARD 0 +#define THIN_GLASS 1 +#define THICK_GLASS 2 +#define SHADOW_CATCHER 3 - const cost = []; +#define SAMPLES_PER_MATERIAL 8 - for (let i = 0; i < buckets.length - 1; i++) { - const b0 = new THREE$1.Box3(); - const b1 = new THREE$1.Box3(); - let count0 = 0; - let count1 = 0; - for (let j = 0; j <= i; j++) { - b0.union(buckets[j].bounds); - count0 += buckets[j].count; - } - for (let j = i + 1; j < buckets.length; j++) { - b1.union(buckets[j].bounds); - count1 += buckets[j].count; - } - cost.push(0.1 + (count0 * surfaceArea(b0) + count1 * surfaceArea(b1)) / surfaceArea(bounds)); - } +const float IOR = 1.5; +const float INV_IOR = 1.0 / IOR; 
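// The glass material above blends reflection and refraction by a Fresnel
// factor. Sketch of Schlick's approximation, which the `fresnelSchlick` call
// presumably implements (its GLSL body is outside this hunk), using the R0
// defined a few lines below: R0 = ((1 - IOR) / (1 + IOR))^2 = 0.04 for IOR 1.5.
function fresnelSchlickSketch(cosTheta, r0) {
  return r0 + (1 - r0) * Math.pow(1 - cosTheta, 5);
}
// fresnelSchlickSketch(1.0, 0.04) === 0.04 at normal incidence, rising toward
// 1.0 at grazing angles.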
- let minCost = cost[0]; - let minCostSplitBucket = 0; - for (let i = 1; i < cost.length; i++) { - if (cost[i] < minCost) { - minCost = cost[i]; - minCostSplitBucket = i; - } - } +const float IOR_THIN = 1.015; +const float INV_IOR_THIN = 1.0 / IOR_THIN; - mid = partition(primitiveInfo, p => { - let b = Math.floor(buckets.length * boxOffset(centroidBounds, dim, p.center)); - if (b === buckets.length) { - b = buckets.length - 1; - } - return b <= minCostSplitBucket; - }, start, end); - } +const float R0 = (1.0 - IOR) * (1.0 - IOR) / ((1.0 + IOR) * (1.0 + IOR)); - return makeInteriorNode( - dim, - recursiveBuild(primitiveInfo, start, mid), - recursiveBuild(primitiveInfo, mid, end), - ); - } - } +// https://www.w3.org/WAI/GL/wiki/Relative_luminance +const vec3 luminance = vec3(0.2126, 0.7152, 0.0722); - function makeLeafNode(primitives, bounds) { - return { - primitives, - bounds - }; - } +struct Ray { + vec3 o; + vec3 d; + vec3 invD; + float tMax; +}; - function makeInteriorNode(splitAxis, child0, child1) { - return { - child0, - child1, - bounds: new THREE$1.Box3().union(child0.bounds).union(child1.bounds), - splitAxis, - }; - } +struct SurfaceInteraction { + bool hit; + vec3 position; + vec3 normal; // smoothed normal from the three triangle vertices + vec3 faceNormal; // normal of the triangle + vec3 color; + float roughness; + float metalness; + int materialType; + int meshId; +}; - function maximumExtent(box3) { - box3.getSize(size); - if (size.x > size.z) { - return size.x > size.y ? 'x' : 'y'; - } else { - return size.z > size.y ? 'z' : 'y'; - } - } +struct Camera { + mat4 transform; + float aspect; + float fov; + float focus; + float aperture; +}; - function boxOffset(box3, dim, v) { - let offset = v[dim] - box3.min[dim]; +uniform Camera camera; +uniform vec2 pixelSize; // 1 / screenResolution +uniform vec2 jitter; - if (box3.max[dim] > box3.min[dim]){ - offset /= box3.max[dim] - box3.min[dim]; - } +in vec2 vCoord; - return offset; - } +void initRay(inout Ray ray, vec3 origin, vec3 direction) { + ray.o = origin; + ray.d = direction; + ray.invD = 1.0 / ray.d; + ray.tMax = RAY_MAX_DISTANCE; +} - function surfaceArea(box3) { - box3.getSize(size); - return 2 * (size.x * size.z + size.x * size.y + size.z * size.y); - } +// given the index from a 1D array, retrieve corresponding position from packed 2D texture +ivec2 unpackTexel(int i, int columnsLog2) { + ivec2 u; + u.y = i >> columnsLog2; // equivalent to (i / 2^columnsLog2) + u.x = i - (u.y << columnsLog2); // equivalent to (i % 2^columnsLog2) + return u; +} - // Create a piecewise 2D cumulative distribution function of light intensity from an envmap - // http://www.pbr-book.org/3ed-2018/Monte_Carlo_Integration/2D_Sampling_with_Multidimensional_Transformations.html#Piecewise-Constant2DDistributions +vec4 fetchData(sampler2D s, int i, int columnsLog2) { + return texelFetch(s, unpackTexel(i, columnsLog2), 0); +} - function envmapDistribution(image) { - const data = image.data; +ivec4 fetchData(isampler2D s, int i, int columnsLog2) { + return texelFetch(s, unpackTexel(i, columnsLog2), 0); +} - const cdfImage = { - width: image.width + 2, - height: image.height + 1 - }; +struct Path { + Ray ray; + vec3 li; + vec3 albedo; + float alpha; + vec3 beta; + bool specularBounce; + bool abort; +}; - const cdf = makeTextureArray(cdfImage.width, cdfImage.height, 2); +${textureLinear()} +${intersect(defines)} +${random()} +${envmap()} +${bsdf()} +${sample()} +${sampleMaterial()} +${sampleGlass()} +${sampleShadowCatcher()} - for (let y = 0; y < 
image.height; y++) { - const sinTheta = Math.sin(Math.PI * (y + 0.5) / image.height); - for (let x = 0; x < image.width; x++) { - const i = 3 * (y * image.width + x); - let r = data[i]; - let g = data[i + 1]; - let b = data[i + 2]; - let luminance = 0.2126 * r + 0.7152 * g + 0.0722 * b; - luminance *= sinTheta; - cdf.set(x + 2, y, 0, cdf.get(x + 1, y, 0) + luminance / image.width); - cdf.set(x + 1, y, 1, luminance); - } +void bounce(inout Path path, int i, inout SurfaceInteraction si) { + if (path.abort) { + return; + } - const rowIntegral = cdf.get(cdfImage.width - 1, y, 0); + si = intersectScene(path.ray); - for (let x = 1; x < cdf.width; x++) { - cdf.set(x, y, 0, cdf.get(x, y, 0) / rowIntegral); - cdf.set(x, y, 1, cdf.get(x, y, 1) / rowIntegral); + if (!si.hit) { + if (path.specularBounce) { + path.li += path.beta * sampleBackgroundFromDirection(path.ray.d); + } + + path.abort = true; + } else { + #ifdef USE_GLASS + if (si.materialType == THIN_GLASS || si.materialType == THICK_GLASS) { + sampleGlassSpecular(si, i, path); + } + #endif + #ifdef USE_SHADOW_CATCHER + if (si.materialType == SHADOW_CATCHER) { + sampleShadowCatcher(si, i, path); } + #endif + if (si.materialType == STANDARD) { + sampleMaterial(si, i, path); + } - cdf.set(0, y + 1, 0, cdf.get(0, y, 0) + rowIntegral / image.height); - cdf.set(0, y, 1, rowIntegral); + // Russian Roulette sampling + if (i >= 2) { + float q = 1.0 - dot(path.beta, luminance); + if (randomSample() < q) { + path.abort = true; + } + path.beta /= 1.0 - q; } + } +} - const integral = cdf.get(0, cdf.height - 1, 0); +// Path tracing integrator as described in +// http://www.pbr-book.org/3ed-2018/Light_Transport_I_Surface_Reflection/Path_Tracing.html# +vec4 integrator(inout Ray ray, inout SurfaceInteraction si) { + Path path; + path.ray = ray; + path.li = vec3(0); + path.alpha = 1.0; + path.beta = vec3(1.0); + path.specularBounce = true; + path.abort = false; - for (let y = 0; y < cdf.height; y++) { - cdf.set(0, y, 0, cdf.get(0, y, 0) / integral); - cdf.set(0, y, 1, cdf.get(0, y, 1) / integral); - } - cdfImage.data = cdf.array; + bounce(path, 1, si); - return cdfImage; - } + SurfaceInteraction indirectSi; + // Manually unroll for loop. 
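  // Example of what the unrollLoop() helper defined earlier produces for the
  // template below (bounds here are illustrative): the body is emitted once per
  // iteration with the index reassigned, so no runtime GLSL loop is needed.
  const unrolled = unrollLoop('i', 2, 4, 1, 'bounce(path, i, indirectSi);\n');
  // unrolled === 'int i;\ni = 2;\nbounce(path, i, indirectSi);\ni = 3;\nbounce(path, i, indirectSi);\n'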
+ // Some hardware fails to interate over a GLSL loop, so we provide this workaround + // for (int i = 1; i < defines.bounces + 1, i += 1) + // equivelant to + ${unrollLoop('i', 2, defines.BOUNCES + 1, 1, ` + bounce(path, i, indirectSi); + `)} - function makeTextureArray(width, height, channels) { - const array = new Float32Array(channels * width * height); + return vec4(path.li, path.alpha); +} - return { - set(x, y, channel, val) { - array[channels * (y * width + x) + channel] = val; - }, - get(x, y, channel) { - return array[channels * (y * width + x) + channel]; - }, - width, - height, - channels, - array - }; - } +void main() { + initRandom(); - // Convert image data from the RGBE format to a 32-bit floating point format - // See https://www.cg.tuwien.ac.at/research/theses/matkovic/node84.html for a description of the RGBE format - // Optional multiplier argument for performance optimization - function rgbeToFloat(buffer, intensity = 1) { - const texels = buffer.length / 4; - const floatBuffer = new Float32Array(texels * 3); + vec2 vCoordAntiAlias = vCoord + jitter; - const expTable = []; - for (let i = 0; i < 255; i++) { - expTable[i] = intensity * Math.pow(2, i - 128) / 255; - } + vec3 direction = normalize(vec3(vCoordAntiAlias - 0.5, -1.0) * vec3(camera.aspect, 1.0, camera.fov)); + + // Thin lens model with depth-of-field + // http://www.pbr-book.org/3ed-2018/Camera_Models/Projective_Camera_Models.html#TheThinLensModelandDepthofField + // vec2 lensPoint = camera.aperture * sampleCircle(randomSampleVec2()); + // vec3 focusPoint = -direction * camera.focus / direction.z; // intersect ray direction with focus plane + + // vec3 origin = vec3(lensPoint, 0.0); + // direction = normalize(focusPoint - origin); + + // origin = vec3(camera.transform * vec4(origin, 1.0)); + // direction = mat3(camera.transform) * direction; - for (let i = 0; i < texels; i++) { + vec3 origin = camera.transform[3].xyz; + direction = mat3(camera.transform) * direction; - const r = buffer[4 * i]; - const g = buffer[4 * i + 1]; - const b = buffer[4 * i + 2]; - const a = buffer[4 * i + 3]; - const e = expTable[a]; + Ray cam; + initRay(cam, origin, direction); - floatBuffer[3 * i] = r * e; - floatBuffer[3 * i + 1] = g * e; - floatBuffer[3 * i + 2] = b * e; - } + SurfaceInteraction si; - return floatBuffer; - } + vec4 liAndAlpha = integrator(cam, si); - function clamp(x, min, max) { - return Math.min(Math.max(x, min), max); + if (dot(si.position, si.position) == 0.0) { + si.position = origin + direction * RAY_MAX_DISTANCE; } - function shuffle(arr) { - for (let i = arr.length - 1; i > 0; i--) { - const j = Math.floor(Math.random() * (i + 1)); - const x = arr[i]; - arr[i] = arr[j]; - arr[j] = x; - } - return arr; + if (!(liAndAlpha.x < INF && liAndAlpha.x > -EPS)) { + liAndAlpha = vec4(0, 0, 0, 1); } - function numberArraysEqual(a, b, eps = 1e-4) { - for (let i = 0; i < a.length; i++) { - if (Math.abs(a[i] - b[i]) > eps) { - return false; - } - } + out_light = liAndAlpha; + out_position = vec4(si.position, si.meshId); - return true; + // Stratified Sampling Sample Count Test + // --------------- + // Uncomment the following code + // Then observe the colors of the image + // If: + // * The resulting image is pure black + // Extra samples are being passed to the shader that aren't being used. + // * The resulting image contains red + // Not enough samples are being passed to the shader + // * The resulting image contains only white with some black + // All samples are used by the shader. Correct result! 
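  // Worked example of the RGBE decoding in rgbeToFloat() above: the alpha byte
  // is a shared exponent, so each channel decodes as c * 2^(a - 128) / 255
  // (times the optional intensity multiplier).
  const decodeRGBE = (c, a, intensity = 1) => intensity * c * Math.pow(2, a - 128) / 255;
  // An RGBE texel of (128, 64, 32, 130) becomes roughly (2.01, 1.00, 0.50).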
+ + // fragColor = vec4(0, 0, 0, 1); + // if (sampleIndex == SAMPLING_DIMENSIONS) { + // fragColor = vec4(1, 1, 1, 1); + // } else if (sampleIndex > SAMPLING_DIMENSIONS) { + // fragColor = vec4(1, 0, 0, 1); + // } +} +`; } - // Convert image data from the RGBE format to a 32-bit floating point format + function mergeMeshesToGeometry(meshes) { - const DEFAULT_MAP_RESOLUTION = { - width: 4096, - height: 2048, - }; + let vertexCount = 0; + let indexCount = 0; - // Tools for generating and modify env maps for lighting from scene component data - function generateEnvMapFromSceneComponents(directionalLights, environmentLights) { - let envImage = initializeEnvMap(environmentLights); - directionalLights.forEach( light => { envImage.data = addDirectionalLightToEnvMap(light, envImage); }); + const geometryAndMaterialIndex = []; + const materialIndexMap = new Map(); - return envImage; - } + for (const mesh of meshes) { + const geometry = cloneBufferGeometry(mesh.geometry, ['position', 'normal', 'uv']); - function initializeEnvMap(environmentLights) { - let envImage; + const index = geometry.getIndex(); + if (!index) { + addFlatGeometryIndices(geometry); + } - // Initialize map from environment light if present - if (environmentLights.length > 0) { - // TODO: support multiple environment lights (what if they have different resolutions?) - const environmentLight = environmentLights[0]; + geometry.applyMatrix(mesh.matrixWorld); - envImage = { - width: environmentLight.map.image.width, - height: environmentLight.map.image.height, - data: environmentLight.map.image.data, - }; + if (!geometry.getAttribute('normal')) { + geometry.computeVertexNormals(); + } else { + geometry.normalizeNormals(); + } - envImage.data = rgbeToFloat(envImage.data, environmentLight.intensity); - } else { - // initialize blank map - envImage = generateBlankMap(DEFAULT_MAP_RESOLUTION.width, DEFAULT_MAP_RESOLUTION.height); - } + vertexCount += geometry.getAttribute('position').count; + indexCount += geometry.getIndex().count; - return envImage; - } + const material = mesh.material; + let materialIndex = materialIndexMap.get(material); + if (materialIndex === undefined) { + materialIndex = materialIndexMap.size; + materialIndexMap.set(material, materialIndex); + } - function generateBlankMap(width, height) { - const texels = width * height; - const floatBuffer = new Float32Array(texels * 3); + geometryAndMaterialIndex.push({ + geometry, + materialIndex + }); + } + + const { geometry, materialIndices } = mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount); return { - width: width, - height: height, - data: floatBuffer, + geometry, + materialIndices, + materials: Array.from(materialIndexMap.keys()) }; } - function addDirectionalLightToEnvMap(light, image) { - const sphericalCoords = new THREE$1.Spherical(); - const lightDirection = light.position.clone().sub(light.target.position); + function mergeGeometry(geometryAndMaterialIndex, vertexCount, indexCount) { + const position = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + const normal = new THREE$1.BufferAttribute(new Float32Array(3 * vertexCount), 3, false); + const uv = new THREE$1.BufferAttribute(new Float32Array(2 * vertexCount), 2, false); + const index = new THREE$1.BufferAttribute(new Uint32Array(indexCount), 1, false); - sphericalCoords.setFromVector3(lightDirection); - sphericalCoords.theta = (Math.PI * 3 / 2) - sphericalCoords.theta; - sphericalCoords.makeSafe(); + const materialIndices = []; - return addLightAtCoordinates(light, 
image, sphericalCoords); - } + const bg = new THREE$1.BufferGeometry(); + bg.addAttribute('position', position); + bg.addAttribute('normal', normal); + bg.addAttribute('uv', uv); + bg.setIndex(index); - // Perform modifications on env map to match input scene - function addLightAtCoordinates(light, image, originCoords) { - const floatBuffer = image.data; - const width = image.width; - const height = image.height; - const xTexels = floatBuffer.length / (3 * height); - const yTexels = floatBuffer.length / (3 * width); + let currentVertex = 0; + let currentIndex = 0; - // default softness for standard directional lights is 0.01, i.e. a hard shadow - const softness = light.softness || 0.01; + for (const { geometry, materialIndex } of geometryAndMaterialIndex) { + const vertexCount = geometry.getAttribute('position').count; + bg.merge(geometry, currentVertex); - // angle from center of light at which no more contributions are projected - const threshold = findThreshold(softness); + const meshIndex = geometry.getIndex(); + for (let i = 0; i < meshIndex.count; i++) { + index.setX(currentIndex + i, currentVertex + meshIndex.getX(i)); + } - // if too few texels are rejected by the threshold then the time to evaluate it is no longer worth it - const useThreshold = threshold < Math.PI / 5; + const triangleCount = meshIndex.count / 3; + for (let i = 0; i < triangleCount; i++) { + materialIndices.push(materialIndex); + } - // functional trick to keep the conditional check out of the main loop - const intensityFromAngleFunction = useThreshold ? getIntensityFromAngleDifferentialThresholded : getIntensityFromAngleDifferential; + currentVertex += vertexCount; + currentIndex += meshIndex.count; + } - let begunAddingContributions = false; - let currentCoords = new THREE$1.Spherical(); + return { geometry: bg, materialIndices }; + } - // Iterates over each row from top to bottom - for (let i = 0; i < xTexels; i++) { + // Similar to buffergeometry.clone(), except we only copy + // specific attributes instead of everything + function cloneBufferGeometry(bufferGeometry, attributes) { + const newGeometry = new THREE$1.BufferGeometry(); - let encounteredInThisRow = false; + for (const name of attributes) { + const attrib = bufferGeometry.getAttribute(name); + if (attrib) { + newGeometry.addAttribute(name, attrib.clone()); + } + } - // Iterates over each texel in row - for (let j = 0; j < yTexels; j++) { - const bufferIndex = j * width + i; - currentCoords = equirectangularToSpherical(i, j, width, height, currentCoords); - const falloff = intensityFromAngleFunction(originCoords, currentCoords, softness, threshold); + const index = bufferGeometry.getIndex(); + if (index) { + newGeometry.setIndex(index); + } - if(falloff > 0) { - encounteredInThisRow = true; - begunAddingContributions = true; - } + return newGeometry; + } - const intensity = light.intensity * falloff; + function addFlatGeometryIndices(geometry) { + const position = geometry.getAttribute('position'); - floatBuffer[bufferIndex * 3] += intensity * light.color.r; - floatBuffer[bufferIndex * 3 + 1] += intensity * light.color.g; - floatBuffer[bufferIndex * 3 + 2] += intensity * light.color.b; - } + if (!position) { + console.warn('No position attribute'); + return; + } - // First row to not add a contribution since adding began - // This means the entire light has been added and we can exit early - if(!encounteredInThisRow && begunAddingContributions) { - return floatBuffer; - } + const index = new Uint32Array(position.count); + + for (let i = 0; i < 
index.length; i++) { + index[i] = i; } - return floatBuffer; + geometry.setIndex(new THREE$1.BufferAttribute(index, 1, false)); + + return geometry; } - function findThreshold(softness) { - const step = Math.PI / 128; - const maxSteps = (2.0 * Math.PI) / step; + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - for (let i = 0; i < maxSteps; i++) { - const angle = i * step; - const falloff = getFalloffAtAngle(angle, softness); - if (falloff <= 0.0001) { - return angle; - } + Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, + instead of being evenly spaced across the domain. + This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. + + We can reduce the amount of clustering of random numbers by using stratified sampling. + Stratification divides the [0, 1) range into partitions, or stratum, of equal size. + Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. + When every stratum has been sampled once, this sequence is shuffled again and the process repeats. + + The returned sample ranges between [0, numberOfStratum). + The integer part ideintifies the stratum (the first stratum being 0). + The fractional part is the random number. + + To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. + */ + + function makeStratifiedSampler(strataCount, dimensions) { + const strata = []; + const l = strataCount ** dimensions; + for (let i = 0; i < l; i++) { + strata[i] = i; } - } - function getIntensityFromAngleDifferentialThresholded(originCoords, currentCoords, softness, threshold) { - const deltaPhi = getAngleDelta(originCoords.phi, currentCoords.phi); - const deltaTheta = getAngleDelta(originCoords.theta, currentCoords.theta); + let index = strata.length; - if(deltaTheta > threshold && deltaPhi > threshold) { - return 0; - } + const sample = []; - const angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); - } + function restart() { + index = 0; + } - function getIntensityFromAngleDifferential(originCoords, currentCoords, softness) { - const angle = angleBetweenSphericals(originCoords, currentCoords); - return getFalloffAtAngle(angle, softness); - } + function next() { + if (index >= strata.length) { + shuffle(strata); + restart(); + } + let stratum = strata[index++]; - function getAngleDelta(angleA, angleB) { - const diff = Math.abs(angleA - angleB) % (2 * Math.PI); - return diff > Math.PI ? 
(2 * Math.PI - diff) : diff; - } + for (let i = 0; i < dimensions; i++) { + sample[i] = stratum % strataCount + Math.random(); + stratum = Math.floor(stratum / strataCount); + } - const angleBetweenSphericals = function() { - const originVector = new THREE$1.Vector3(); - const currentVector = new THREE$1.Vector3(); + return sample; + } - return (originCoords, currentCoords) => { - originVector.setFromSpherical(originCoords); - currentVector.setFromSpherical(currentCoords); - return originVector.angleTo(currentVector); + return { + next, + restart, + strataCount }; - }(); - - // TODO: possibly clean this up and optimize it - // - // This function was arrived at through experimentation, it provides good - // looking results with percieved softness that scale relatively linearly with - // the softness value in the 0 - 1 range - // - // For now it doesn't incur too much of a performance penalty because for most of our use cases (lights without too much softness) - // the threshold cutoff in getIntensityFromAngleDifferential stops us from running it too many times - function getFalloffAtAngle(angle, softness) { - const softnessCoefficient = Math.pow(2, 14.5 * Math.max(0.001, 1.0 - clamp(softness, 0.0, 1.0))); - const falloff = Math.pow(softnessCoefficient, 1.1) * Math.pow(8, -softnessCoefficient * Math.pow(angle, 1.8)); - return falloff; } - function equirectangularToSpherical(x, y, width, height, target) { - target.phi = (Math.PI * y) / height; - target.theta = (2.0 * Math.PI * x) / width; - return target; - } + /* + Stratified Sampling + http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property - function getTexturesFromMaterials(meshes, textureNames) { - const textureMap = {}; + It is computationally unfeasible to compute stratified sampling for large dimensions (>2) + Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension + e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. + This reaps many benefits of stratification while still allowing for small strata sizes. + */ - for (const name of textureNames) { - const textures = []; - textureMap[name] = { - indices: texturesFromMaterials(meshes, name, textures), - textures - }; + function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { + const strataObjs = []; + + for (const dim of listOfDimensions) { + strataObjs.push(makeStratifiedSampler(strataCount, dim)); } - return textureMap; - } + const combined = []; - // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties - function mergeTexturesFromMaterials(meshes, textureNames) { - const textureMap = { - textures: [], - indices: {} - }; + function next() { + let i = 0; - for (const name of textureNames) { - textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); - } + for (const strata of strataObjs) { + const nums = strata.next(); - return textureMap; - } + for (const num of nums) { + combined[i++] = num; + } + } - function texturesFromMaterials(materials, textureName, textures) { - const indices = []; + return combined; + } - for (const material of materials) { - if (!material[textureName]) { - indices.push(-1); - } else { - let index = textures.length; - for (let i = 0; i < textures.length; i++) { - if (textures[i] === material[textureName]) { - // Reuse existing duplicate texture. 
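  // Usage sketch of the texture de-duplication here (example objects are
  // hypothetical): materials sharing the same texture object get the same
  // index, and materials without the texture get -1.
  const sharedMap = { isTexture: true }; // stands in for a THREE.Texture
  const exampleTextures = [];
  const exampleIndices = texturesFromMaterials(
    [{ map: sharedMap }, { map: null }, { map: sharedMap }],
    'map',
    exampleTextures
  );
  // exampleIndices === [0, -1, 0] and exampleTextures.length === 1.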
- index = i; - break; - } - } - if (index === textures.length) { - // New texture. Add texture to list. - textures.push(material[textureName]); - } - indices.push(index); + function restart() { + for (const strata of strataObjs) { + strata.restart(); } } - return indices; + return { + next, + restart, + strataCount + }; } function makeTexture(gl, params) { @@ -2317,6 +2419,9 @@ void main() { // Or an array of any of these objects. In this case an Array Texture will be created data = null, + // If greater than 1, create an Array Texture of this length + length = 1, + // Number of channels, [1-4]. If left blank, the the function will decide the number of channels automatically from the data channels = null, @@ -2347,11 +2452,10 @@ void main() { if (Array.isArray(data)) { dataArray = data; data = dataArray[0]; - target = gl.TEXTURE_2D_ARRAY; - } else { - target = gl.TEXTURE_2D; } + target = dataArray || length > 1 ? gl.TEXTURE_2D_ARRAY : gl.TEXTURE_2D; + gl.activeTexture(gl.TEXTURE0); gl.bindTexture(target, texture); @@ -2418,9 +2522,15 @@ void main() { gl.texSubImage3D(target, 0, 0, 0, i, layerWidth, layerHeight, 1, format, type, dataArray[i]); } + } else if (length > 1) { + // create empty array texture + gl.texStorage3D(target, 1, internalFormat, width, height, length); } else { gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, flipY); - gl.texImage2D(target, 0, internalFormat, width, height, 0, format, type, data); + gl.texStorage2D(target, 1, internalFormat, width, height); + if (data) { + gl.texSubImage2D(target, 0, 0, 0, width, height, format, type, data); + } } // return state to default @@ -2432,6 +2542,61 @@ void main() { }; } + // retrieve textures used by meshes, grouping textures from meshes shared by *the same* mesh property + function getTexturesFromMaterials(meshes, textureNames) { + const textureMap = {}; + + for (const name of textureNames) { + const textures = []; + textureMap[name] = { + indices: texturesFromMaterials(meshes, name, textures), + textures + }; + } + + return textureMap; + } + + // retrieve textures used by meshes, grouping textures from meshes shared *across all* mesh properties + function mergeTexturesFromMaterials(meshes, textureNames) { + const textureMap = { + textures: [], + indices: {} + }; + + for (const name of textureNames) { + textureMap.indices[name] = texturesFromMaterials(meshes, name, textureMap.textures); + } + + return textureMap; + } + + function texturesFromMaterials(materials, textureName, textures) { + const indices = []; + + for (const material of materials) { + if (!material[textureName]) { + indices.push(-1); + } else { + let index = textures.length; + for (let i = 0; i < textures.length; i++) { + if (textures[i] === material[textureName]) { + // Reuse existing duplicate texture. + index = i; + break; + } + } + if (index === textures.length) { + // New texture. Add texture to list. 
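  // Hedged usage sketch of the updated makeTexture(): when `length` > 1 and no
  // data is supplied, it now allocates an immutable TEXTURE_2D_ARRAY via
  // texStorage3D, which is how the ray-tracing render targets ('light' and
  // 'position') are stored. Parameter values are illustrative; `gl` is assumed
  // to be an existing WebGL2 context.
  const renderTargetTexture = makeTexture(gl, {
    width: 1280,
    height: 720,
    length: 2,         // one layer per render target
    storage: 'float',
    channels: 4,
  });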
+ textures.push(material[textureName]); + } + indices.push(index); + } + } + + return indices; + } + // Upload arrays to uniform buffer objects // Packs different arrays into vec4's to take advantage of GLSL's std140 memory layout @@ -2489,120 +2654,56 @@ void main() { const interleaved = []; for (let i = 0; i < maxLength; i++) { for (let j = 0; j < arrays.length; j++) { - const { data, channels } = arrays[j]; - for (let c = 0; c < channels; c++) { - interleaved.push(data[i * channels + c]); - } - } - } - - return interleaved; - } - - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - - Repeatedly sampling random numbers between [0, 1) has the effect of producing numbers that are coincidentally clustered together, - instead of being evenly spaced across the domain. - This produces low quality results for the path tracer since clustered samples send too many rays in similar directions. - - We can reduce the amount of clustering of random numbers by using stratified sampling. - Stratification divides the [0, 1) range into partitions, or stratum, of equal size. - Each invocation of the stratified sampler draws one uniform random number from one stratum from a shuffled sequence of stratums. - When every stratum has been sampled once, this sequence is shuffled again and the process repeats. - - The returned sample ranges between [0, numberOfStratum). - The integer part ideintifies the stratum (the first stratum being 0). - The fractional part is the random number. - - To obtain the stratified sample between [0, 1), divide the returned sample by the stratum count. - */ - - function makeStratifiedSampler(strataCount, dimensions) { - const strata = []; - const l = strataCount ** dimensions; - for (let i = 0; i < l; i++) { - strata[i] = i; - } - - let index = strata.length; - - const sample = []; - - function restart() { - index = 0; - } - - function next() { - if (index >= strata.length) { - shuffle(strata); - restart(); - } - let stratum = strata[index++]; - - for (let i = 0; i < dimensions; i++) { - sample[i] = stratum % strataCount + Math.random(); - stratum = Math.floor(stratum / strataCount); + const { data, channels } = arrays[j]; + for (let c = 0; c < channels; c++) { + interleaved.push(data[i * channels + c]); + } } - - return sample; } - return { - next, - restart, - strataCount - }; + return interleaved; } - /* - Stratified Sampling - http://www.pbr-book.org/3ed-2018/Sampling_and_Reconstruction/Stratified_Sampling.html - - It is computationally unfeasible to compute stratified sampling for large dimensions (>2) - Instead, we can compute stratified sampling for lower dimensional patterns that sum to the high dimension - e.g. instead of sampling a 6D domain, we sample a 2D + 2D + 2D domain. - This reaps many benefits of stratification while still allowing for small strata sizes. 
- */ - - function makeStratifiedSamplerCombined(strataCount, listOfDimensions) { - const strataObjs = []; + // targets is array of { name: string, storage: 'byte' | 'float'} + function makeRenderTargets({storage, names}) { + const location = {}; - for (const dim of listOfDimensions) { - strataObjs.push(makeStratifiedSampler(strataCount, dim)); + for (let i = 0; i < names.length; i++) { + location[names[i]] = i; } - const combined = []; - - function next() { - let i = 0; + return { + isRenderTargets: true, + storage, + names, + location, + get(textureName) { + let inputs = ''; - for (const strata of strataObjs) { - const nums = strata.next(); + inputs += `uniform mediump sampler2DArray ${textureName};\n`; - for (const num of nums) { - combined[i++] = num; + for (let i = 0; i < names.length; i++) { + inputs += `#define ${textureName}_${names[i]} ${i}\n`; } - } - return combined; - } + return inputs; + }, + set() { + let outputs = ''; - function restart() { - for (const strata of strataObjs) { - strata.restart(); - } - } + for (let i = 0; i < names.length; i++) { + outputs += `layout(location = ${i}) out vec4 out_${names[i]};\n`; + } - return { - next, - restart, - strataCount + return outputs; + } }; } - //Important TODO: Refactor this file to get rid of duplicate and confusing code + const rayTracingRenderTargets = makeRenderTargets({ + storage: 'float', + names: ['light', 'position'] + }); function makeRayTracingShader({ bounces, // number of global illumination bounces @@ -2616,17 +2717,19 @@ void main() { bounces = clamp(bounces, 1, 6); const samplingDimensions = []; - samplingDimensions.push(2, 2); // anti aliasing, depth of field - for (let i = 0; i < bounces; i++) { + + for (let i = 1; i <= bounces; i++) { // specular or diffuse reflection, light importance sampling, material sampling, next path direction samplingDimensions.push(2, 2, 2, 2); - if (i >= 1) { + if (i >= 2) { // russian roulette sampling // this step is skipped on the first bounce samplingDimensions.push(1); } } + let samples; + const { program, uniforms } = makeProgramFromScene({ bounces, fullscreenQuad, gl, optionalExtensions, samplingDimensions, scene, textureAllocator }); @@ -2655,7 +2758,10 @@ void main() { gl.uniform1f(uniforms['camera.aperture'], camera.aperture || 0); } - let samples; + function setJitter(x, y) { + gl.useProgram(program); + gl.uniform2f(uniforms.jitter, x, y); + } function nextSeed() { gl.useProgram(program); @@ -2678,11 +2784,6 @@ void main() { nextSeed(); } - function useStratifiedSampling(stratifiedSampling) { - gl.useProgram(program); - gl.uniform1f(uniforms.useStratifiedSampling, stratifiedSampling ? 
1.0 : 0.0); - } - function draw() { gl.useProgram(program); fullscreenQuad.draw(); @@ -2694,10 +2795,10 @@ void main() { draw, nextSeed, setCamera, + setJitter, setNoise, setSize, setStrataCount, - useStratifiedSampling }; } function makeProgramFromScene({ @@ -2711,7 +2812,7 @@ void main() { }) { const { OES_texture_float_linear } = optionalExtensions; - const { meshes, directionalLights, environmentLights } = decomposeScene(scene); + const { meshes, directionalLights, ambientLights, environmentLights } = decomposeScene(scene); if (meshes.length === 0) { throw 'RayTracingRenderer: Scene contains no renderable meshes.'; } @@ -2732,20 +2833,23 @@ void main() { const useShadowCatcher = materials.some(m => m.shadowCatcher); const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString({ - OES_texture_float_linear, - BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, - INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, - VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, - STACK_SIZE: flattenedBvh.maxDepth, - NUM_TRIS: numTris, - NUM_MATERIALS: materials.length, - NUM_DIFFUSE_MAPS: maps.map.textures.length, - NUM_NORMAL_MAPS: maps.normalMap.textures.length, - NUM_PBR_MAPS: pbrMap.textures.length, - BOUNCES: bounces, - USE_GLASS: useGlass, - USE_SHADOW_CATCHER: useShadowCatcher, - SAMPLING_DIMENSIONS: samplingDimensions.reduce((a, b) => a + b) + rayTracingRenderTargets, + defines: { + OES_texture_float_linear, + BVH_COLUMNS: textureDimensionsFromArray(flattenedBvh.count).columnsLog, + INDEX_COLUMNS: textureDimensionsFromArray(numTris).columnsLog, + VERTEX_COLUMNS: textureDimensionsFromArray(geometry.attributes.position.count).columnsLog, + STACK_SIZE: flattenedBvh.maxDepth, + NUM_TRIS: numTris, + NUM_MATERIALS: materials.length, + NUM_DIFFUSE_MAPS: maps.map.textures.length, + NUM_NORMAL_MAPS: maps.normalMap.textures.length, + NUM_PBR_MAPS: pbrMap.textures.length, + BOUNCES: bounces, + USE_GLASS: useGlass, + USE_SHADOW_CATCHER: useShadowCatcher, + SAMPLING_DIMENSIONS: samplingDimensions.reduce((a, b) => a + b) + } })); const program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); @@ -2813,15 +2917,31 @@ void main() { makeDataTexture(gl, flattenedBvh.buffer, 4) ); - const envImage = generateEnvMapFromSceneComponents(directionalLights, environmentLights); - - textureAllocator.bind(uniforms.envmap, makeTexture(gl, { + const envImage = generateEnvMapFromSceneComponents(directionalLights, ambientLights, environmentLights); + const envImageTextureObject = makeTexture(gl, { data: envImage.data, minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, magFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, width: envImage.width, height: envImage.height, - })); + }); + + textureAllocator.bind(uniforms.envmap, envImageTextureObject); + + let backgroundImageTextureObject; + if (scene.background) { + const backgroundImage = generateBackgroundMapFromSceneBackground(scene.background); + backgroundImageTextureObject = makeTexture(gl, { + data: backgroundImage.data, + minFilter: OES_texture_float_linear ? gl.LINEAR : gl.NEAREST, + magFilter: OES_texture_float_linear ? 
gl.LINEAR : gl.NEAREST, + width: backgroundImage.width, + height: backgroundImage.height, + }); + } else { + backgroundImageTextureObject = envImageTextureObject; + } + textureAllocator.bind(uniforms.backgroundMap, backgroundImageTextureObject); const distribution = envmapDistribution(envImage); textureAllocator.bind(uniforms.envmapDistribution, makeTexture(gl, { @@ -2841,6 +2961,7 @@ void main() { function decomposeScene(scene) { const meshes = []; const directionalLights = []; + const ambientLights = []; const environmentLights = []; scene.traverse(child => { if (child.isMesh) { @@ -2856,20 +2977,24 @@ void main() { if (child.isDirectionalLight) { directionalLights.push(child); } + if (child.isAmbientLight) { + ambientLights.push(child); + } if (child.isEnvironmentLight) { if (environmentLights.length > 1) { console.warn(environmentLights, 'only one environment light can be used per scene'); } - else if (isHDRTexture(child)) { + // Valid lights have HDR texture map in RGBEEncoding + if (isHDRTexture(child)) { environmentLights.push(child); } else { - console.warn(child, 'environment light does not use THREE.RGBEEncoding'); + console.warn(child, 'environment light does not use color value or map with THREE.RGBEEncoding'); } } }); return { - meshes, directionalLights, environmentLights + meshes, directionalLights, ambientLights, environmentLights }; } @@ -2950,7 +3075,7 @@ void main() { && (texture.map.encoding === THREE$1.RGBEEncoding || texture.map.encoding === THREE$1.LinearEncoding); } - function fragString$1(defines) { + function fragString$1({ rayTracingRenderTargets, defines }) { return `#version 300 es precision mediump float; @@ -2960,7 +3085,7 @@ in vec2 vCoord; out vec4 fragColor; -uniform sampler2D image; +${rayTracingRenderTargets.get('hdrBuffer')} ${textureLinear()} @@ -2992,7 +3117,7 @@ vec3 acesFilmic( vec3 color ) { } void main() { - vec4 tex = textureLinear(image, vCoord); + vec4 tex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_light)); // alpha channel stores the number of samples progressively rendered // divide the sum of light by alpha to obtain average contribution of light @@ -3034,20 +3159,23 @@ void main() { const { toneMapping, whitePoint, exposure } = toneMappingParams; const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString$1({ - OES_texture_float_linear, - toneMapping: toneMapFunctions[toneMapping] || 'linear', - whitePoint: whitePoint.toExponential(), // toExponential allows integers to be represented as GLSL floats - exposure: exposure.toExponential() + rayTracingRenderTargets, + defines: { + OES_texture_float_linear, + toneMapping: toneMapFunctions[toneMapping] || 'linear', + whitePoint: whitePoint.toExponential(), // toExponential allows integers to be represented as GLSL floats + exposure: exposure.toExponential() + } })); const program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); const uniforms = getUniforms(gl, program); - const image = textureAllocator.reserveSlot(); + const hdrBufferLocation = textureAllocator.reserveSlot(); - function draw({ texture }) { + function draw(texture) { gl.useProgram(program); - image.bind(uniforms.image, texture); + hdrBufferLocation.bind(uniforms.hdrBuffer, texture); fullscreenQuad.draw(); } @@ -3087,10 +3215,12 @@ void main() { width = Math.floor(w); height = Math.floor(h); - if (Array.isArray(renderTarget)) { - texture = initMultipleTextures(gl, width, height, linearFiltering, renderTarget); + if (renderTarget.isRenderTargets) { + // RenderTargets object + texture = 
initArrayTexture(gl, width, height, linearFiltering, renderTarget); } else { - texture = initSingleTexture(gl, width, height, linearFiltering, renderTarget); + // single render target in the form { storage } + texture = initTexture(gl, width, height, linearFiltering, renderTarget); } this.unbind(); @@ -3119,7 +3249,7 @@ void main() { }; } - function initSingleTexture(gl, width, height, linearFiltering, { storage }) { + function initTexture(gl, width, height, linearFiltering, { storage }) { const texture = makeTexture(gl, { width, height, @@ -3133,24 +3263,22 @@ void main() { return texture; } - function initMultipleTextures(gl, width, height, linearFiltering, renderTargets) { - const texture = {}; + function initArrayTexture(gl, width, height, linearFiltering, { storage, names }) { const drawBuffers = []; - for (const { name, storage, index } of renderTargets.targets) { - const t = makeTexture(gl, { - width, - height, - storage, - minFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, - magFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, - channels: 4 - }); - - gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + index, t.target, t.texture, 0); + const texture = makeTexture(gl, { + width, + height, + length: names.length, + storage: storage, + minFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, + magFilter: linearFiltering ? gl.LINEAR : gl.NEAREST, + channels: 4 + }); - texture[name] = t; - drawBuffers.push(gl.COLOR_ATTACHMENT0 + index); + for (let i = 0; i < names.length; i++) { + gl.framebufferTextureLayer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0 + i, texture.texture, 0, i); + drawBuffers.push(gl.COLOR_ATTACHMENT0 + i); } gl.drawBuffers(drawBuffers); @@ -3189,31 +3317,17 @@ void main() { let pixelsPerTileQuantized = pixelsPerTile; - let desiredTimePerTile = 22; // 45 fps + let desiredTimePerTile = 20; - let timePerPixelSum = desiredTimePerTile / pixelsPerTile; - let samples = 1; - let resetSum = true; + let timePerPixel = desiredTimePerTile / pixelsPerTile; - function addToTimePerPixel(t) { - if (resetSum) { - timePerPixelSum = 0; - samples = 0; - resetSum = false; - } - - timePerPixelSum += t; - samples++; - } - - function getTimePerPixel() { - return timePerPixelSum / samples; + function restartTimer() { + firstTileTime = 0; } function reset() { currentTile = -1; firstTileTime = 0; - resetSum = true; } function setSize(w, h) { @@ -3239,14 +3353,14 @@ void main() { if (firstTileTime) { const timeElapsed = Date.now() - firstTileTime; const timePerTile = timeElapsed / numTiles; - const error = desiredTimePerTile - timePerTile; - // higher number means framerate converges to targetRenderTime faster - // if set too high, the framerate fluctuates rapidly with small variations in frame-by-frame performance - const convergenceStrength = 1000; + const expAvg = 0.5; - pixelsPerTile = pixelsPerTile + convergenceStrength * error; - addToTimePerPixel(timePerTile / pixelsPerTileQuantized); + const newPixelsPerTile = pixelsPerTile * desiredTimePerTile / timePerTile; + pixelsPerTile = expAvg * pixelsPerTile + (1 - expAvg) * newPixelsPerTile; + + const newTimePerPixel = timePerTile / pixelsPerTileQuantized; + timePerPixel = expAvg * timePerPixel + (1 - expAvg) * newTimePerPixel; } firstTileTime = Date.now(); @@ -3278,33 +3392,25 @@ void main() { } return { - setSize, - reset, - nextTile, - getTimePerPixel, - restartTimer() { - firstTileTime = 0; - }, - setRenderTime(time) { - desiredTimePerTile = time; + getTimePerPixel() { + return timePerPixel; }, + nextTile, + reset, + restartTimer, + setSize 
     };
   }
 
   function pixelsPerTileEstimate(gl) {
     const maxRenderbufferSize = gl.getParameter(gl.MAX_RENDERBUFFER_SIZE);
-    const maxViewportDims = gl.getParameter(gl.MAX_VIEWPORT_DIMS);
 
     if (maxRenderbufferSize <= 8192) {
-      return 25000;
-    } else if (maxRenderbufferSize === 16384 && maxViewportDims[0] <= 16384) {
-      return 50000;
-    } else if (maxRenderbufferSize === 16384 && maxViewportDims[0] >= 32768) {
-      return 100000;
-    } else if (maxRenderbufferSize >= 32768) {
       return 200000;
-    } else {
-      return 50000;
+    } else if (maxRenderbufferSize === 16384) {
+      return 400000;
+    } else if (maxRenderbufferSize >= 32768) {
+      return 600000;
     }
   }
 
@@ -3342,6 +3448,165 @@ void main() {
     };
   }
 
+  function fragString$2({ rayTracingRenderTargets, defines }) {
+    return `#version 300 es
+
+precision mediump float;
+precision mediump int;
+
+in vec2 vCoord;
+
+${rayTracingRenderTargets.get('historyBuffer')}
+${rayTracingRenderTargets.get('hdrBuffer')}
+${rayTracingRenderTargets.set()}
+
+${addDefines(defines)}
+
+uniform mat4 historyCamera;
+uniform float blendAmount;
+uniform vec2 jitter;
+
+vec2 reproject(vec3 position) {
+  vec4 historyCoord = historyCamera * vec4(position, 1.0);
+  return 0.5 * historyCoord.xy / historyCoord.w + 0.5;
+}
+
+void main() {
+  vec4 positionTex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_position));
+  vec4 lightTex = texture(hdrBuffer, vec3(vCoord, hdrBuffer_light));
+
+  vec3 currentPosition = positionTex.xyz;
+  float currentMeshId = positionTex.w;
+
+  vec2 hCoord = reproject(currentPosition) - jitter;
+
+  ivec2 hSize = textureSize(historyBuffer, 0).xy;
+  vec2 hSizef = vec2(hSize);
+
+  vec2 hTexelf = hCoord * hSizef - 0.5;
+  ivec2 hTexel = ivec2(hTexelf);
+  vec2 f = fract(hTexelf);
+
+  ivec2 texel[] = ivec2[](
+    hTexel + ivec2(0, 0),
+    hTexel + ivec2(1, 0),
+    hTexel + ivec2(0, 1),
+    hTexel + ivec2(1, 1)
+  );
+
+  float weights[] = float[](
+    (1.0 - f.x) * (1.0 - f.y),
+    f.x * (1.0 - f.y),
+    (1.0 - f.x) * f.y,
+    f.x * f.y
+  );
+
+  vec4 history;
+  float sum;
+
+  // bilinear sampling, rejecting samples that don't have a matching mesh id
+  for (int i = 0; i < 4; i++) {
+    float histMeshId = texelFetch(historyBuffer, ivec3(texel[i], historyBuffer_position), 0).w;
+
+    float isValid = histMeshId != currentMeshId ? 0.0 : 1.0;
+
+    float weight = isValid * weights[i];
+    history += weight * texelFetch(historyBuffer, ivec3(texel[i], historyBuffer_light), 0);
+    sum += weight;
+  }
+
+  if (sum > 0.0) {
+    history /= sum;
+  } else {
+    // If all samples of bilinear fail, try a 3x3 box filter
+    hTexel = ivec2(hTexelf + 0.5);
+
+    for (int x = -1; x <= 1; x++) {
+      for (int y = -1; y <= 1; y++) {
+        ivec2 texel = hTexel + ivec2(x, y);
+
+        float histMeshId = texelFetch(historyBuffer, ivec3(texel, historyBuffer_position), 0).w;
+
+        float isValid = histMeshId != currentMeshId ? 0.0 : 1.0;
+
+        float weight = isValid;
+        vec4 h = texelFetch(historyBuffer, ivec3(texel, historyBuffer_light), 0);
+        history += weight * h;
+        sum += weight;
+      }
+    }
+    history = sum > 0.0 ?
history / sum : history; + } + + if (history.w > MAX_SAMPLES) { + history.xyz *= MAX_SAMPLES / history.w; + history.w = MAX_SAMPLES; + } + + out_light = blendAmount * history + lightTex; + out_position = positionTex; +} + `; + } + + function makeReprojectShader(params) { + const { + fullscreenQuad, + gl, + maxReprojectedSamples, + textureAllocator, + } = params; + + const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragString$2({ + rayTracingRenderTargets, + defines: { + MAX_SAMPLES: maxReprojectedSamples.toFixed(1) + } + })); + + const program = createProgram(gl, fullscreenQuad.vertexShader, fragmentShader); + const uniforms = getUniforms(gl, program); + + const hdrBufferLocation = textureAllocator.reserveSlot(); + const historyBufferLocation = textureAllocator.reserveSlot(); + + const historyCamera = new THREE$1.Matrix4(); + + function setPreviousCamera(camera) { + gl.useProgram(program); + + historyCamera.multiplyMatrices(camera.projectionMatrix, camera.matrixWorldInverse); + + gl.uniformMatrix4fv(uniforms.historyCamera, false, historyCamera.elements); + } + + function setBlendAmount(x) { + gl.useProgram(program); + gl.uniform1f(uniforms.blendAmount, x); + } + + function setJitter(x, y) { + gl.useProgram(program); + gl.uniform2f(uniforms.jitter, x, y); + } + + function draw(hdrBuffer, historyBuffer) { + gl.useProgram(program); + + hdrBufferLocation.bind(uniforms.hdrBuffer, hdrBuffer); + historyBufferLocation.bind(uniforms.historyBuffer, historyBuffer); + + fullscreenQuad.draw(); + } + + return { + draw, + setBlendAmount, + setJitter, + setPreviousCamera, + }; + } + var noiseBase64 = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEAAAABAEAAAAADfkvJBAAAbsklEQVR4nA3UhQIIvBoA0E830810M91MN9PNdDPd/ulmupluppvpZrqZbqabe89DHCiDv5GzaossZGYBp2PFIFqKdmMXIKW85edCB/RT11SD3JMQidRlL7n2ufRH1jVkFUNVc3NaZ7DP0T7/112kM1Qc3RDG0K/4uN7CPC7OmtFRZK3Jy3fhSSySKIZXopTsnIhN69JjLHJYYnfpZu44hnV+UkhG/lPd/D+fIVwWtdhhupVPJmtsLFIhjHA7UUqY4fPIQ2qdKxviqH2sugJ2nC+1ZdV0vEF3RGNcMd4KdvIXaJnujdPrKj4ifkeX2f04avjEbqO0ogI/rD7zhmy6GKG/2w32IetIX5vE9DbrS+CNy4sbmgXoiaug48lV4bVKZgluwPujd+Ioa+KjuntypepEEvl/YYCYTq6w4aaReGMShwLkC4nvq7jFKJmLpoepHJTag/h2aMklShou+tyip5wm67P2/CnvH7K6zuq+KGvy2rkkrR4mc4dpUNTEFHDId9TXQiST3RxHO0lHNgNFIA/Ub1kC0pOlNBf77EtyZ0ejxvikzySL8C8hNWyyc1GvcBCusv/otvBO3YSj+KvvRlKgoNaF/GEB64prsx8qFRwVJcRmMk8l5E5swfHMPuhlr9DmtrLeqs7KOrCMQSpeGW/zH5F2dc0AXZhcp9IthLZyuxpHrkNnp0JfnsY+55XkAtgSOvsWzps8uoJ5GtpAXRWZ5TK9cEM1WVRWC81ZUstPZHHkC7GDjZfl7BJ+VcXkI8RfVIMW0Jq95oxE0R+MDQnMX97DPhYjEXzHM0LvUNyODhdDCvJdNmXlfFp0RsbBNclTj8hpXofsCgVYsAnwPRTNTiTLxZkQW43BmK6wHk7Y0iSdXIfyK8/aQULdx1/hJc0JkRE/UgNDc/dGZWanTCs2WQ0W6Xh7PZGuDMXEaLtIRMZcZAM4ieOwO661Qf4xVyhLOOA2mLe0JyvIDrBhUA42ioUiMmrHJ9te6jwtbQ6xWrKf/ED3qKJ0qvzO2of57KkcyMBvNZndbLTX/iWNaWTezm9E8cleKOSEXK1B3LDfeGk4yx/b7L5+uAvp6UVC/UYAhvPLvSwTWm+qqO5saYjh79LadBJaAR90ct9S/GGZ7Q1zhKyTOUJ9MzT85IldVjLLduUOqovEaASJbXeZ37oFv0w/sOGhvMzpVrL/2MeQx8+ldfQU/QBXIqn8NtHAHjCzaTJk+CDS0e6Wk8N7GEDgoR4rG5M/Zig/LD6hEr6VHmxzmijoKu/oZ+p84oEeiwegquE7pBZPYXEoyLeQ66wRicLXmOzWoib6mq6KUoWxuriq62OQh647TUmn0RuuIjtPfuEkcMQtwJ/IaJabRRe9fRX2Q8Z1L2UNlMclpfMFdKYr+XkVEeb6vChZuOBfhNl+l/hly9L0/mzYIxPhBq4oimlnB273mkgwnr+S7Vnp8Fff8/3VC7IJCtqZ9AxZRnujo3wjmQ9n7WtayxwgvUhUNtJ0UjlEU9vPFhePxDLfkl6z43hhdQSW+xbyKooJEEwqTOkL1VHWc1vReFaVxbcnTGM2Uq1XNXRPos0bdtI8VBKXcZdCV1dNpLcL3DE7Cqfmi2w5JGhGFqATTUhzy7sG2+a0II4ZtupikC488mt9abdTvpYXVALXBU6wNzYLXUTPQwTxH/nNttjKDA7pQT47mopOQmxzW/f3GVhXWoguEUl5EHcUoKm8LdpiMoZV9JONpzZa7wa7hG4XzxvquHj2s5lsIrFbtrbew3+SKbiK6Ry+whAyXrTBC0kgDfwZHNOMNRnwOjHVVICdOGVo6LuFsn6GTKN6u4IeZqtN7B6vzlegD7ioW8i/u430kbtO2pABr
gTPwb+xchSZ7jK/V6KxPEWK+K+oBXFmeuikt+HzrIU66KQsI9bRaGqQfKqSkMNumbnN4/ljkFsPxqnDElSF32L17D8UhxbUI8xnuwk/0znwXXcGGmD4QpPo5n6kTod70Zb2oI8Y6pFJKiuLoab7bXBEj+CXFTOH4A4kV/1JNjNRLrexaEX5Ht0xQ1RRskzmhCd+rmnFi9hLeqHe7svy7Lq+/+Mq6am+A/X8e+iptvqcbIjzqCOfbW6SpKQ22gPt8HgTFUMPd9kWgKd2O45Pr0EuOlK8waXFfriga7sXrLlKZZbrgeaPnmsrurd+n2H8hugjc+i1OCpJj2vYPyQ27+lT6/f4JM0c6sJIHwm/8AJS4tXuuo6g9qOCjvOZIrI9ZpaaauQAjwb9eTG0RMYPr2y5AHv8YhZLHvZl+DdQqrI5Z1L4QawT/FOLoQCOLR+EyTIrjcqb6YtiA4mg0/L27reYYg7JpvSVOM7G+p2uIb1iJ0hE+/DvvLW+qqfL034nLU5GQh02j8aHi/aDLS2b4ncYk/OcE+V+hhNqmF2rs1j4a1qziXYgaaDWQRetSbOwC60J8VhFSIf62k2osy7FXqpdrDAdZbuQxf5ZOCGLy6Reago9xBydmN9HBdUqX9VtUYdIKZOGbGAFxEDXjLxDmeVXsd5WIOmlhN0kqe2r84o1upy+z9KLRjY/ui5qGkhNiqoL5iXN6hPbeyGa+ckKwRM6l51Ao+EG/yKruXNsrWvHkuDPKKctS4bYRnq7eIQX+at4s8lD2ovy+D/xlXUWuf2jsNiNQx9xDRwjLAgJUSd5AvfTD80U0Qk91fP8DTkBfaXx1Qhv7FMXifZRMw0MlxtxVFVNzoOTrnjoK9ObCZy5HOwjbWgTib1kFo3BJa9t7oojdJK5RpGcifO66LQ2xuIHBvxcnMcLdEoUWc0QjVhs0k3f4dnoXvREODRB5KWJ2UFTX60WcXERxFQ7uo9mDz1YVbzQddDBHQ3QxD0MPfBnsdX+p9+xg+Sybmtum4hKoJW+CG0NGSQxP/TC0AulZ1tozfATr9Ld/QfURp1kg2FqaOQ2QBZ9JNyCoeQfO0eS+SOCa0lLshW6hnulWqHi/qrMTj6Z03gzB/LMzuaXmZXJSUm7nSKACjQDVzafbiNTqUayYpjDNpqhqIzf4SfRU/KF6S+vo0MhAS/v36BoolU4JbKQO3S3nmAL88puH0GoN6tF3vg2rCzscLVcUbmKzHS/dFroBdGk8bP4Hx8DRotKtJdMa4YZKhvR2OgbnULv+lzYUfjhFusD6KaLR8aHFSSPjYmT2MP6tU1L76u4uqJYrqawEqqpW+Onm4G6KIw2CU0Z29/EIc9gKVwjH3wxNV5v8fmxVunIGB94PxYBV+I3RRM4IO8x7Ab6ZXi3aoEeoUXmtzqHVrGCsrUYpOvIFXSMgX4YQp1Qmp6xf/Ae8gR1U19NUzEdSOjApK9nPuoItqt5HE7TXPIm3sff2fm+SbioN9GcPLltyTLKeeGBjGr668sYsfuymdjM8uHjYqL5BLn4SFqRdjbnZJKgyFHIA51lEjEebtEMfqN7LlORlgreiM3B26G2g82iqssbZBQq6k+rGn5J+MMvsVRus95vMpFR9K9K4errLmJFSMO/iepoBu6CfptR4QzqxpOYH6ERP4xmqS4uKzz3V2RS0SnMNwnYKvdW5Bd16FdS0kWlDeQ2VIMEJtgeVJ7GZIdDYQldWQ6UVK2mM1l000/MRyn5GpGZDkRbQ1RUCs/HLcMDV4hV1/OkEZFpRX+f5zfSHGQR7W2obdeiMnK3qQarTK7wEiq5vTqWXayqhyF4By5l6+HDPKK4AZtVRnoHjVBv8Syd1VocyY2UP9g8c15PpXBNVIET8MnVd8/oNlaGcnZJBZoQ7uAe4SjJAWNdX3AkNrQTQ+ClmMxO23i4nXseStC+4agkPDYeChdcOzLRJ2f/2S+ukJqsW/tvKoN4bP5/sOpHxuN5qC3p5VbaizIefWBKkKWkCc+DO5paPAHAP7wQj+VFRVp/zhPy3Ufw+8I4VsE1QVPtS1ZLf6eJ5Qr3Se3GxfURld71EhvEHJXVbLdJzUL/2nk6nX1mGcxdXUpvIg2gt7rADrkoYq0ogKbYXyK1pOwljuEO0rykAh5k2pMp6hR7rVO7h3IY2Y6gOYpsBqhWfp/sQcbbZa6m7uge0dx8pUgjd9GY5CyUldNEXX3L5JRLaHP2G5UhDtfnn8Qk3sak8Y1dUR5BatyTnyTR2PWwnCVCZe09NdwLG8tpvl3nJCd8dfzPNFMp1Wb4YuuihKIPWkP2k5I0o4OVJB96wDby2Oy2TAwv9VAxh8dFJ9EvU1S390Pdekx8d0jrxgik35GaLDoeZR7ZhH4IqyzO+/WiNzkkGNrOm8MvN4dmom9kbtuCzgy14K097SrhJuoeDEMJ7CI5Tjwn+3AmfjkUQpXUTR+DzdDPKVRgh23w1c0MUoI1EYchky6st4hefmS4bhZhr5vJ9/QYfUpbywukv9iib4S8msMqOE6iqH86px6L3oubJike6fJBB1ODDTZb6V+fAvapLL6DTGQ+2hm2k1svL8litoeKxZaRIXq2/U3HsDb6ghQBJqP4OB29iP4Lv/FaVZlctV9QM5tC1UGRbCWRBSfQs/UOFAGtlhX8VJJMLTD7VQY6HRU23ehdXAYlJHN5FlkRvXQHdDzx2I8Lx1A3sxTd8MXdOjVKH4BCOp2pIx6zrHwar6qO6uYB3FaXXdYNycNXCUNlY9TFLwq5SFuemg60UdhieVa8hml4v/2sHOsDNV1JGM5zmx/U2qKhk/lq+7jXaCuuYxaTPba1OuMHhY16GiuJVonzKBUtjEDVtwPxJP+cXUaRfD/1w5zS0Ulr9DXcQPnIK39Xdgkn+WJahGzGkI1cda/xFhfNn6KP1R7c2Y4JZSBnWK26kkJhs51E/tGk8m5oInvSjOI5risjuorqlI8X0oZh+JmKQeuhn7KLjKmvmd6iCVnIKtMH5KOM6zGu5nP5hmixMLo8Ge0P6jWyD0ukR7F0lqIPEMc/gv0OIsqZvCSug8eZ964gnYXr+LsqPmojHrG0apiIzg6TtkyHc7BHIDzTXuL/yQ38Dhsnm5OPfCorYK/LFTKPOU4xr+m/6WzydVCmPWwM5+UuN9e1Ce/8TRbfdJVzbCrWQJTUO+R8V5Ouh6m6T2jpqllYDfew5Ylcb1teraRxUFb8xxp6zFWH+eqtbIhzomc+DRunqvv3doVoKfOEJGoRKilzmAt4B69k+0FyN0m2ED5ss6NkNLTbn1LDAmHU/QDBj5oU8j9cxLxi2dUd+z5E8RfNT9NUHvApzRU/Bv1R0MEPlER9Nzuhpb/lhmsLxUJfP8EkYWdUCbyW3QzlbTco4AfhKEDNUfeY7pLt8U/a063mUaGD+4wtofwtmo0L2WWqlSxHErH0aDltYsbwqHqNq2CnuJ3qdKjJh/hlYYrsKLKwwTy2eOnzyrIMB1A0rmhiNc3Iz9tkvJt44ZqhJQ70F+jhW8CIgNQuO49/Q8bcJ5NxWlaVj6Yx/VVIZWeY
2uK+zuw3hSEhIu2hE5NLfiC9p//I7vq6i6+fioJwF2Uyf2lzHoGt521FPlUJrH+AioQzvJtcJnaGEwHewSXxGFExyX7y81hVsQGng6shr9lG74TM5KdX/LyLIevpKyin6sz/Qj/0MjTQh2g594Yct6NVPL5QNUC3QlX/RR3hOXE9th5Nhf2hBswWfdVZVJsvMQNoGnOVfvNx6Qudgo9Ra/hMVJV8wdF1XQwFSYqwzgxjkVQ9kS+cZjHEhzAK6qMKYlZIjg+ZGqIvykCWBy4T0dlkBykCq33WsIAOAoJaQjH/V5w1uekes5plQOPRfBuTFmGvWRueVX9VW2V7GcccoE90CTSW7cXzaU+9hdflUeUTkk001/PDCAnbTRXb2h4jPeCZ2O0Gh1JuOu2M97PnZjBd6QrJDuqBL60+kuH4BK+Fo8uzLjmaoO4Z4DvsCpZM9DJtlWKvUEnVmTVVj/SOUFmOxBHCZV7CJJETIKA8rIuZKavxzKaxvQSlxD/exg9g130ifoH20pBJPKAz2F+bwyVUq2Qrd98mshdVNhVTtjJXSFx4wzegSfhAKECfcY1u4Wamu3pPqogO+Fu4bifDU1MZRfepxAh8EeLYn0i4Ey6NWwYD4Yhp6hfK8uiGimFPubcsYXiI/nO58QmN5V4+zm1kpdl3AtoeFLF0MT0Wbqk5KJ37rmqFTWYR+4vLsGN4BM3uGoYUJgLv5irINGiw+upKhA3qOIxkiQjVGfR+uo7dRAv4B1WLbqApcD472903Hz2T6/0jmR6G0xWmEWz2g3U7uYZF1FNgKX7PK5p85lXoGMBAMzzA17Kb+EnZmFfk/eghNI4W9r1pGjGZ14YvbIHcHQbYy/Cbb0FTcW61x83ySGRGjc0SOC/qqKE+p28MfV0hfJhNV0P4VdGQdICcYrKPz/Lb306IfSKl+66z83LiKPokGeuq4pI5oqFMzY6FSQC50RXxgifnnckXEUfkZS9kFNJCn0b38Q4aWXRRt2Rl/pLMkll4fdwuPNaRXW11xT1lBdE2KfBblwAdDz/dNhIJtSZZzFtdWq+BqHZPKB8ukbZwCkf0Ne19X1hMFAvsLZIWFyPGnTe36TC9Ej8U5Tkk8J/0Ai9JpnCJ7iLz+VWzFqqEdyaXGqSWk8I4vYovWonifKW2Iok7p8boFaozGsinis86MpknWoeJoazD4OW5UEXvcxNoUvdDdDdP5Ag7V2xypbHy/eGcjY56yF2qGQwUz1xSaE2jit++h9mpYZpqYwuYyrAGT+QlXDsjVSrUXcwiiaCxfsYOm2lmszyrh4tY/LbrY9+GQqK8+SdSyYO2qsmqbvEi+old7nrCaL1Ed7Gx8B05gJ82C1FGFds3FM9tDvUJa9E4vNJVZTLzy89i2dg4sLQmFMGZ8TkH61lUf4Q94D1xRPTYMZst/IK9vjhskJdJeTdKfXNMdOfvVR5eDS3STUlGczIYHEvdhxZ2LR1ud/NYpqYIMqEs7P6yTbIpz8eru61QjH4mg1AybF17mgESqAN4PRnl8uvTsBpT9SlsJ4tgBKtjIZXua36TRmirSIo+iqX8FIol7pKx5CNEox1EdpGC3WWR5C4/Qf+wm3Rc9Z+fhdraPGi8KsWdT0Y7idMylzVwldSXGf1MeGZSiFGe+1tin67kr6ixag26TYYaSi771i5ueEjr+U4+neqPY6H37KaEFzBGFqfpuZIXUEsyIJST01xd2walDwvtGd0Xr7al/ALSXKbRNHSh1/xe9cHVDs+1hv7ul6xPX5ppZAjlZm446vuIsuiiW+rf8Yhmil+Bc0N3Ej3UxAXcTzWdZxEhaN3HRJaX5VMyyR3jLXxZDTnkbrsM3cA1eD52UGL2imx3xA7FB2wN+c9Opo3UG3rZDeIn9Wz2kCfTRVwEesH2oCn0MRHFzZWZcHm4y8GmVp/4BBzd7pXZbBd+3Kehjfw/N0duh2e4hTmuouCuvjrbo4uZaX5DqOyT+PxsJXTBMIOfstFd2/BF/8fnyximG1rFk/Bb6AWOywqHHSYhPhjy0zjuOWSndcUAMwVVtGtDZrFT1FCF+Bboxaz+wYujXVBNPSRt3TBel3xHhVk/9xASyFLqjEhr+/FFxMh7YiKktkftn5CDNDW7xTd7kcU1MJRWMm9Vb55YbVIl5D36BxqFk6osFmqjl8GTjLp7qCnHWMPa24NoufkdWuo7+j/zxUx0N+hbaBqQW6VGia52kcsnkb1p1/I5vgo26CIertrZgMfT8jqxrkeJfAMtwmAWX95Uo/g814vXll5BStHMzzG50EN8RE4g1WgWNNwtUpG10jl8S1zZvvfT7Urzi5eCKOEtweoMJWKejoFKoTY0TliqpCCU+WsqI7ywhpzipVFyeKKikfE+o63t11qguWAP/Wau6OEQE52l5dkq3BGeqwimFMnktyn4J4uoS3aNakAj8XbqStjpC/nXpL354q/zo3SxATjjuEtpr7H5uiodjVHoivbLhvoxnCDdMdZn/RMz0x/k0UIz3lv/EdN0K3pYdrO72VeeH24La2aqJ7wjWeFLhjlus/jC89FaKC05oN6biWqpgGjYshGQTpdTP8ggEQ9mkuTmgqglsFkrE4UBUNreIbnEMHcE9xRN8P2wlZTjr0xKv1HOEvn531ApJFLt1WdXRk/UKSyjmdxIkke903Ftc7EEC1PVDiaNfToRT/c2j0km6I6mKqcW44GqobuOOyp4goU26hWewpfxE/QZaoo2+L50vx5N8rmG/IefiDeJeuqDiAUFwjqeWX3VU11fdoFn04N9PVhNJoSdZoDMztbZ42YhfaMvueW4Irkmp+sS+hlJLmL5y6aI2KYvhGr6kG1kopid1vuiNlY4aXO5KhJmmTo8AWmF8/qUugcq5rLxb7gCiunu2jnQhZ2C2CGD6gw71CMzw13kQ0xEVogsZdVtHHjLD4j7LiIvxpxswLwYRguoCG6H7isSi/qwwQ0Rp8U4/IeuNq/oSDsDfto8dJx9ExJJyVqwX3S9Hi2TazjLCsNtu1984NXMdnbPLbaTdCv1Xpf02+UTqMZe8QWquBlDKoeEtp3e6+qTa7gV+SnG+VIhOeWop/0g56o0EFf+QC1wOdwRPyJH1U/AvgPJYffZMqEtzo4jhfoiKdOyrT7uqqA1NIvricqK3ei1gBW8DwE5zM8Jl3CCUC8MRpH0EbscEoihOptLBntDP+/CH5RWLkfvQhn1TCahR/w201XcYEvUGZbJbnajXRWyh/Xgt/TqkIBOcEXkPBsZHtiaaKlMbWbDSdGf7ab3aSl51fe3qf3nMM3e9vF5W5/BwQT/21ZQ611W2YGPtb8hHbuuiBP+nG6Op6HVqJUlEMUexs1YH5qbTBILRCY2nORVUeh0V1X/hwrwJuy5u2KWupx0Bj1NXtBsuKkezra58+Ez9NGN1R3x0VRindg7mRGZMA8XNOd4jXCIL+IfXYMAN3RSbVUT+oTFdmfMOl1R72SvPQtpwl95zZUxn+g9MtnVMOvDbXVcRnOd+Hr6iDcWH0g6/xRvD99FYtwJR/YlbD05AmFUneyl71x3W17
k8xNRMrnJR1djaUGxlsThY6ARjgBPUSc7kkeH/GQIKilgG+8KRCv8mVLcW+Z300I7NBzNJ0XZZhSR1OPSLmHdMOJF8Wf5HzD9K5zFFXG/sFIewu1RPFSOrULH1JTwUR1UMdUvNQAv5jHwTb3KxuWt8StXkuz3mfklNIcc0z3DPyhn9opkrClsVI/xqRBbwytYQq7gQTYNXi4bmGPyjk+CYuiHfj8fp3vDMZ+QZSRvzW6Yq7OilGQHFMfx3GyZXBa2DMa7S2YeuWeHyMy6p3lo29LNtDR3rq5Ljf+RI2guPkcHy9rkF2mJEvvqNI+4jRUs50FfgWy+u5uDaynIAq15dF4tPIB9KIp8L7PDUv1NVoWWJht6iQrIdfgcLu05vsbHBkGc5mECeyC2spv8F4rG++C80ICkoNXwOlIwXEOJzSyX23UIU0h/mklVoY9lfNdVL/E36VD20u4QbVxm6GeKyfGkEvrFUqPR/H9s/XjiBWp1EAAAAABJRU5ErkJggg=='; // Important TODO: Refactor this file to get rid of duplicate and confusing code @@ -3356,10 +3621,20 @@ void main() { let ready = false; + const reprojectDecay = 0.975; + const maxReprojectedSamples = Math.round(reprojectDecay / (1 - reprojectDecay)); + const fullscreenQuad = makeFullscreenQuad(gl); + const textureAllocator = makeTextureAllocator(gl); - const rayTracingShader = makeRayTracingShader({gl, optionalExtensions, fullscreenQuad, textureAllocator, scene, bounces}); - const toneMapShader = makeToneMapShader({gl, optionalExtensions, fullscreenQuad, textureAllocator, toneMappingParams}); + + const rayTracingShader = makeRayTracingShader({bounces, fullscreenQuad, gl, optionalExtensions, scene, textureAllocator}); + + const reprojectShader = makeReprojectShader({ fullscreenQuad, gl, maxReprojectedSamples, textureAllocator }); + + const toneMapShader = makeToneMapShader({ + fullscreenQuad, gl, optionalExtensions, textureAllocator, toneMappingParams + }); const noiseImage = new Image(); noiseImage.src = noiseBase64; @@ -3368,25 +3643,42 @@ void main() { ready = true; }; - const useLinearFiltering = optionalExtensions.OES_texture_float_linear; - // full resolution buffer representing the rendered scene with HDR lighting - const hdrBuffer = makeFramebuffer({ + let hdrBuffer = makeFramebuffer({ + gl, + renderTarget: rayTracingRenderTargets, + }); + + let hdrPreviewBuffer = makeFramebuffer({ gl, - renderTarget: { storage: 'float' } + renderTarget: rayTracingRenderTargets, }); - // lower resolution buffer used for the first frame - const hdrPreviewBuffer = makeFramebuffer({ + let historyBuffer = makeFramebuffer({ gl, - renderTarget: { storage: 'float' }, - useLinearFiltering + renderTarget: rayTracingRenderTargets, + linearFiltering: true }); + let reprojectBuffer = makeFramebuffer({ + gl, + renderTarget: rayTracingRenderTargets + }); + + let reprojectPreviewBuffer = makeFramebuffer({ + gl, + renderTarget: rayTracingRenderTargets, + linearFiltering: true + }); + + let lastToneMappedBuffer = reprojectPreviewBuffer; + + const clearToBlack = new Float32Array([0, 0, 0, 0]); + // used to sample only a portion of the scene to the HDR Buffer to prevent the GPU from locking up from excessive computation const tileRender = makeTileRender(gl); - const lastCamera = new LensCamera(); + const lastCamera = new THREE$1.PerspectiveCamera(); // how many samples to render with uniform noise before switching to stratified noise const numUniformSamples = 6; @@ -3395,54 +3687,58 @@ void main() { // higher number results in faster convergence over time, but with lower quality initial samples const strataCount = 6; - let sampleCount = 0; + let sampleCount = 1; let sampleRenderedCallback = () => {}; - function clear() { - hdrBuffer.bind(); - gl.clear(gl.COLOR_BUFFER_BIT); - hdrBuffer.unbind(); - - sampleCount = 0; + function initFirstSample() { + sampleCount = 1; tileRender.reset(); } - function initFirstSample(camera) { - lastCamera.copy(camera); - rayTracingShader.setCamera(camera); - 
rayTracingShader.useStratifiedSampling(false);
-    clear();
-  }
-
   function setPreviewBufferDimensions() {
-    const aspectRatio = hdrBuffer.width / hdrBuffer.height;
-    const desiredTimeForPreview = 16; // 60 fps
+    const desiredTimeForPreview = 10;
     const numPixelsForPreview = desiredTimeForPreview / tileRender.getTimePerPixel();
-    const previewWidth = clamp(Math.sqrt(numPixelsForPreview * aspectRatio), 1, hdrBuffer.width);
-    const previewHeight = clamp(previewWidth / aspectRatio, 1, hdrBuffer.height);
-    if (previewWidth !== hdrPreviewBuffer.width) {
+
+    const aspectRatio = hdrBuffer.width / hdrBuffer.height;
+    const previewWidth = Math.round(clamp(Math.sqrt(numPixelsForPreview * aspectRatio), 1, hdrBuffer.width));
+    const previewHeight = Math.round(clamp(previewWidth / aspectRatio, 1, hdrBuffer.height));
+
+    const diff = Math.abs(previewWidth - hdrPreviewBuffer.width) / previewWidth;
+    if (diff > 0.05) { // don't bother resizing if the buffer size is only slightly different
       hdrPreviewBuffer.setSize(previewWidth, previewHeight);
+      reprojectPreviewBuffer.setSize(previewWidth, previewHeight);
+      historyBuffer.setSize(previewWidth, previewHeight);
     }
   }
 
-  function camerasEqual(cam1, cam2) {
+  function areCamerasEqual(cam1, cam2) {
     return numberArraysEqual(cam1.matrixWorld.elements, cam2.matrixWorld.elements) &&
      cam1.aspect === cam2.aspect &&
      cam1.fov === cam2.fov &&
-      cam1.focus === cam2.focus &&
-      cam1.aperture === cam2.aperture;
+      cam1.focus === cam2.focus;
+  }
+
+  function clearBuffer(buffer) {
+    buffer.bind();
+    gl.clear(gl.COLOR_BUFFER_BIT);
+    buffer.unbind();
   }
 
   function addSampleToBuffer(buffer) {
+    buffer.bind();
+
+    gl.blendEquation(gl.FUNC_ADD);
     gl.blendFunc(gl.ONE, gl.ONE);
     gl.enable(gl.BLEND);
-    buffer.bind();
+
+    gl.clearBufferfv(gl.COLOR, rayTracingRenderTargets.location.position, clearToBlack);
+
+    gl.viewport(0, 0, buffer.width, buffer.height);
     rayTracingShader.draw();
-    buffer.unbind();
+
    gl.disable(gl.BLEND);
+    buffer.unbind();
   }
 
   function newSampleToBuffer(buffer) {
@@ -3452,32 +3748,28 @@ void main() {
     buffer.unbind();
   }
 
-  function renderPreview() {
-    newSampleToBuffer(hdrPreviewBuffer);
-
+  function toneMapToScreen(buffer) {
     gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
-    toneMapShader.draw({
-      texture: hdrPreviewBuffer.texture,
-    });
+    toneMapShader.draw(buffer.texture);
+    lastToneMappedBuffer = buffer;
   }
 
-  function renderTile(x, y, width, height) {
+  function renderTile(buffer, x, y, width, height) {
     gl.scissor(x, y, width, height);
     gl.enable(gl.SCISSOR_TEST);
-    addSampleToBuffer(hdrBuffer);
+    addSampleToBuffer(buffer);
     gl.disable(gl.SCISSOR_TEST);
   }
 
-  function hdrBufferToScreen() {
-    gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
-    toneMapShader.draw({
-      texture: hdrBuffer.texture,
-    });
-  }
+  function updateSeed(width, height) {
+    rayTracingShader.setSize(width, height);
 
-  function updateSeed() {
-    if (sampleCount === 2) {
-      rayTracingShader.useStratifiedSampling(true);
+    const jitterX = (Math.random() - 0.5) / width;
+    const jitterY = (Math.random() - 0.5) / height;
+    rayTracingShader.setJitter(jitterX, jitterY);
+    reprojectShader.setJitter(jitterX, jitterY);
+
+    if ( sampleCount === 1) {
       rayTracingShader.setStrataCount(1);
     } else if (sampleCount === numUniformSamples) {
       rayTracingShader.setStrataCount(strataCount);
@@ -3489,77 +3781,126 @@ void main() {
 
   function drawTile(camera) {
     if (!ready) {
       return;
-    } else if (!camerasEqual(camera, lastCamera)) {
-      initFirstSample(camera);
+    }
+
+    if (sampleCount === 1) {
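// The drawTile() path below feeds lastCamera into reprojectShader.setPreviousCamera(),
// which uploads projectionMatrix * matrixWorldInverse as `historyCamera`; the GLSL
// reproject() earlier in this patch then maps a world-space hit point into the previous
// frame's texture coordinates. The same math written host-side with three.js types,
// purely as an illustration (this helper is not part of the renderer):
function reprojectToHistoryUv(THREE, worldPosition, previousCamera) {
  // same matrix the renderer builds in setPreviousCamera()
  const historyCamera = new THREE.Matrix4().multiplyMatrices(
    previousCamera.projectionMatrix,
    previousCamera.matrixWorldInverse
  );

  const clip = new THREE.Vector4(worldPosition.x, worldPosition.y, worldPosition.z, 1.0)
    .applyMatrix4(historyCamera);

  // perspective divide, then remap NDC [-1, 1] to texture coordinates [0, 1]
  return new THREE.Vector2(
    0.5 * clip.x / clip.w + 0.5,
    0.5 * clip.y / clip.w + 0.5
  );
}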
+      reprojectShader.setPreviousCamera(lastCamera);
+    }
+
+    if (!areCamerasEqual(camera, lastCamera)) {
+      initFirstSample();
       setPreviewBufferDimensions();
-      renderPreview();
+
+      rayTracingShader.setCamera(camera);
+      updateSeed(hdrPreviewBuffer.width, hdrPreviewBuffer.height);
+      newSampleToBuffer(hdrPreviewBuffer);
+
+      reprojectShader.setBlendAmount(reprojectDecay);
+
+      const temp = historyBuffer;
+      historyBuffer = reprojectPreviewBuffer;
+      reprojectPreviewBuffer = temp;
+
+      reprojectPreviewBuffer.bind();
+      gl.viewport(0, 0, reprojectPreviewBuffer.width, reprojectPreviewBuffer.height);
+      reprojectShader.draw(hdrPreviewBuffer.texture, lastToneMappedBuffer.texture);
+      reprojectPreviewBuffer.unbind();
+
+      toneMapToScreen(reprojectPreviewBuffer);
+
+      clearBuffer(hdrBuffer);
+      lastCamera.copy(camera);
     } else {
       const { x, y, tileWidth, tileHeight, isFirstTile, isLastTile } = tileRender.nextTile();
 
       if (isFirstTile) {
         sampleCount++;
-        updateSeed();
+        updateSeed(hdrBuffer.width, hdrBuffer.height);
       }
 
-      renderTile(x, y, tileWidth, tileHeight);
+      renderTile(hdrBuffer, x, y, tileWidth, tileHeight);
 
       if (isLastTile) {
-        hdrBufferToScreen();
+        let blendAmount = clamp(1.0 - sampleCount / maxReprojectedSamples, 0, 1);
+        blendAmount *= blendAmount;
+
+        if (blendAmount > 0.0) {
+          reprojectShader.setBlendAmount(blendAmount);
+          reprojectBuffer.bind();
+          gl.viewport(0, 0, reprojectBuffer.width, reprojectBuffer.height);
+          reprojectShader.draw(hdrBuffer.texture, reprojectPreviewBuffer.texture);
+          reprojectBuffer.unbind();
+
+          toneMapToScreen(reprojectBuffer);
+        } else {
+          toneMapToScreen(hdrBuffer);
+        }
+
        sampleRenderedCallback(sampleCount);
      }
    }
  }
 
-  function drawOffscreenTile(camera) {
+  // debug draw call to measure performance
+  // use full resolution buffers every frame
+  // reproject every frame
+  function drawFull(camera) {
    if (!ready) {
      return;
-    } else if (!camerasEqual(camera, lastCamera)) {
-      initFirstSample(camera);
    }
 
-    const { x, y, tileWidth, tileHeight, isFirstTile, isLastTile } = tileRender.nextTile();
+    if (sampleCount === 1) {
+      reprojectShader.setPreviousCamera(lastCamera);
+    }
+
+    if (!areCamerasEqual(camera, lastCamera)) {
+      sampleCount = 1;
 
-    if (isFirstTile) {
+      rayTracingShader.setCamera(camera);
+
+      clearBuffer(hdrBuffer);
+      lastCamera.copy(camera);
+    } else {
      sampleCount++;
-      updateSeed();
    }
 
-    renderTile(x, y, tileWidth, tileHeight);
+    updateSeed(hdrBuffer.width, hdrBuffer.height);
 
-    if (isLastTile) {
-      sampleRenderedCallback(sampleCount);
-    }
-  }
+    addSampleToBuffer(hdrBuffer);
 
-  function drawFull(camera) {
-    if (!ready) {
-      return;
-    } else if (!camerasEqual(camera, lastCamera)) {
-      initFirstSample(camera);
+    let blendAmount = clamp(1.0 - sampleCount / maxReprojectedSamples, 0, 1);
+    blendAmount *= blendAmount;
+    reprojectShader.setBlendAmount(blendAmount);
+
+    if (historyBuffer.width !== hdrBuffer.width) {
+      historyBuffer.setSize(hdrBuffer.width, hdrBuffer.height);
    }
 
-    sampleCount++;
+    const temp = historyBuffer;
+    historyBuffer = reprojectBuffer;
+    reprojectBuffer = temp;
 
-    updateSeed();
-    addSampleToBuffer(hdrBuffer);
-    hdrBufferToScreen();
+    reprojectBuffer.bind();
+    gl.viewport(0, 0, reprojectBuffer.width, reprojectBuffer.height);
+    reprojectShader.draw(hdrBuffer.texture, historyBuffer.texture);
+    reprojectBuffer.unbind();
+
+    toneMapToScreen(reprojectBuffer);
  }
 
-  function setSize(width, height) {
-    rayTracingShader.setSize(width, height);
-    hdrBuffer.setSize(width, height);
-    tileRender.setSize(width, height);
-    clear();
+  function setSize(w, h) {
+    rayTracingShader.setSize(w, h);
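// The blend math used in drawTile()/drawFull() above: with an exponential blend of
// factor a (reprojectDecay), the history term can accumulate at most
// a + a^2 + a^3 + ... = a / (1 - a) samples' worth of weight, which is where
// maxReprojectedSamples comes from; blendAmount then fades out quadratically as real
// samples accumulate. A plain-number sketch (the constants mirror the ones set up in
// this file; the helper names are illustrative only):
const exampleDecay = 0.975;
const exampleMaxReprojected = Math.round(exampleDecay / (1 - exampleDecay)); // 39

function exampleBlendAmount(sampleCount) {
  const t = Math.min(Math.max(1 - sampleCount / exampleMaxReprojected, 0), 1);
  return t * t; // ~0.95 at sampleCount = 1, exactly 0 once sampleCount reaches 39
}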
+    tileRender.setSize(w, h);
+    hdrBuffer.setSize(w, h);
+    reprojectBuffer.setSize(w, h);
+    initFirstSample();
   }
 
   return {
     drawTile,
-    drawOffscreenTile,
     drawFull,
     restartTimer: tileRender.restartTimer,
-    setRenderTime: tileRender.setRenderTime,
     setSize,
-    hdrBufferToScreen,
     getTotalSamplesRendered() {
       return sampleCount;
     },
@@ -3574,6 +3915,7 @@ void main() {
 
   const glRequiredExtensions = [
     'EXT_color_buffer_float', // enables rendering to float buffers
+    'EXT_float_blend',
   ];
 
   const glOptionalExtensions = [
@@ -3597,7 +3939,6 @@ void main() {
     let pipeline = null;
 
     const size = new THREE$1.Vector2();
-    let renderTime = 22;
     let pixelRatio = 1;
 
     const module = {
@@ -3632,7 +3973,6 @@ void main() {
        }
      };
 
-      module.setRenderTime(renderTime);
      module.setSize(size.width, size.height);
      module.needsUpdate = false;
    }
@@ -3676,17 +4016,6 @@ void main() {
 
    module.getPixelRatio = () => pixelRatio;
 
-    module.setRenderTime = (time) => {
-      renderTime = time;
-      if (pipeline) {
-        pipeline.setRenderTime(time);
-      }
-    };
-
-    module.getRenderTime = () => {
-      return renderTime;
-    };
-
    module.getTotalSamplesRendered = () => {
      if (pipeline) {
        return pipeline.getTotalSamplesRendered();
diff --git a/package-lock.json b/package-lock.json
index f5b2334..1f20d88 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "ray-tracing-renderer",
-  "version": "0.2.2",
+  "version": "0.3.0",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
diff --git a/package.json b/package.json
index 463c690..3a936db 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "ray-tracing-renderer",
-  "version": "0.2.2",
+  "version": "0.3.0",
   "description": "A [Three.js](https://github.com/mrdoob/three.js/) renderer which utilizes path tracing to render a scene with true photorealism. The renderer supports global illumination, reflections, soft shadows, and realistic environment lighting.",
   "main": "build/RayTracingRenderer.js",
   "scripts": {
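// This release adds 'EXT_float_blend' to the required-extension list above, presumably
// because the accumulation pass now blends into float render targets. A small,
// hypothetical pre-flight check an application could run before constructing the
// renderer (illustrative only; the function name is not part of the library's API):
function supportsFloatBlending(canvas) {
  const gl = canvas.getContext('webgl2');
  if (!gl) {
    return false; // WebGL2 itself is unavailable
  }
  // both extensions must be present: float color buffers, and blending into them
  return ['EXT_color_buffer_float', 'EXT_float_blend']
    .every(name => gl.getExtension(name) !== null);
}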