diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b512c09 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +node_modules \ No newline at end of file diff --git a/INSTRUCTION.md b/INSTRUCTION.md new file mode 100644 index 0000000..fad5d78 --- /dev/null +++ b/INSTRUCTION.md @@ -0,0 +1,169 @@ +WebGL Clustered Deferred and Forward+ Shading - Instructions +========================================================== + +## Running the code + +- Clone this repository +- Download and install [Node.js](https://nodejs.org/en/) +- Run `npm install` in the root directory of this project. This will download and install dependences +- Run `npm start` and navigate to [http://localhost:5650](http://localhost:5650) + +This project requires a WebGL-capable browser with support for several extensions. You can check for support on [WebGL Report](http://webglreport.com/): +- OES_texture_float +- OES_texture_float_linear +- OES_element_index_uint +- EXT_frag_depth +- WEBGL_depth_texture +- WEBGL_draw_buffer + +Google Chrome seems to work best on all platforms. If you have problems running the starter code, use Chrome or Chromium, and make sure you have updated your browser and video drivers. + +## Requirements +**Ask on the mailing list for any clarifications** + +In this project, you are given code for: +- Loading glTF models +- Camera control +- Simple forward renderer +- Partial implementation and setup for Clustered Deferred and Forward+ shading +- Many helpful helpers + +## Required Tasks + +**Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily. 
+ +**Clustered Forward+** + - Build a data structure to keep track of how many lights are in each cluster and what their indices are + - Render the scene using only the lights that overlap a given cluster + +**Clustered Deferred** + - Reuse clustering logic from Clustered Forward+ + - Store vertex attributes in g-buffer + - Read g-buffer in a shader to produce final output + +**Effects** +- Implement deferred Blinn-Phong shading (diffuse + specular) for point lights +- OR +- Implement one of the following effects: + - Bloom using post-process blur (box or Gaussian) + - Toon shading (with ramp shading + simple depth-edge detection for outlines) + +**Optimizations** + - Optimized g-buffer format - reduce the number and size of g-buffers: + - Ideas: + - Pack values together into vec4s + - Use 2-component normals + - Quantize values by using smaller texture types instead of gl.FLOAT + - Reduce number of properties passed via g-buffer, e.g. by: + - Reconstructing world space position using camera matrices and X/Y/depth + - For credit, you must show a good optimization effort and record the performance of each version you test, in a simple table. + - It is expected that you won't need all 4 provided g-buffers for a basic pipeline make sure you disable the unused ones. + +## Performance & Analysis + +Compare your implementations of Clustered Forward+ and Clustered Deferred shading and analyze their differences. + - Is one of them faster? + - Is one of them better at certain types of workloads? + - What are the benefits and tradeoffs of using one over the other? + - For any differences in performance, briefly explain what may be causing the difference. + +**Before doing performance analysis**, you must disable debug mode by changing `DEBUG` to false in `src/init.js`. Keep it enabled when developing - it helps find WebGL errors *much* more easily. + +Optimize your JavaScript and/or GLSL code. Chrome/Firefox's profiling tools (see Resources section) will be useful for this. 
For each change that improves performance, show the before and after render times. + +For each new effect feature (required or extra), please provide the following analysis: + - Concise overview write-up of the feature. + - Performance change due to adding the feature. + - If applicable, how do parameters (such as number of lights, etc.) affect performance? Show data with simple graphs. + - Show timing in milliseconds, not FPS. + - If you did something to accelerate the feature, what did you do and why? + - How might this feature be optimized beyond your current implementation? + +For each performance feature (required or extra), please provide: + - Concise overview write-up of the feature. + - Detailed performance improvement analysis of adding the feature + - What is the best case scenario for your performance improvement? What is the worst? Explain briefly. + - Are there tradeoffs to this performance feature? Explain briefly. + - How do parameters (such as number of lights, tile size, etc.) affect performance? Show data with graphs. + - Show timing in milliseconds, not FPS. + - Show debug views when possible. + - If the debug view correlates with performance, explain how. + +## Starter Code Tour + +Initialization happens in `src/init.js`. You don't need to worry about this; it is mostly initializing the gl context, debug modes, extensions, etc. + +`src/main.js` is configuration for the renderers. It sets up the gui for switching renderers and initializes the scene and render loop. The only important thing here are the arguments for `ClusteredForwardPlusRenderer` and `ClusteredDeferredRenderer`. These constructors take the number of x, y, and z slices to split the frustum into. + +`src/scene.js` handles loading a .gltf scene and initializes the lights. Here, you can modify the number of lights, their positions, and how they move around. Also, take a look at the `draw` function. 
This handles binding the vertex attributes, which are hardcoded to `a_position`, `a_normal`, and `a_uv`, as well as the color and normal maps to targets `gl.TEXTURE0` and `gl.TEXTURE1`. + +**Simple Forward Shading Pipeline** +I've written a simple forward shading pipeline as an example for how everything works. Check out `src/forward.js`. + +The constructor for the renderer initializes a `TextureBuffer` to store the lights. This isn't totally necessary for a forward renderer, but you'll need this to do clustered shading. What we're trying to do here is upload to a shader all the positions of our lights. However, we unfortunately can't upload arbitrary data to the GPU with WebGL so we have to pack it as a texture. Figuring out how to do this is terribly painful so I did it for you. + +The constructor for `TextureBuffer` takes two arguments, the number of elements, and the size of each element (in floats). It will allocate a floating point texture of dimension `numElements x ceil(elementSize / 4)`. This is because we pack every 4 adjacent values into a single pixel. + +Go to the `render` function to see how this is used in practice. Here, the buffer for the texture storing the lights is populated with the light positions. Notice that the first four values get stored at locations: `this._lightTexture.bufferIndex(i, 0) + 0` to `this._lightTexture.bufferIndex(i, 0) + 3` and then the next three are at `this._lightTexture.bufferIndex(i, 1) + 0` to `this._lightTexture.bufferIndex(i, 0) + 2`. Keep in mind that the data is stored as a texture, so the 5th element is actually the 1st element of the pixel in the second row. + +Look again at the constructor of `ForwardRenderer`. Also initialized here is the shader program. The shader program takes in a vertex source, a fragment source, and then a map of what uniform and vertex attributes should be extracted from the shader. 
In this code, the shader location for `u_viewProjectionMatrix` gets stored as `this._shaderProgram.u_viewProjectionMatrix`. If you look at `fsSource`, there's a strange thing happening there. `fsSource` is actually a function and it's being called with a configuration object containing the number of lights. What this is doing is creating a shader source string that is parameterized. We can't have dynamic loops in WebGL, but we can dynamically generate static shaders. If you take a look at `src/shaders/forward.frag.glsl.js`, you'll see that `${numLights}` is used throughout. + +Now go look inside `src/shaders/forward.frag.glsl.js`. Here, there is a simple loop which loops over the lights and applies shading for each one. I've written a helper called `UnpackLight(index)` which unpacks the `index`th light from the texture into a struct. Make sure you fully understand how this is working because you will need to implement something similar for clusters. Inside `UnpackLight` I use another helper called `ExtractFloat(texture, textureWidth, textureHeight, index, component)`. This pulls out the `component`th component from the `index`th value packed inside a `textureWidth x textureHeight` texture. Again, this is meant to be an example implementation. Using this function to pull out four values into a `vec4` will be unecessarily slow. + +**Getting Started** +Here's a few tips to get you started. + +1. Complete `updateClusters` in `src/renderers/clustered.js`. This should update the cluster `TextureBuffer` with a mapping from cluster index to light count and light list (indices). + +2. Update `src/shaders/clusteredForward.frag.glsl.js` to + - Determine the cluster for a fragment + - Read in the lights in that cluster from the populated data + - Do shading for just those lights + - You may find it necessary to bind additional uniforms in `src/renderers/clusteredForwardPlus.js` + +3. Update `src/shaders/deferredToTexture.frag.glsl` to write desired data to the g-buffer +4. 
Update `src/deferred.frag.glsl` to read values from the g-buffer and perform simple forward rendering. (Right now it just outputs the screen xy coordinate) +5. Update it to use clustered shading. You should be able to reuse lots of stuff from Clustered Forward+ for this. You will also likely need to update shader inputs in `src/renderers/clusteredDeferred.js` + +## README + +Replace the contents of the README.md in a clear manner with the following: +- A brief description of the project and the specific features you implemented. +- At least one screenshot of your project running. +- A 30+ second video/gif of your project running showing all features. (Even though your demo can be seen online, using multiple render targets means it won't run on many computers. A video will work everywhere.) +- Performance analysis (described above) + +**GitHub Pages** +Since this assignment is in WebGL, you can make your project easily viewable by taking advantage of GitHub's project pages feature. + +Once you are done with the assignment, create a new branch: + +`git branch gh-pages` + +Run `npm run build` and commit the compiled files + +Push the branch to GitHub: + +`git push origin gh-pages` + +Now, you can go to `.github.io/` to see your renderer online from anywhere. Add this link to your README. + +## Submit + +Beware of any build issues discussed on the Google Group. + +Open a GitHub pull request so that we can see that you have finished. The title should be "Project 5B: YOUR NAME". The template of the comment section of your pull request is attached below, you can do some copy and paste: + +- Repo Link +- (Briefly) Mentions features that you've completed. Especially those bells and whistles you want to highlight + - Feature 0 + - Feature 1 + - ... +- Feedback on the project itself, if any. + +### Third-Party Code Policy + +- Use of any third-party code must be approved by asking on our mailing list. +- If it is approved, all students are welcome to use it. 
Generally, we approve use of third-party code that is not a core part of the project. For example, for the path tracer, we would approve using a third-party library for loading models, but would not approve copying and pasting a CUDA function for doing refraction. +- Third-party code **MUST** be credited in README.md. +- Using third-party code without its approval, including using another student's code, is an academic integrity violation, and will, at minimum, result in you receiving an F for the semester. diff --git a/index.html b/index.html new file mode 100644 index 0000000..fc66b57 --- /dev/null +++ b/index.html @@ -0,0 +1,24 @@ + + + + + + + + + + \ No newline at end of file diff --git a/lib/minimal-gltf-loader.js b/lib/minimal-gltf-loader.js new file mode 100644 index 0000000..9126a68 --- /dev/null +++ b/lib/minimal-gltf-loader.js @@ -0,0 +1,686 @@ +// From https://github.com/shrekshao/minimal-gltf-loader +import {vec3, vec4, quat, mat4} from 'gl-matrix'; + +var MinimalGLTFLoader = MinimalGLTFLoader || {}; + +// Data classes +var Scene = MinimalGLTFLoader.Scene = function () { + // not 1-1 to meshes in json file + // each mesh with a different node hierarchy is a new instance + this.meshes = []; + //this.meshes = {}; +}; + +// Node + +var Mesh = MinimalGLTFLoader.Mesh = function () { + this.meshID = ''; // mesh id name in glTF json meshes + this.primitives = []; +}; + +var Primitive = MinimalGLTFLoader.Primitive = function () { + this.mode = 4; // default: gl.TRIANGLES + + this.matrix = mat4.create(); + + this.indices = null; + this.indicesComponentType = 5123; // default: gl.UNSIGNED_SHORT + + // !!: assume vertex buffer is interleaved + // see discussion https://github.com/KhronosGroup/glTF/issues/21 + this.vertexBuffer = null; + + // attribute info (stride, offset, etc) + this.attributes = {}; + + // cur glTF spec supports only one material per primitive + this.material = null; + this.technique = null; + + + + // // Program gl buffer name + // // ?? 
reconsider if it's suitable to put it here + // this.indicesWebGLBufferName = null; + // this.vertexWebGLBufferName = null; + +}; + + +/** +* +*/ +var glTFModel = MinimalGLTFLoader.glTFModel = function () { + this.defaultScene = ''; + this.scenes = {}; + + this.nodeMatrix = {}; + + this.json = null; + + this.shaders = {}; + this.programs = {}; + + this.images = {}; + +}; + + + +var gl; + +var glTFLoader = MinimalGLTFLoader.glTFLoader = function (glContext) { + gl = glContext; + this._init(); + this.glTF = null; +}; + +glTFLoader.prototype._init = function() { + this._parseDone = false; + this._loadDone = false; + + this._bufferRequested = 0; + this._bufferLoaded = 0; + this._buffers = {}; + this._bufferTasks = {}; + + // ?? Move to glTFModel to avoid collected by GC ?? + this._bufferViews = {}; + + this._shaderRequested = 0; + this._shaderLoaded = 0; + + this._imageRequested = 0; + this._imageLoaded = 0; + + this._pendingTasks = 0; + this._finishedPendingTasks = 0; + + this.onload = null; + +}; + + +glTFLoader.prototype._getBufferViewData = function(json, bufferViewID, callback) { + var bufferViewData = this._bufferViews[bufferViewID]; + if(!bufferViewData) { + // load bufferView for the first time + var bufferView = json.bufferViews[bufferViewID]; + var bufferData = this._buffers[bufferView.buffer]; + if (bufferData) { + // buffer already loaded + //console.log("dependent buffer ready, create bufferView" + bufferViewID); + this._bufferViews[bufferViewID] = bufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); + callback(bufferViewData); + } else { + // buffer not yet loaded + // add pending task to _bufferTasks + //console.log("pending Task: wait for buffer to load bufferView " + bufferViewID); + this._pendingTasks++; + var bufferTask = this._bufferTasks[bufferView.buffer]; + if (!bufferTask) { + this._bufferTasks[bufferView.buffer] = []; + bufferTask = this._bufferTasks[bufferView.buffer]; + } + var loader = this; + 
bufferTask.push(function(newBufferData) { + // share same bufferView + // hierarchy needs to be post processed in the renderer + var curBufferViewData = loader._bufferViews[bufferViewID]; + if (!curBufferViewData) { + console.log('create new BufferView Data for ' + bufferViewID); + curBufferViewData = loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); + } + loader._finishedPendingTasks++; + callback(curBufferViewData); + + // // create new bufferView for each mesh access with a different hierarchy + // // hierarchy transformation will be prepared in this way + // console.log('create new BufferView Data for ' + bufferViewID); + // loader._bufferViews[bufferViewID] = newBufferData.slice(bufferView.byteOffset, bufferView.byteOffset + bufferView.byteLength); + // loader._finishedPendingTasks++; + // callback(loader._bufferViews[bufferViewID]); + }); + } + + } else { + // no need to load buffer from file + // use cached ones + //console.log("use cached bufferView " + bufferViewID); + callback(bufferViewData); + } +}; + +// glTFLoader.prototype._doNextLoadTaskInList = function () { +// }; + +glTFLoader.prototype._checkComplete = function () { + if (this._bufferRequested == this._bufferLoaded && + this._shaderRequested == this._shaderLoaded && + this._imageRequested == this._imageLoaded + // && other resources finish loading + ) { + this._loadDone = true; + } + + if (this._loadDone && this._parseDone && this._pendingTasks == this._finishedPendingTasks) { + this.onload(this.glTF); + } +}; + + +glTFLoader.prototype._parseGLTF = function (json) { + + this.glTF.json = json; + this.glTF.defaultScene = json.scene; + + // Iterate through every scene + if (json.scenes) { + for (var sceneID in json.scenes) { + var newScene = new Scene(); + this.glTF.scenes[sceneID] = newScene; + + var scene = json.scenes[sceneID]; + var nodes = scene.nodes; + var nodeLen = nodes.length; + + // Iterate through every node 
within scene + for (var n = 0; n < nodeLen; ++n) { + var nodeID = nodes[n]; + //var node = json.nodes[nodeName]; + + // Traverse node + this._parseNode(json, nodeID, newScene); + } + } + } + + this._parseDone = true; + this._checkComplete(); +}; + + +var translationVec3 = vec3.create(); +var rotationQuat = quat.create(); +var scaleVec3 = vec3.create(); +var TRMatrix = mat4.create(); + +glTFLoader.prototype._parseNode = function(json, nodeID, newScene, matrix) { + var node = json.nodes[nodeID]; + + if (matrix === undefined) { + matrix = mat4.create(); + } + + var curMatrix = mat4.create(); + + if (node.hasOwnProperty('matrix')) { + // matrix + for(var i = 0; i < 16; ++i) { + curMatrix[i] = node.matrix[i]; + } + mat4.multiply(curMatrix, matrix, curMatrix); + //mat4.multiply(curMatrix, curMatrix, matrix); + } else { + // translation, rotation, scale (TRS) + // TODO: these labels are optional + vec3.set(translationVec3, node.translation[0], node.translation[1], node.translation[2]); + quat.set(rotationQuat, node.rotation[0], node.rotation[1], node.rotation[2], node.rotation[3]); + mat4.fromRotationTranslation(TRMatrix, rotationQuat, translationVec3); + mat4.multiply(curMatrix, curMatrix, TRMatrix); + vec3.set(scaleVec3, node.scale[0], node.scale[1], node.scale[2]); + mat4.scale(curMatrix, curMatrix, scaleVec3); + } + + // store node matrix + this.glTF.nodeMatrix[nodeID] = curMatrix; + + + + // Iterate through every mesh within node + var meshes = node.meshes; + if(!!meshes) { + var meshLen = meshes.length; + for (var m = 0; m < meshLen; ++m) { + var newMesh = new Mesh(); + newScene.meshes.push(newMesh); + + var meshName = meshes[m]; + var mesh = json.meshes[meshName]; + + newMesh.meshID = meshName; + + // Iterate through primitives + var primitives = mesh.primitives; + var primitiveLen = primitives.length; + + for (var p = 0; p < primitiveLen; ++p) { + var newPrimitive = new Primitive(); + newMesh.primitives.push(newPrimitive); + + var primitive = primitives[p]; + + if 
(primitive.indices) { + this._parseIndices(json, primitive, newPrimitive); + } + + this._parseAttributes(json, primitive, newPrimitive, curMatrix); + + // required + newPrimitive.material = json.materials[primitive.material]; + + if (newPrimitive.material.technique) { + newPrimitive.technique = json.techniques[newPrimitive.material.technique]; + } else { + // TODO: use default technique in glTF spec Appendix A + } + + } + } + } + + + // Go through all the children recursively + var children = node.children; + var childreLen = children.length; + for (var c = 0; c < childreLen; ++c) { + var childNodeID = children[c]; + this._parseNode(json, childNodeID, newScene, curMatrix); + } + +}; + + +glTFLoader.prototype._parseIndices = function(json, primitive, newPrimitive) { + + var accessorName = primitive.indices; + var accessor = json.accessors[accessorName]; + + newPrimitive.mode = primitive.mode || 4; + newPrimitive.indicesComponentType = accessor.componentType; + + var loader = this; + this._getBufferViewData(json, accessor.bufferView, function(bufferViewData) { + newPrimitive.indices = _getAccessorData(bufferViewData, accessor); + loader._checkComplete(); + }); +}; + + + + +//var tmpVec4 = vec4.create(); +//var inverseTransposeMatrix = mat4.create(); + +glTFLoader.prototype._parseAttributes = function(json, primitive, newPrimitive, matrix) { + // !! 
Assume interleaved vertex attributes + // i.e., all attributes share one bufferView + + + // vertex buffer processing + var firstSemantic = Object.keys(primitive.attributes)[0]; + var firstAccessor = json.accessors[primitive.attributes[firstSemantic]]; + var vertexBufferViewID = firstAccessor.bufferView; + var bufferView = json.bufferViews[vertexBufferViewID]; + + var loader = this; + + this._getBufferViewData(json, vertexBufferViewID, function(bufferViewData) { + var data = newPrimitive.vertexBuffer = _arrayBuffer2TypedArray( + bufferViewData, + 0, + bufferView.byteLength / ComponentType2ByteSize[firstAccessor.componentType], + firstAccessor.componentType + ); + + for (var attributeName in primitive.attributes) { + var accessorName = primitive.attributes[attributeName]; + var accessor = json.accessors[accessorName]; + + var componentTypeByteSize = ComponentType2ByteSize[accessor.componentType]; + + var stride = accessor.byteStride / componentTypeByteSize; + var offset = accessor.byteOffset / componentTypeByteSize; + var count = accessor.count; + + // // Matrix transformation + // if (attributeName === 'POSITION') { + // for (var i = 0; i < count; ++i) { + // // TODO: add vec2 and other(needed?) support + // vec4.set(tmpVec4, data[stride * i + offset] + // , data[stride * i + offset + 1] + // , data[stride * i + offset + 2] + // , 1); + // vec4.transformMat4(tmpVec4, tmpVec4, matrix); + // vec4.scale(tmpVec4, tmpVec4, 1 / tmpVec4[3]); + // data[stride * i + offset] = tmpVec4[0]; + // data[stride * i + offset + 1] = tmpVec4[1]; + // data[stride * i + offset + 2] = tmpVec4[2]; + // } + // } + // else if (attributeName === 'NORMAL') { + // mat4.invert(inverseTransposeMatrix, matrix); + // mat4.transpose(inverseTransposeMatrix, inverseTransposeMatrix); + + // for (var i = 0; i < count; ++i) { + // // @todo: add vec2 and other(needed?) 
support + // vec4.set(tmpVec4, data[stride * i + offset] + // , data[stride * i + offset + 1] + // , data[stride * i + offset + 2] + // , 0); + // vec4.transformMat4(tmpVec4, tmpVec4, inverseTransposeMatrix); + // vec4.normalize(tmpVec4, tmpVec4); + // data[stride * i + offset] = tmpVec4[0]; + // data[stride * i + offset + 1] = tmpVec4[1]; + // data[stride * i + offset + 2] = tmpVec4[2]; + // } + // } + + + // local transform matrix + + mat4.copy(newPrimitive.matrix, matrix); + + + + // for vertexAttribPointer + newPrimitive.attributes[attributeName] = { + //GLuint program location, + size: Type2NumOfComponent[accessor.type], + type: accessor.componentType, + //GLboolean normalized + stride: accessor.byteStride, + offset: accessor.byteOffset + }; + + } + + loader._checkComplete(); + }); + +}; + +/** +* load a glTF model +* +* @param {String} uri uri of the .glTF file. Other resources (bins, images) are assumed to be in the same base path +* @param {Function} callback the onload callback function +*/ +glTFLoader.prototype.loadGLTF = function (uri, callback) { + + this._init(); + + this.onload = callback || function(glTF) { + console.log('glTF model loaded.'); + console.log(glTF); + }; + + + this.glTF = new glTFModel(); + + this.baseUri = _getBaseUri(uri); + + var loader = this; + + _loadJSON(uri, function (response) { + // Parse JSON string into object + var json = JSON.parse(response); + + var bid; + + var loadArrayBufferCallback = function (resource) { + + loader._buffers[bid] = resource; + loader._bufferLoaded++; + if (loader._bufferTasks[bid]) { + var i,len; + for (i = 0, len = loader._bufferTasks[bid].length; i < len; ++i) { + (loader._bufferTasks[bid][i])(resource); + } + } + loader._checkComplete(); + + }; + + // Launch loading resources task: buffers, etc. 
+ if (json.buffers) { + for (bid in json.buffers) { + + loader._bufferRequested++; + + _loadArrayBuffer(loader.baseUri + json.buffers[bid].uri, loadArrayBufferCallback); + + } + } + + // load images + + + var loadImageCallback = function (img, iid) { + loader._imageLoaded++; + loader.glTF.images[iid] = img; + loader._checkComplete(); + }; + + var iid; + + if (json.images) { + for (iid in json.images) { + loader._imageRequested++; + _loadImage(loader.baseUri + json.images[iid].uri, iid, loadImageCallback); + } + } + + + // load shaders + var pid; + var newProgram; + + var loadVertexShaderFileCallback = function (resource) { + loader._shaderLoaded++; + newProgram.vertexShader = resource; + if (newProgram.fragmentShader) { + // create Program + newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); + loader._checkComplete(); + } + }; + var loadFragmentShaderFileCallback = function (resource) { + loader._shaderLoaded++; + newProgram.fragmentShader = resource; + if (newProgram.vertexShader) { + // create Program + newProgram.program = _createProgram(gl, newProgram.vertexShader, newProgram.fragmentShader); + loader._checkComplete(); + } + }; + + if (json.programs) { + for (pid in json.programs) { + newProgram = loader.glTF.programs[pid] = { + vertexShader: null, + fragmentShader: null, + program: null + }; + var program = json.programs[pid]; + loader._shaderRequested += 2; + + _loadShaderFile(loader.baseUri + json.shaders[program.vertexShader].uri, loadVertexShaderFileCallback); + _loadShaderFile(loader.baseUri + json.shaders[program.fragmentShader].uri, loadFragmentShaderFileCallback); + } + } + + + + + // start glTF scene parsing + loader._parseGLTF(json); + }); +}; + + + + +// TODO: get from gl context +var ComponentType2ByteSize = { + 5120: 1, // BYTE + 5121: 1, // UNSIGNED_BYTE + 5122: 2, // SHORT + 5123: 2, // UNSIGNED_SHORT + 5126: 4 // FLOAT +}; + +var Type2NumOfComponent = { + 'SCALAR': 1, + 'VEC2': 2, + 'VEC3': 3, + 'VEC4': 
4, + 'MAT2': 4, + 'MAT3': 9, + 'MAT4': 16 +}; + +MinimalGLTFLoader.Attributes = [ + 'POSITION', + 'NORMAL', + 'TEXCOORD', + 'COLOR', + 'JOINT', + 'WEIGHT' +]; + +// MinimalGLTFLoader.UniformFunctionsBind = { +// 35676: gl.uniformMatrix4fv // FLOAT_MAT4 +// }; + + +// ------ Scope limited private util functions--------------- + +function _arrayBuffer2TypedArray(resource, byteOffset, countOfComponentType, componentType) { + switch(componentType) { + // @todo: finish + case 5122: return new Int16Array(resource, byteOffset, countOfComponentType); + case 5123: return new Uint16Array(resource, byteOffset, countOfComponentType); + case 5124: return new Int32Array(resource, byteOffset, countOfComponentType); + case 5125: return new Uint32Array(resource, byteOffset, countOfComponentType); + case 5126: return new Float32Array(resource, byteOffset, countOfComponentType); + default: return null; + } +} + +function _getAccessorData(bufferViewData, accessor) { + return _arrayBuffer2TypedArray( + bufferViewData, + accessor.byteOffset, + accessor.count * Type2NumOfComponent[accessor.type], + accessor.componentType + ); +} + +function _getBaseUri(uri) { + + // https://github.com/AnalyticalGraphicsInc/cesium/blob/master/Source/Core/getBaseUri.js + + var basePath = ''; + var i = uri.lastIndexOf('/'); + if(i !== -1) { + basePath = uri.substring(0, i + 1); + } + + return basePath; +} + +function _loadJSON(src, callback) { + + // native json loading technique from @KryptoniteDove: + // http://codepen.io/KryptoniteDove/post/load-json-file-locally-using-pure-javascript + + var xobj = new XMLHttpRequest(); + xobj.overrideMimeType("application/json"); + xobj.open('GET', src, true); + xobj.onreadystatechange = function () { + if (xobj.readyState == 4 && // Request finished, response ready + xobj.status == "200") { // Status OK + callback(xobj.responseText, this); + } + }; + xobj.send(null); +} + +function _loadArrayBuffer(url, callback) { + var xobj = new XMLHttpRequest(); + 
xobj.responseType = 'arraybuffer'; + xobj.open('GET', url, true); + xobj.onreadystatechange = function () { + if (xobj.readyState == 4 && // Request finished, response ready + xobj.status == "200") { // Status OK + var arrayBuffer = xobj.response; + if (arrayBuffer && callback) { + callback(arrayBuffer); + } + } + }; + xobj.send(null); +} + +function _loadShaderFile(url, callback) { + var xobj = new XMLHttpRequest(); + xobj.responseType = 'text'; + xobj.open('GET', url, true); + xobj.onreadystatechange = function () { + if (xobj.readyState == 4 && // Request finished, response ready + xobj.status == "200") { // Status OK + var file = xobj.response; + if (file && callback) { + callback(file); + } + } + }; + xobj.send(null); +} + +function _loadImage(url, iid, onload) { + var img = new Image(); + img.src = url; + img.onload = function() { + onload(img, iid); + }; +} + + +function _createShader(gl, source, type) { + var shader = gl.createShader(type); + gl.shaderSource(shader, source); + gl.compileShader(shader); + return shader; +} + +function _createProgram(gl, vertexShaderSource, fragmentShaderSource) { + var program = gl.createProgram(); + var vshader = _createShader(gl, vertexShaderSource, gl.VERTEX_SHADER); + var fshader = _createShader(gl, fragmentShaderSource, gl.FRAGMENT_SHADER); + gl.attachShader(program, vshader); + gl.deleteShader(vshader); + gl.attachShader(program, fshader); + gl.deleteShader(fshader); + gl.linkProgram(program); + + var log = gl.getProgramInfoLog(program); + if (log) { + console.log(log); + } + + log = gl.getShaderInfoLog(vshader); + if (log) { + console.log(log); + } + + log = gl.getShaderInfoLog(fshader); + if (log) { + console.log(log); + } + + return program; +} + +export { glTFLoader }; \ No newline at end of file diff --git a/models/sponza/buffer_0.bin b/models/sponza/buffer_0.bin new file mode 100644 index 0000000..e1a3c55 Binary files /dev/null and b/models/sponza/buffer_0.bin differ diff --git a/models/sponza/color.jpeg 
b/models/sponza/color.jpeg new file mode 100644 index 0000000..05248f4 Binary files /dev/null and b/models/sponza/color.jpeg differ diff --git a/models/sponza/fragmentShader0.glsl b/models/sponza/fragmentShader0.glsl new file mode 100644 index 0000000..ad5d407 --- /dev/null +++ b/models/sponza/fragmentShader0.glsl @@ -0,0 +1,43 @@ +precision highp float; +uniform vec4 u_ambient; +uniform sampler2D u_diffuse; +uniform sampler2D u_normal; +uniform vec4 u_emission; +uniform vec4 u_specular; +uniform float u_shininess; +uniform float u_transparency; +varying vec3 v_positionEC; +varying vec3 v_normal; +varying vec2 v_texcoord_0; + +vec3 applyNormalMap(vec3 geomnor, vec3 normap) { + normap = normap * 2.0 - 1.0; + vec3 up = normalize(vec3(0.001, 1, 0.001)); + vec3 surftan = normalize(cross(geomnor, up)); + vec3 surfbinor = cross(geomnor, surftan); + return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; +} + +void main(void) { + vec3 normal = applyNormalMap(normalize(v_normal), texture2D(u_normal, v_texcoord_0).rgb); + vec4 diffuse = texture2D(u_diffuse, v_texcoord_0); + vec3 diffuseLight = vec3(0.0, 0.0, 0.0); + vec3 specular = u_specular.rgb; + vec3 specularLight = vec3(0.0, 0.0, 0.0); + vec3 emission = u_emission.rgb; + vec3 ambient = u_ambient.rgb; + vec3 viewDir = -normalize(v_positionEC); + vec3 ambientLight = vec3(0.0, 0.0, 0.0); + ambientLight += vec3(0.2, 0.2, 0.2); + vec3 l = vec3(0.0, 0.0, 1.0); + diffuseLight += vec3(1.0, 1.0, 1.0) * max(dot(normal,l), 0.); + vec3 h = normalize(l + viewDir); + float specularIntensity = max(0., pow(max(dot(normal, h), 0.), u_shininess)); + specularLight += vec3(1.0, 1.0, 1.0) * specularIntensity; + vec3 color = vec3(0.0, 0.0, 0.0); + color += diffuse.rgb * diffuseLight; + color += specular * specularLight; + color += emission; + color += ambient * ambientLight; + gl_FragColor = vec4(color * diffuse.a, diffuse.a * u_transparency); +} diff --git a/models/sponza/normal.png b/models/sponza/normal.png new file mode 
100644 index 0000000..322412b Binary files /dev/null and b/models/sponza/normal.png differ diff --git a/models/sponza/sponza.gltf b/models/sponza/sponza.gltf new file mode 100644 index 0000000..e21c7eb --- /dev/null +++ b/models/sponza/sponza.gltf @@ -0,0 +1,317 @@ +{ + "accessors": { + "accessor_index_0": { + "bufferView": "bufferView_1", + "byteOffset": 0, + "byteStride": 0, + "componentType": 5125, + "count": 199269, + "type": "SCALAR", + "min": [ + 0 + ], + "max": [ + 199268 + ] + }, + "accessor_position": { + "bufferView": "bufferView_0", + "byteOffset": 0, + "byteStride": 0, + "componentType": 5126, + "count": 148975, + "min": [ + -17.268321990966797, + -0.006653999909758568, + -7.7815141677856445 + ], + "max": [ + 17.551677703857422, + 15.55334758758545, + 7.818483829498291 + ], + "type": "VEC3" + }, + "accessor_normal": { + "bufferView": "bufferView_0", + "byteOffset": 1787700, + "byteStride": 0, + "componentType": 5126, + "count": 148975, + "type": "VEC3", + "min": [ + null, + null, + null + ], + "max": [ + null, + null, + null + ] + }, + "accessor_uv": { + "bufferView": "bufferView_0", + "byteOffset": 3575400, + "byteStride": 0, + "componentType": 5126, + "count": 148975, + "type": "VEC2", + "min": [ + -57.04376983642578, + -61.176544189453125 + ], + "max": [ + 57.97621536254883, + 62.176544189453125 + ] + } + }, + "asset": { + "generator": "OBJ2GLTF", + "premultipliedAlpha": true, + "profile": { + "api": "WebGL", + "version": "1.0" + }, + "version": "1.0" + }, + "buffers": { + "buffer_0": { + "type": "arraybuffer", + "byteLength": 5564276, + "uri": "buffer_0.bin" + } + }, + "bufferViews": { + "bufferView_0": { + "buffer": "buffer_0", + "byteLength": 4767200, + "byteOffset": 0, + "target": 34962 + }, + "bufferView_1": { + "buffer": "buffer_0", + "byteLength": 797076, + "byteOffset": 4767200, + "target": 34963 + } + }, + "images": { + "color": { + "uri": "color.jpeg" + }, + "normals": { + "uri": "normal.png" + } + }, + "materials": { + 
"material_lambert2SG": { + "name": "lambert2SG", + "extensions": {}, + "values": { + "ambient": [ + 0, + 0, + 0, + 1 + ], + "diffuse": "texture_color", + "normalMap": "texture_normal", + "emission": [ + 0, + 0, + 0, + 1 + ], + "specular": [ + 0, + 0, + 0, + 1 + ], + "shininess": 0, + "transparency": 1 + }, + "technique": "technique0" + } + }, + "meshes": { + "mesh_sponza": { + "name": "sponza", + "primitives": [ + { + "attributes": { + "POSITION": "accessor_position", + "NORMAL": "accessor_normal", + "TEXCOORD_0": "accessor_uv" + }, + "indices": "accessor_index_0", + "material": "material_lambert2SG", + "mode": 4 + } + ] + } + }, + "nodes": { + "rootNode": { + "children": [], + "meshes": [ + "mesh_sponza" + ], + "matrix": [ + 1, + 0, + 0, + 0, + 0, + 1, + 0, + 0, + 0, + 0, + 1, + 0, + 0, + 0, + 0, + 1 + ] + } + }, + "samplers": { + "sampler_0": { + "magFilter": 9729, + "minFilter": 9986, + "wrapS": 10497, + "wrapT": 10497 + } + }, + "scene": "scene_sponza", + "scenes": { + "scene_sponza": { + "nodes": [ + "rootNode" + ] + } + }, + "textures": { + "texture_color": { + "format": 6407, + "internalFormat": 6407, + "sampler": "sampler_0", + "source": "color", + "target": 3553, + "type": 5121 + }, + "texture_normal": { + "format": 6407, + "internalFormat": 6407, + "sampler": "sampler_0", + "source": "normals", + "target": 3553, + "type": 5121 + } + }, + "extensionsUsed": [], + "animations": {}, + "cameras": {}, + "techniques": { + "technique0": { + "attributes": { + "a_position": "position", + "a_normal": "normal", + "a_texcoord_0": "texcoord_0" + }, + "parameters": { + "modelViewMatrix": { + "semantic": "MODELVIEW", + "type": 35676 + }, + "projectionMatrix": { + "semantic": "PROJECTION", + "type": 35676 + }, + "normalMatrix": { + "semantic": "MODELVIEWINVERSETRANSPOSE", + "type": 35675 + }, + "ambient": { + "type": 35666 + }, + "diffuse": { + "type": 35678 + }, + "normalMap": { + "type": 35678 + }, + "emission": { + "type": 35666 + }, + "specular": { + "type": 35666 + 
}, + "shininess": { + "type": 5126 + }, + "transparency": { + "type": 5126 + }, + "position": { + "semantic": "POSITION", + "type": 35665 + }, + "normal": { + "semantic": "NORMAL", + "type": 35665 + }, + "texcoord_0": { + "semantic": "TEXCOORD_0", + "type": 35664 + } + }, + "program": "program0", + "states": { + "enable": [ + 2884, + 2929 + ] + }, + "uniforms": { + "u_modelViewMatrix": "modelViewMatrix", + "u_projectionMatrix": "projectionMatrix", + "u_normalMatrix": "normalMatrix", + "u_ambient": "ambient", + "u_diffuse": "diffuse", + "u_normal": "normalMap", + "u_emission": "emission", + "u_specular": "specular", + "u_shininess": "shininess", + "u_transparency": "transparency" + } + } + }, + "programs": { + "program0": { + "attributes": [ + "a_position", + "a_normal", + "a_texcoord_0" + ], + "fragmentShader": "fragmentShader0", + "vertexShader": "vertexShader0" + } + }, + "shaders": { + "vertexShader0": { + "type": 35633, + "uri": "vertexShader0.glsl" + }, + "fragmentShader0": { + "type": 35632, + "uri": "fragmentShader0.glsl" + } + }, + "skins": {}, + "extensions": {} +} diff --git a/models/sponza/vertexShader0.glsl b/models/sponza/vertexShader0.glsl new file mode 100644 index 0000000..c489a55 --- /dev/null +++ b/models/sponza/vertexShader0.glsl @@ -0,0 +1,17 @@ +precision highp float; +uniform mat4 u_modelViewMatrix; +uniform mat4 u_projectionMatrix; +uniform mat3 u_normalMatrix; +attribute vec3 a_position; +varying vec3 v_positionEC; +attribute vec3 a_normal; +varying vec3 v_normal; +attribute vec2 a_texcoord_0; +varying vec2 v_texcoord_0; +void main(void) { + vec4 pos = u_modelViewMatrix * vec4(a_position,1.0); + v_positionEC = pos.xyz; + gl_Position = u_projectionMatrix * pos; + v_normal = u_normalMatrix * a_normal; + v_texcoord_0 = a_texcoord_0; +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..44b3339 --- /dev/null +++ b/package.json @@ -0,0 +1,24 @@ +{ + "scripts": { + "start": "webpack-dev-server", + "build": "webpack -p" + 
}, + "dependencies": { + "dat-gui": "^0.5.0", + "gl-matrix": "^2.4.0", + "spectorjs": "^0.9.0", + "stats-js": "^1.0.0-alpha1", + "three": "^0.87.1", + "three-js": "^79.0.0", + "three-orbitcontrols": "^1.2.1", + "webgl-debug": "^1.0.2" + }, + "devDependencies": { + "babel-core": "^6.26.0", + "babel-loader": "^7.1.2", + "babel-preset-env": "^1.6.0", + "webpack": "^3.7.1", + "webpack-dev-server": "^2.9.2", + "webpack-glsl-loader": "^1.0.1" + } +} diff --git a/src/README.md b/src/README.md new file mode 100644 index 0000000..237b30e --- /dev/null +++ b/src/README.md @@ -0,0 +1,33 @@ +WebGL Clustered Deferred and Forward+ Shading +====================== + +**University of Pennsylvania, CIS 565: GPU Programming and Architecture, Project 5** + +* (TODO) YOUR NAME HERE +* Tested on: (TODO) **Google Chrome 222.2** on + Windows 22, i7-2222 @ 2.22GHz 22GB, GTX 222 222MB (Moore 2222 Lab) + +### Live Online + +[![](img/thumb.png)](http://TODO.github.io/Project5B-WebGL-Deferred-Shading) + +### Demo Video/GIF + +[![](img/video.png)](TODO) + +### (TODO: Your README) + +*DO NOT* leave the README to the last minute! It is a crucial part of the +project, and we will not be able to grade you without a good README. + +This assignment has a considerable amount of performance analysis compared +to implementation work. Complete the implementation early to leave time! + + +### Credits + +* [Three.js](https://github.com/mrdoob/three.js) by [@mrdoob](https://github.com/mrdoob) and contributors +* [Spector.js](https://github.com/BabylonJS/Spector.js) by [@BabylonJS](https://github.com/BabylonJS) and contributors +* [webgl-debug](https://github.com/KhronosGroup/WebGLDeveloperTools) by Khronos Group Inc. 
+* [glMatrix](https://github.com/toji/gl-matrix) by [@toji](https://github.com/toji) and contributors +* [minimal-gltf-loader](https://github.com/shrekshao/minimal-gltf-loader) by [@shrekshao](https://github.com/shrekshao) \ No newline at end of file diff --git a/src/init.js b/src/init.js new file mode 100644 index 0000000..36df58b --- /dev/null +++ b/src/init.js @@ -0,0 +1,104 @@ +// TODO: Change this to enable / disable debug mode +export const DEBUG = true; + +import DAT from 'dat-gui'; +import WebGLDebug from 'webgl-debug'; +import Stats from 'stats-js'; +import { PerspectiveCamera } from 'three'; +import OrbitControls from 'three-orbitcontrols'; +import { Spector } from 'spectorjs'; + +export var ABORTED = false; +export function abort(message) { + ABORTED = true; + throw message; +} + +// Get the canvas element +export const canvas = document.getElementById('canvas'); + +// Initialize the WebGL context +const glContext = canvas.getContext('webgl'); + +// Get a debug context +export const gl = DEBUG ? 
WebGLDebug.makeDebugContext(glContext, (err, funcName, args) => { + abort(WebGLDebug.glEnumToString(err) + ' was caused by call to: ' + funcName); +}) : glContext; + +const supportedExtensions = gl.getSupportedExtensions(); +const requiredExtensions = [ + 'OES_texture_float', + 'OES_texture_float_linear', + 'OES_element_index_uint', + 'EXT_frag_depth', + 'WEBGL_depth_texture', + 'WEBGL_draw_buffers', +]; + +// Check that all required extensions are supported +for (let i = 0; i < requiredExtensions.length; ++i) { + if (supportedExtensions.indexOf(requiredExtensions[i]) < 0) { + throw 'Unable to load extension ' + requiredExtensions[i]; + } +} + +// Get the maximum number of draw buffers +gl.getExtension('OES_texture_float'); +gl.getExtension('OES_texture_float_linear'); +gl.getExtension('OES_element_index_uint'); +gl.getExtension('EXT_frag_depth'); +gl.getExtension('WEBGL_depth_texture'); +export const WEBGL_draw_buffers = gl.getExtension('WEBGL_draw_buffers'); +export const MAX_DRAW_BUFFERS_WEBGL = gl.getParameter(WEBGL_draw_buffers.MAX_DRAW_BUFFERS_WEBGL); + +export const gui = new DAT.GUI(); + +// initialize statistics widget +const stats = new Stats(); +stats.setMode(1); // 0: fps, 1: ms +stats.domElement.style.position = 'absolute'; +stats.domElement.style.left = '0px'; +stats.domElement.style.top = '0px'; +document.body.appendChild(stats.domElement); + +// Initialize camera +export const camera = new PerspectiveCamera(75, canvas.clientWidth / canvas.clientHeight, 0.1, 1000); + +// Initialize camera controls +export const cameraControls = new OrbitControls(camera, canvas); +cameraControls.enableDamping = true; +cameraControls.enableZoom = true; +cameraControls.rotateSpeed = 0.3; +cameraControls.zoomSpeed = 1.0; +cameraControls.panSpeed = 2.0; + +function setSize(width, height) { + canvas.width = width; + canvas.height = height; + camera.aspect = width / height; + camera.updateProjectionMatrix(); +} + +setSize(canvas.clientWidth, canvas.clientHeight); 
+window.addEventListener('resize', () => setSize(canvas.clientWidth, canvas.clientHeight)); + +if (DEBUG) { + const spector = new Spector(); + spector.displayUI(); +} + +// Creates a render loop that is wrapped with camera update and stats logging +export function makeRenderLoop(render) { + return function tick() { + cameraControls.update(); + stats.begin(); + render(); + stats.end(); + if (!ABORTED) { + requestAnimationFrame(tick) + } + } +} + +// import the main application +require('./main'); \ No newline at end of file diff --git a/src/main.js b/src/main.js new file mode 100644 index 0000000..1cbbf9a --- /dev/null +++ b/src/main.js @@ -0,0 +1,46 @@ +import { makeRenderLoop, camera, cameraControls, gui, gl } from './init'; +import ForwardRenderer from './renderers/forward'; +import ClusteredForwardPlusRenderer from './renderers/clusteredForwardPlus'; +import ClusteredDeferredRenderer from './renderers/clusteredDeferred'; +import Scene from './scene'; + +const FORWARD = 'Forward'; +const CLUSTERED_FORWARD_PLUS = 'Clustered Forward+'; +const CLUSTERED_DEFFERED = 'Clustered Deferred'; + +const params = { + renderer: CLUSTERED_FORWARD_PLUS, + _renderer: null, +}; + +setRenderer(params.renderer); + +function setRenderer(renderer) { + switch(renderer) { + case FORWARD: + params._renderer = new ForwardRenderer(); + break; + case CLUSTERED_FORWARD_PLUS: + params._renderer = new ClusteredForwardPlusRenderer(15, 15, 15); + break; + case CLUSTERED_DEFFERED: + params._renderer = new ClusteredDeferredRenderer(15, 15, 15); + break; + } +} + +gui.add(params, 'renderer', [FORWARD, CLUSTERED_FORWARD_PLUS, CLUSTERED_DEFFERED]).onChange(setRenderer); + +const scene = new Scene(); +scene.loadGLTF('models/sponza/sponza.gltf'); + +camera.position.set(-10, 8, 0); +cameraControls.target.set(0, 2, 0); +gl.enable(gl.DEPTH_TEST); + +function render() { + scene.update(); + params._renderer.render(camera, scene); +} + +makeRenderLoop(render)(); \ No newline at end of file diff --git 
a/src/renderers/clustered.js b/src/renderers/clustered.js new file mode 100644 index 0000000..9521fbd --- /dev/null +++ b/src/renderers/clustered.js @@ -0,0 +1,32 @@ +import { mat4, vec4, vec3 } from 'gl-matrix'; +import { NUM_LIGHTS } from '../scene'; +import TextureBuffer from './textureBuffer'; + +export const MAX_LIGHTS_PER_CLUSTER = 100; + +export default class ClusteredRenderer { + constructor(xSlices, ySlices, zSlices) { + // Create a texture to store cluster data. Each cluster stores the number of lights followed by the light indices + this._clusterTexture = new TextureBuffer(xSlices * ySlices * zSlices, MAX_LIGHTS_PER_CLUSTER + 1); + this._xSlices = xSlices; + this._ySlices = ySlices; + this._zSlices = zSlices; + } + + updateClusters(camera, viewMatrix, scene) { + // TODO: Update the cluster texture with the count and indices of the lights in each cluster + // This will take some time. The math is nontrivial... + + for (let z = 0; z < this._zSlices; ++z) { + for (let y = 0; y < this._ySlices; ++y) { + for (let x = 0; x < this._xSlices; ++x) { + let i = x + y * this._xSlices + z * this._xSlices * this._ySlices; + // Reset the light count to 0 for every cluster + this._clusterTexture.buffer[this._clusterTexture.bufferIndex(i, 0)] = 0; + } + } + } + + this._clusterTexture.update(); + } +} \ No newline at end of file diff --git a/src/renderers/clusteredDeferred.js b/src/renderers/clusteredDeferred.js new file mode 100644 index 0000000..5e28e84 --- /dev/null +++ b/src/renderers/clusteredDeferred.js @@ -0,0 +1,168 @@ +import { gl, WEBGL_draw_buffers, canvas } from '../init'; +import { mat4, vec4 } from 'gl-matrix'; +import { loadShaderProgram, renderFullscreenQuad } from '../utils'; +import { NUM_LIGHTS } from '../scene'; +import toTextureVert from '../shaders/deferredToTexture.vert.glsl'; +import toTextureFrag from '../shaders/deferredToTexture.frag.glsl'; +import QuadVertSource from '../shaders/quad.vert.glsl'; +import fsSource from 
'../shaders/deferred.frag.glsl.js'; +import TextureBuffer from './textureBuffer'; +import ClusteredRenderer from './clustered'; + +export const NUM_GBUFFERS = 4; + +export default class ClusteredDeferredRenderer extends ClusteredRenderer { + constructor(xSlices, ySlices, zSlices) { + super(xSlices, ySlices, zSlices); + + this.setupDrawBuffers(canvas.width, canvas.height); + + // Create a texture to store light data + this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); + + this._progCopy = loadShaderProgram(toTextureVert, toTextureFrag, { + uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap'], + attribs: ['a_position', 'a_normal', 'a_uv'], + }); + + this._progShade = loadShaderProgram(QuadVertSource, fsSource({ + numLights: NUM_LIGHTS, + numGBuffers: NUM_GBUFFERS, + }), { + uniforms: ['u_gbuffers[0]', 'u_gbuffers[1]', 'u_gbuffers[2]', 'u_gbuffers[3]'], + attribs: ['a_uv'], + }); + + this._projectionMatrix = mat4.create(); + this._viewMatrix = mat4.create(); + this._viewProjectionMatrix = mat4.create(); + } + + setupDrawBuffers(width, height) { + this._width = width; + this._height = height; + + this._fbo = gl.createFramebuffer(); + + //Create, bind, and store a depth target texture for the FBO + this._depthTex = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, this._depthTex); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); + gl.bindTexture(gl.TEXTURE_2D, null); + + gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); + gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, this._depthTex, 0); + + // Create, bind, and store "color" target textures for the FBO + this._gbuffers = new 
Array(NUM_GBUFFERS); + let attachments = new Array(NUM_GBUFFERS); + for (let i = 0; i < NUM_GBUFFERS; i++) { + attachments[i] = WEBGL_draw_buffers[`COLOR_ATTACHMENT${i}_WEBGL`]; + this._gbuffers[i] = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); + gl.bindTexture(gl.TEXTURE_2D, null); + + gl.framebufferTexture2D(gl.FRAMEBUFFER, attachments[i], gl.TEXTURE_2D, this._gbuffers[i], 0); + } + + if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) != gl.FRAMEBUFFER_COMPLETE) { + throw "Framebuffer incomplete"; + } + + // Tell the WEBGL_draw_buffers extension which FBO attachments are + // being used. (This extension allows for multiple render targets.) 
+ WEBGL_draw_buffers.drawBuffersWEBGL(attachments); + + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + } + + resize(width, height) { + this._width = width; + this._height = height; + + gl.bindTexture(gl.TEXTURE_2D, this._depthTex); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, width, height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null); + for (let i = 0; i < NUM_GBUFFERS; i++) { + gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.FLOAT, null); + } + gl.bindTexture(gl.TEXTURE_2D, null); + } + + render(camera, scene) { + if (canvas.width != this._width || canvas.height != this._height) { + this.resize(canvas.width, canvas.height); + } + + // Update the camera matrices + camera.updateMatrixWorld(); + mat4.invert(this._viewMatrix, camera.matrixWorld.elements); + mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); + mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); + + // Render to the whole screen + gl.viewport(0, 0, canvas.width, canvas.height); + + // Bind the framebuffer + gl.bindFramebuffer(gl.FRAMEBUFFER, this._fbo); + + // Clear the frame + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + + // Use the shader program to copy to the draw buffers + gl.useProgram(this._progCopy.glShaderProgram); + + // Upload the camera matrix + gl.uniformMatrix4fv(this._progCopy.u_viewProjectionMatrix, false, this._viewProjectionMatrix); + + // Draw the scene. 
This function takes the shader program so that the model's textures can be bound to the right inputs + scene.draw(this._progCopy); + + // Update the buffer used to populate the texture packed with light data + for (let i = 0; i < NUM_LIGHTS; ++i) { + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; + + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; + } + // Update the light texture + this._lightTexture.update(); + + // Update the clusters for the frame + this.updateClusters(camera, this._viewMatrix, scene); + + // Bind the default null framebuffer which is the screen + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + + // Clear the frame + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + + // Use this shader program + gl.useProgram(this._progShade.glShaderProgram); + + // TODO: Bind any other shader inputs + + // Bind g-buffers + const firstGBufferBinding = 0; // You may have to change this if you use other texture slots + for (let i = 0; i < NUM_GBUFFERS; i++) { + gl.activeTexture(gl[`TEXTURE${i + firstGBufferBinding}`]); + gl.bindTexture(gl.TEXTURE_2D, this._gbuffers[i]); + gl.uniform1i(this._progShade[`u_gbuffers[${i}]`], i + firstGBufferBinding); + } + + renderFullscreenQuad(this._progShade); + } +}; diff --git a/src/renderers/clusteredForwardPlus.js b/src/renderers/clusteredForwardPlus.js new file mode 100644 index 0000000..9e8afbe --- /dev/null +++ 
b/src/renderers/clusteredForwardPlus.js @@ -0,0 +1,83 @@ +import { gl } from '../init'; +import { mat4, vec4, vec3 } from 'gl-matrix'; +import { loadShaderProgram } from '../utils'; +import { NUM_LIGHTS } from '../scene'; +import vsSource from '../shaders/clusteredForward.vert.glsl'; +import fsSource from '../shaders/clusteredForward.frag.glsl.js'; +import TextureBuffer from './textureBuffer'; +import ClusteredRenderer from './clustered'; + +export default class ClusteredForwardPlusRenderer extends ClusteredRenderer { + constructor(xSlices, ySlices, zSlices) { + super(xSlices, ySlices, zSlices); + + // Create a texture to store light data + this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); + + this._shaderProgram = loadShaderProgram(vsSource, fsSource({ + numLights: NUM_LIGHTS, + }), { + uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer', 'u_clusterbuffer'], + attribs: ['a_position', 'a_normal', 'a_uv'], + }); + + this._projectionMatrix = mat4.create(); + this._viewMatrix = mat4.create(); + this._viewProjectionMatrix = mat4.create(); + } + + render(camera, scene) { + // Update the camera matrices + camera.updateMatrixWorld(); + mat4.invert(this._viewMatrix, camera.matrixWorld.elements); + mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); + mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); + + // Update cluster texture which maps from cluster index to light list + this.updateClusters(camera, this._viewMatrix, scene); + + // Update the buffer used to populate the texture packed with light data + for (let i = 0; i < NUM_LIGHTS; ++i) { + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = scene.lights[i].position[2]; + 
this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; + + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; + } + // Update the light texture + this._lightTexture.update(); + + // Bind the default null framebuffer which is the screen + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + + // Render to the whole screen + gl.viewport(0, 0, canvas.width, canvas.height); + + // Clear the frame + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + + // Use this shader program + gl.useProgram(this._shaderProgram.glShaderProgram); + + // Upload the camera matrix + gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, this._viewProjectionMatrix); + + // Set the light texture as a uniform input to the shader + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); + gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); + + // Set the cluster texture as a uniform input to the shader + gl.activeTexture(gl.TEXTURE3); + gl.bindTexture(gl.TEXTURE_2D, this._clusterTexture.glTexture); + gl.uniform1i(this._shaderProgram.u_clusterbuffer, 3); + + // TODO: Bind any other shader inputs + + // Draw the scene. 
This function takes the shader program so that the model's textures can be bound to the right inputs + scene.draw(this._shaderProgram); + } +}; \ No newline at end of file diff --git a/src/renderers/forward.js b/src/renderers/forward.js new file mode 100644 index 0000000..ac044f9 --- /dev/null +++ b/src/renderers/forward.js @@ -0,0 +1,71 @@ +import { gl } from '../init'; +import { mat4, vec4 } from 'gl-matrix'; +import { loadShaderProgram } from '../utils'; +import { NUM_LIGHTS } from '../scene'; +import vsSource from '../shaders/forward.vert.glsl'; +import fsSource from '../shaders/forward.frag.glsl.js'; +import TextureBuffer from './textureBuffer'; + +export default class ForwardRenderer { + constructor() { + // Create a texture to store light data + this._lightTexture = new TextureBuffer(NUM_LIGHTS, 8); + + // Initialize a shader program. The fragment shader source is compiled based on the number of lights + this._shaderProgram = loadShaderProgram(vsSource, fsSource({ + numLights: NUM_LIGHTS, + }), { + uniforms: ['u_viewProjectionMatrix', 'u_colmap', 'u_normap', 'u_lightbuffer'], + attribs: ['a_position', 'a_normal', 'a_uv'], + }); + + this._projectionMatrix = mat4.create(); + this._viewMatrix = mat4.create(); + this._viewProjectionMatrix = mat4.create(); + } + + render(camera, scene) { + // Update the camera matrices + camera.updateMatrixWorld(); + mat4.invert(this._viewMatrix, camera.matrixWorld.elements); + mat4.copy(this._projectionMatrix, camera.projectionMatrix.elements); + mat4.multiply(this._viewProjectionMatrix, this._projectionMatrix, this._viewMatrix); + + // Update the buffer used to populate the texture packed with light data + for (let i = 0; i < NUM_LIGHTS; ++i) { + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 0] = scene.lights[i].position[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 1] = scene.lights[i].position[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 2] = 
scene.lights[i].position[2]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 0) + 3] = scene.lights[i].radius; + + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 0] = scene.lights[i].color[0]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 1] = scene.lights[i].color[1]; + this._lightTexture.buffer[this._lightTexture.bufferIndex(i, 1) + 2] = scene.lights[i].color[2]; + } + // Update the light texture + this._lightTexture.update(); + + // Bind the default null framebuffer which is the screen + gl.bindFramebuffer(gl.FRAMEBUFFER, null); + + // Render to the whole screen + gl.viewport(0, 0, canvas.width, canvas.height); + + // Clear the frame + gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT); + + // Use this shader program + gl.useProgram(this._shaderProgram.glShaderProgram); + + // Upload the camera matrix + gl.uniformMatrix4fv(this._shaderProgram.u_viewProjectionMatrix, false, this._viewProjectionMatrix); + + // Set the light texture as a uniform input to the shader + gl.activeTexture(gl.TEXTURE2); + gl.bindTexture(gl.TEXTURE_2D, this._lightTexture.glTexture); + gl.uniform1i(this._shaderProgram.u_lightbuffer, 2); + + // Draw the scene. This function takes the shader program so that the model's textures can be bound to the right inputs + scene.draw(this._shaderProgram); + } +}; diff --git a/src/renderers/textureBuffer.js b/src/renderers/textureBuffer.js new file mode 100644 index 0000000..8d97902 --- /dev/null +++ b/src/renderers/textureBuffer.js @@ -0,0 +1,53 @@ +import { gl } from '../init'; + +export default class TextureBuffer { + /** + * This class represents a buffer in a shader. Unforunately we can't bind arbitrary buffers so we need to pack the data as a texture + * @param {Number} elementCount The number of items in the buffer + * @param {Number} elementSize The number of values in each item of the buffer + */ + constructor(elementCount, elementSize) { + // Initialize the texture. 
We use gl.NEAREST for texture filtering because we don't want to blend between values in the buffer. We want the exact value + this._glTexture = gl.createTexture(); + gl.bindTexture(gl.TEXTURE_2D, this._glTexture); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE); + gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE); + + // The texture stores 4 values in each "pixel". Thus, the texture we create is elementCount x ceil(elementSize / 4) + this._pixelsPerElement = Math.ceil(elementSize / 4); + this._elementCount = elementCount; + gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, elementCount, this._pixelsPerElement, 0, gl.RGBA, gl.FLOAT, null); + gl.bindTexture(gl.TEXTURE_2D, null); + + // Create a buffer to use to upload to the texture + this._buffer = new Float32Array(elementCount * 4 * this._pixelsPerElement); + } + + get glTexture() { + return this._glTexture; + } + + get buffer() { + return this._buffer; + } + + /** + * Computes the starting buffer index to a particular item. 
+ * @param {*} index The index of the item + * @param {*} component The ith float of an element is located in the (i/4)th pixel + */ + bufferIndex(index, component) { + return 4 * index + 4 * component * this._elementCount; + } + + /** + * Update the texture with the data in the buffer + */ + update() { + gl.bindTexture(gl.TEXTURE_2D, this._glTexture); + gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, this._elementCount, this._pixelsPerElement, gl.RGBA, gl.FLOAT, this._buffer); + gl.bindTexture(gl.TEXTURE_2D, null); + } +}; \ No newline at end of file diff --git a/src/scene.js b/src/scene.js new file mode 100644 index 0000000..35f6700 --- /dev/null +++ b/src/scene.js @@ -0,0 +1,201 @@ +const MinimalGLTFLoader = require('../lib/minimal-gltf-loader'); +import { gl } from './init'; + +// TODO: Edit if you want to change the light initial positions +export const LIGHT_MIN = [-14, 0, -6]; +export const LIGHT_MAX = [14, 20, 6]; +export const LIGHT_RADIUS = 5.0; +export const LIGHT_DT = -0.03; + +// TODO: This controls the number of lights +export const NUM_LIGHTS = 100; + +class Scene { + constructor() { + this.lights = []; + this.models = []; + + for (let i = 0; i < NUM_LIGHTS; ++i) { + this.lights.push({ + position: new Float32Array([ + Math.random() * (LIGHT_MAX[0] - LIGHT_MIN[0]) + LIGHT_MIN[0], + Math.random() * (LIGHT_MAX[1] - LIGHT_MIN[1]) + LIGHT_MIN[1], + Math.random() * (LIGHT_MAX[2] - LIGHT_MIN[2]) + LIGHT_MIN[2], + ]), + color: new Float32Array([ + 0.5 + 0.5 * Math.random(), + 0.5 + 0.5 * Math.random(), + 0.5 + Math.random(), + ]), + radius: LIGHT_RADIUS, + }); + } + } + + loadGLTF(url) { + var glTFLoader = new MinimalGLTFLoader.glTFLoader(gl); + glTFLoader.loadGLTF(url, glTF => { + var curScene = glTF.scenes[glTF.defaultScene]; + + var webGLTextures = {}; + + // temp var + var i,len; + var primitiveOrderID; + + var mesh; + var primitive; + var vertexBuffer; + var indicesBuffer; + + // textures setting + var textureID = 0; + var textureInfo; + var samplerInfo; + var 
target, format, internalFormat, type; // texture info + var magFilter, minFilter, wrapS, wrapT; + var image; + var texture; + + // temp for sponza + var colorTextureName = 'texture_color'; + var normalTextureName = 'texture_normal'; + + for (var tid in glTF.json.textures) { + textureInfo = glTF.json.textures[tid]; + target = textureInfo.target || gl.TEXTURE_2D; + format = textureInfo.format || gl.RGBA; + internalFormat = textureInfo.format || gl.RGBA; + type = textureInfo.type || gl.UNSIGNED_BYTE; + + image = glTF.images[textureInfo.source]; + + texture = gl.createTexture(); + gl.activeTexture(gl.TEXTURE0 + textureID); + gl.bindTexture(target, texture); + + switch(target) { + case 3553: // gl.TEXTURE_2D + gl.texImage2D(target, 0, internalFormat, format, type, image); + break; + } + + // !! Sampler + // raw WebGL 1, no sampler object, set magfilter, wrapS, etc + samplerInfo = glTF.json.samplers[textureInfo.sampler]; + minFilter = samplerInfo.minFilter || gl.NEAREST_MIPMAP_LINEAR; + magFilter = samplerInfo.magFilter || gl.LINEAR; + wrapS = samplerInfo.wrapS || gl.REPEAT; + wrapT = samplerInfo.wrapT || gl.REPEAT; + gl.texParameteri(target, gl.TEXTURE_MIN_FILTER, minFilter); + gl.texParameteri(target, gl.TEXTURE_MAG_FILTER, magFilter); + gl.texParameteri(target, gl.TEXTURE_WRAP_S, wrapS); + gl.texParameteri(target, gl.TEXTURE_WRAP_T, wrapT); + if (minFilter == gl.NEAREST_MIPMAP_NEAREST || + minFilter == gl.NEAREST_MIPMAP_LINEAR || + minFilter == gl.LINEAR_MIPMAP_NEAREST || + minFilter == gl.LINEAR_MIPMAP_LINEAR ) { + gl.generateMipmap(target); + } + + + gl.bindTexture(target, null); + + webGLTextures[tid] = { + texture: texture, + target: target, + id: textureID + }; + + textureID++; + } + + // vertex attributes + for (var mid in curScene.meshes) { + mesh = curScene.meshes[mid]; + + for (i = 0, len = mesh.primitives.length; i < len; ++i) { + primitive = mesh.primitives[i]; + + vertexBuffer = gl.createBuffer(); + indicesBuffer = gl.createBuffer(); + + // initialize 
buffer + var vertices = primitive.vertexBuffer; + gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer); + gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW); + gl.bindBuffer(gl.ARRAY_BUFFER, null); + + var indices = primitive.indices; + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, indicesBuffer); + gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, indices, gl.STATIC_DRAW); + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null); + + var posInfo = primitive.attributes[primitive.technique.parameters['position'].semantic]; + var norInfo = primitive.attributes[primitive.technique.parameters['normal'].semantic]; + var uvInfo = primitive.attributes[primitive.technique.parameters['texcoord_0'].semantic]; + + this.models.push({ + gltf: primitive, + + idx: indicesBuffer, + + attributes: vertexBuffer, + posInfo: {size: posInfo.size, type: posInfo.type, stride: posInfo.stride, offset: posInfo.offset}, + norInfo: {size: norInfo.size, type: norInfo.type, stride: norInfo.stride, offset: norInfo.offset}, + uvInfo: {size: uvInfo.size, type: uvInfo.type, stride: uvInfo.stride, offset: uvInfo.offset}, + + // specific textures temp test + colmap: webGLTextures[colorTextureName].texture, + normap: webGLTextures[normalTextureName].texture + }); + } + } + + }); + } + + update() { + for (let i = 0; i < NUM_LIGHTS; i++) { + // OPTIONAL TODO: Edit if you want to change how lights move + this.lights[i].position[1] += LIGHT_DT; + // wrap lights from bottom to top + this.lights[i].position[1] = (this.lights[i].position[1] + LIGHT_MAX[1] - LIGHT_MIN[1]) % LIGHT_MAX[1] + LIGHT_MIN[1]; + } + } + + draw(shaderProgram) { + for (let i = 0; i < this.models.length; ++i) { + const model = this.models[i]; + if (model.colmap) { + gl.activeTexture(gl.TEXTURE0); + gl.bindTexture(gl.TEXTURE_2D, model.colmap); + gl.uniform1i(shaderProgram.u_colmap, 0); + } + + if (model.normap) { + gl.activeTexture(gl.TEXTURE1); + gl.bindTexture(gl.TEXTURE_2D, model.normap); + gl.uniform1i(shaderProgram.u_normap, 1); + } + + 
gl.bindBuffer(gl.ARRAY_BUFFER, model.attributes); + + gl.enableVertexAttribArray(shaderProgram.a_position); + gl.vertexAttribPointer(shaderProgram.a_position, model.posInfo.size, model.posInfo.type, false, model.posInfo.stride, model.posInfo.offset); + + gl.enableVertexAttribArray(shaderProgram.a_normal); + gl.vertexAttribPointer(shaderProgram.a_normal, model.norInfo.size, model.norInfo.type, false, model.norInfo.stride, model.norInfo.offset); + + gl.enableVertexAttribArray(shaderProgram.a_uv); + gl.vertexAttribPointer(shaderProgram.a_uv, model.uvInfo.size, model.uvInfo.type, false, model.uvInfo.stride, model.uvInfo.offset); + + gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, model.idx); + + gl.drawElements(model.gltf.mode, model.gltf.indices.length, model.gltf.indicesComponentType, 0); + } + } + +} + +export default Scene; \ No newline at end of file diff --git a/src/shaders/clusteredForward.frag.glsl.js b/src/shaders/clusteredForward.frag.glsl.js new file mode 100644 index 0000000..022fda7 --- /dev/null +++ b/src/shaders/clusteredForward.frag.glsl.js @@ -0,0 +1,101 @@ +export default function(params) { + return ` + // TODO: This is pretty much just a clone of forward.frag.glsl.js + + #version 100 + precision highp float; + + uniform sampler2D u_colmap; + uniform sampler2D u_normap; + uniform sampler2D u_lightbuffer; + + // TODO: Read this buffer to determine the lights influencing a cluster + uniform sampler2D u_clusterbuffer; + + varying vec3 v_position; + varying vec3 v_normal; + varying vec2 v_uv; + + vec3 applyNormalMap(vec3 geomnor, vec3 normap) { + normap = normap * 2.0 - 1.0; + vec3 up = normalize(vec3(0.001, 1, 0.001)); + vec3 surftan = normalize(cross(geomnor, up)); + vec3 surfbinor = cross(geomnor, surftan); + return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; + } + + struct Light { + vec3 position; + float radius; + vec3 color; + }; + + float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int component) { + 
float u = float(index + 1) / float(textureWidth + 1);
+    int pixel = component / 4;
+    float v = float(pixel + 1) / float(textureHeight + 1);
+    vec4 texel = texture2D(texture, vec2(u, v));
+    int pixelComponent = component - pixel * 4;
+    if (pixelComponent == 0) {
+      return texel[0];
+    } else if (pixelComponent == 1) {
+      return texel[1];
+    } else if (pixelComponent == 2) {
+      return texel[2];
+    } else if (pixelComponent == 3) {
+      return texel[3];
+    }
+    // FIX: unreachable (component % 4 is always 0..3), but GLSL ES 1.00
+    // requires non-void functions to return a value on every control path;
+    // some strict drivers refuse to compile without this fallback.
+    return 0.0;
+  }
+
+  // Unpack one light from the 2-row light texture: row 0 holds xyz position
+  // (+ radius in w), row 1 holds rgb color. v = 0.3 / 0.6 sample safely
+  // inside rows 0 and 1 of the 2-texel-tall texture.
+  Light UnpackLight(int index) {
+    Light light;
+    float u = float(index + 1) / float(${params.numLights + 1});
+    vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.3));
+    vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.6));
+    light.position = v1.xyz;
+
+    // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer
+    // Note that this is just an example implementation to extract one float.
+    // There are more efficient ways if you need adjacent values
+    light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3);
+
+    light.color = v2.rgb;
+    return light;
+  }
+
+  // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius
+  float cubicGaussian(float h) {
+    if (h < 1.0) {
+      return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0);
+    } else if (h < 2.0) {
+      return 0.25 * pow(2.0 - h, 3.0);
+    } else {
+      return 0.0;
+    }
+  }
+
+  void main() {
+    vec3 albedo = texture2D(u_colmap, v_uv).rgb;
+    vec3 normap = texture2D(u_normap, v_uv).xyz;
+    vec3 normal = applyNormalMap(v_normal, normap);
+
+    vec3 fragColor = vec3(0.0);
+
+    for (int i = 0; i < ${params.numLights}; ++i) {
+      Light light = UnpackLight(i);
+      float lightDistance = distance(light.position, v_position);
+      vec3 L = (light.position - v_position) / lightDistance;
+
+      float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius);
+      float lambertTerm = max(dot(L, normal), 0.0);
+
+      fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity);
+    }
+
+    const vec3 ambientLight = vec3(0.025);
+    fragColor
+= albedo * ambientLight; + + gl_FragColor = vec4(fragColor, 1.0); + } + `; +} diff --git a/src/shaders/clusteredForward.vert.glsl b/src/shaders/clusteredForward.vert.glsl new file mode 100644 index 0000000..9850c7f --- /dev/null +++ b/src/shaders/clusteredForward.vert.glsl @@ -0,0 +1,19 @@ +#version 100 +precision highp float; + +uniform mat4 u_viewProjectionMatrix; + +attribute vec3 a_position; +attribute vec3 a_normal; +attribute vec2 a_uv; + +varying vec3 v_position; +varying vec3 v_normal; +varying vec2 v_uv; + +void main() { + gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); + v_position = a_position; + v_normal = a_normal; + v_uv = a_uv; +} \ No newline at end of file diff --git a/src/shaders/deferred.frag.glsl.js b/src/shaders/deferred.frag.glsl.js new file mode 100644 index 0000000..50f1e75 --- /dev/null +++ b/src/shaders/deferred.frag.glsl.js @@ -0,0 +1,20 @@ +export default function(params) { + return ` + #version 100 + precision highp float; + + uniform sampler2D u_gbuffers[${params.numGBuffers}]; + + varying vec2 v_uv; + + void main() { + // TODO: extract data from g buffers and do lighting + // vec4 gb0 = texture2D(u_gbuffers[0], v_uv); + // vec4 gb1 = texture2D(u_gbuffers[1], v_uv); + // vec4 gb2 = texture2D(u_gbuffers[2], v_uv); + // vec4 gb3 = texture2D(u_gbuffers[3], v_uv); + + gl_FragColor = vec4(v_uv, 0.0, 1.0); + } + `; +} \ No newline at end of file diff --git a/src/shaders/deferredToTexture.frag.glsl b/src/shaders/deferredToTexture.frag.glsl new file mode 100644 index 0000000..bafc086 --- /dev/null +++ b/src/shaders/deferredToTexture.frag.glsl @@ -0,0 +1,29 @@ +#version 100 +#extension GL_EXT_draw_buffers: enable +precision highp float; + +uniform sampler2D u_colmap; +uniform sampler2D u_normap; + +varying vec3 v_position; +varying vec3 v_normal; +varying vec2 v_uv; + +vec3 applyNormalMap(vec3 geomnor, vec3 normap) { + normap = normap * 2.0 - 1.0; + vec3 up = normalize(vec3(0.001, 1, 0.001)); + vec3 surftan = 
normalize(cross(geomnor, up)); + vec3 surfbinor = cross(geomnor, surftan); + return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; +} + +void main() { + vec3 norm = applyNormalMap(v_normal, vec3(texture2D(u_normap, v_uv))); + vec3 col = vec3(texture2D(u_colmap, v_uv)); + + // TODO: populate your g buffer + // gl_FragData[0] = ?? + // gl_FragData[1] = ?? + // gl_FragData[2] = ?? + // gl_FragData[3] = ?? +} \ No newline at end of file diff --git a/src/shaders/deferredToTexture.vert.glsl b/src/shaders/deferredToTexture.vert.glsl new file mode 100644 index 0000000..9850c7f --- /dev/null +++ b/src/shaders/deferredToTexture.vert.glsl @@ -0,0 +1,19 @@ +#version 100 +precision highp float; + +uniform mat4 u_viewProjectionMatrix; + +attribute vec3 a_position; +attribute vec3 a_normal; +attribute vec2 a_uv; + +varying vec3 v_position; +varying vec3 v_normal; +varying vec2 v_uv; + +void main() { + gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); + v_position = a_position; + v_normal = a_normal; + v_uv = a_uv; +} \ No newline at end of file diff --git a/src/shaders/forward.frag.glsl.js b/src/shaders/forward.frag.glsl.js new file mode 100644 index 0000000..47f40a1 --- /dev/null +++ b/src/shaders/forward.frag.glsl.js @@ -0,0 +1,96 @@ +export default function(params) { + return ` + #version 100 + precision highp float; + + uniform sampler2D u_colmap; + uniform sampler2D u_normap; + uniform sampler2D u_lightbuffer; + + varying vec3 v_position; + varying vec3 v_normal; + varying vec2 v_uv; + + vec3 applyNormalMap(vec3 geomnor, vec3 normap) { + normap = normap * 2.0 - 1.0; + vec3 up = normalize(vec3(0.001, 1, 0.001)); + vec3 surftan = normalize(cross(geomnor, up)); + vec3 surfbinor = cross(geomnor, surftan); + return normap.y * surftan + normap.x * surfbinor + normap.z * geomnor; + } + + struct Light { + vec3 position; + float radius; + vec3 color; + }; + + float ExtractFloat(sampler2D texture, int textureWidth, int textureHeight, int index, int 
component) {
+    float u = float(index + 1) / float(textureWidth + 1);
+    int pixel = component / 4;
+    float v = float(pixel + 1) / float(textureHeight + 1);
+    vec4 texel = texture2D(texture, vec2(u, v));
+    int pixelComponent = component - pixel * 4;
+    if (pixelComponent == 0) {
+      return texel[0];
+    } else if (pixelComponent == 1) {
+      return texel[1];
+    } else if (pixelComponent == 2) {
+      return texel[2];
+    } else if (pixelComponent == 3) {
+      return texel[3];
+    }
+    // FIX: unreachable (component % 4 is always 0..3), but GLSL ES 1.00
+    // requires non-void functions to return a value on every control path;
+    // some strict drivers refuse to compile without this fallback.
+    return 0.0;
+  }
+
+  Light UnpackLight(int index) {
+    Light light;
+    float u = float(index + 1) / float(${params.numLights + 1});
+    // NOTE(review): v = 0.0 / 0.5 sit exactly on texel edges of the 2-row
+    // light texture (the clustered variant samples at 0.3 / 0.6, inside each
+    // row) — confirm row selection is stable on target drivers.
+    vec4 v1 = texture2D(u_lightbuffer, vec2(u, 0.0));
+    vec4 v2 = texture2D(u_lightbuffer, vec2(u, 0.5));
+    light.position = v1.xyz;
+
+    // LOOK: This extracts the 4th float (radius) of the (index)th light in the buffer
+    // Note that this is just an example implementation to extract one float.
+    // There are more efficient ways if you need adjacent values
+    light.radius = ExtractFloat(u_lightbuffer, ${params.numLights}, 2, index, 3);
+
+    light.color = v2.rgb;
+    return light;
+  }
+
+  // Cubic approximation of gaussian curve so we falloff to exactly 0 at the light radius
+  float cubicGaussian(float h) {
+    if (h < 1.0) {
+      return 0.25 * pow(2.0 - h, 3.0) - pow(1.0 - h, 3.0);
+    } else if (h < 2.0) {
+      return 0.25 * pow(2.0 - h, 3.0);
+    } else {
+      return 0.0;
+    }
+  }
+
+  void main() {
+    vec3 albedo = texture2D(u_colmap, v_uv).rgb;
+    vec3 normap = texture2D(u_normap, v_uv).xyz;
+    vec3 normal = applyNormalMap(v_normal, normap);
+
+    vec3 fragColor = vec3(0.0);
+
+    for (int i = 0; i < ${params.numLights}; ++i) {
+      Light light = UnpackLight(i);
+      float lightDistance = distance(light.position, v_position);
+      vec3 L = (light.position - v_position) / lightDistance;
+
+      float lightIntensity = cubicGaussian(2.0 * lightDistance / light.radius);
+      float lambertTerm = max(dot(L, normal), 0.0);
+
+      fragColor += albedo * lambertTerm * light.color * vec3(lightIntensity);
+    }
+
+    const vec3 ambientLight =
vec3(0.025); + fragColor += albedo * ambientLight; + + gl_FragColor = vec4(fragColor, 1.0); + } + `; +} diff --git a/src/shaders/forward.vert.glsl b/src/shaders/forward.vert.glsl new file mode 100644 index 0000000..9850c7f --- /dev/null +++ b/src/shaders/forward.vert.glsl @@ -0,0 +1,19 @@ +#version 100 +precision highp float; + +uniform mat4 u_viewProjectionMatrix; + +attribute vec3 a_position; +attribute vec3 a_normal; +attribute vec2 a_uv; + +varying vec3 v_position; +varying vec3 v_normal; +varying vec2 v_uv; + +void main() { + gl_Position = u_viewProjectionMatrix * vec4(a_position, 1.0); + v_position = a_position; + v_normal = a_normal; + v_uv = a_uv; +} \ No newline at end of file diff --git a/src/shaders/quad.vert.glsl b/src/shaders/quad.vert.glsl new file mode 100644 index 0000000..31b81ec --- /dev/null +++ b/src/shaders/quad.vert.glsl @@ -0,0 +1,11 @@ +#version 100 +precision highp float; + +attribute vec3 a_position; + +varying vec2 v_uv; + +void main() { + gl_Position = vec4(a_position, 1.0); + v_uv = a_position.xy * 0.5 + 0.5; +} \ No newline at end of file diff --git a/src/utils.js b/src/utils.js new file mode 100644 index 0000000..5cc3bec --- /dev/null +++ b/src/utils.js @@ -0,0 +1,100 @@ +import { gl, canvas, abort } from './init'; +import QuadVertSource from './shaders/quad.vert.glsl'; + +function downloadURI(uri, name) { + var link = document.createElement('a'); + link.download = name; + link.href = uri; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); +}; + +export function saveCanvas() { + downloadURI(canvas.toDataURL('image/png'), 'webgl-canvas-' + Date.now() + '.png'); +} + +function compileShader(shaderSource, shaderType) { + var shader = gl.createShader(shaderType); + gl.shaderSource(shader, shaderSource); + gl.compileShader(shader); + if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { + console.error(shaderSource); + abort('shader compiler error:\n' + gl.getShaderInfoLog(shader)); + } + + return 
shader; +}; + +function linkShader(vs, fs) { + var prog = gl.createProgram(); + gl.attachShader(prog, vs); + gl.attachShader(prog, fs); + gl.linkProgram(prog); + if (!gl.getProgramParameter(prog, gl.LINK_STATUS)) { + abort('shader linker error:\n' + gl.getProgramInfoLog(prog)); + } + return prog; +}; + +function addShaderLocations(result, shaderLocations) { + if (shaderLocations && shaderLocations.uniforms && shaderLocations.uniforms.length) { + for (let i = 0; i < shaderLocations.uniforms.length; ++i) { + result = Object.assign(result, { + [shaderLocations.uniforms[i]]: gl.getUniformLocation(result.glShaderProgram, shaderLocations.uniforms[i]), + }); + } + } + if (shaderLocations && shaderLocations.attribs && shaderLocations.attribs.length) { + for (let i = 0; i < shaderLocations.attribs.length; ++i) { + result = Object.assign(result, { + [shaderLocations.attribs[i]]: gl.getAttribLocation(result.glShaderProgram, shaderLocations.attribs[i]), + }); + } + } + return result; +} + +export function loadShaderProgram(vsSource, fsSource, shaderLocations) { + const vs = compileShader(vsSource, gl.VERTEX_SHADER); + const fs = compileShader(fsSource, gl.FRAGMENT_SHADER); + return addShaderLocations({ + glShaderProgram: linkShader(vs, fs), + }, shaderLocations); +} + +const quadPositions = new Float32Array([ + -1.0, -1.0, 0.0, + 1.0, -1.0, 0.0, + -1.0, 1.0, 0.0, + 1.0, 1.0, 0.0 +]); + +const quadBuffer = gl.createBuffer(); +gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); +gl.bufferData(gl.ARRAY_BUFFER, quadPositions, gl.STATIC_DRAW); + +export function renderFullscreenQuad(program) { + // Bind the program to use to draw the quad + gl.useProgram(program.glShaderProgram); + + // Bind the VBO as the gl.ARRAY_BUFFER + gl.bindBuffer(gl.ARRAY_BUFFER, quadBuffer); + + // Enable the bound buffer as the vertex attrib array for + // program.a_position, using gl.enableVertexAttribArray + gl.enableVertexAttribArray(program.a_position); + + // Use gl.vertexAttribPointer to tell WebGL the 
type/layout for + // program.a_position's access pattern. + gl.vertexAttribPointer(program.a_position, 3, gl.FLOAT, gl.FALSE, 0, 0); + + // Use gl.drawArrays to draw the quad + gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); + + // Disable the enabled vertex attrib array + gl.disableVertexAttribArray(program.a_position); + + // Unbind the array buffer. + gl.bindBuffer(gl.ARRAY_BUFFER, null); +} \ No newline at end of file diff --git a/webpack.config.js b/webpack.config.js new file mode 100644 index 0000000..f9bb0ff --- /dev/null +++ b/webpack.config.js @@ -0,0 +1,34 @@ +const path = require('path'); + +module.exports = { + entry: path.join(__dirname, 'src/init'), + output: { + path: path.join(__dirname, 'build'), + filename: 'bundle.js', + }, + module: { + loaders: [ + { + test: /\.js$/, + exclude: /(node_modules|bower_components)/, + loader: 'babel-loader', + query: { + presets: [['env', { + targets: { + browsers: ['> 1%', 'last 2 major versions'], + }, + }]], + }, + }, + { + test: /\.glsl$/, + loader: 'webpack-glsl-loader' + }, + ], + }, + devtool: 'source-map', + devServer: { + port: 5650, + publicPath: '/build/' + }, +}; \ No newline at end of file