WebGL Learning [11]: Lighting Models

Copyright notice: This is the author's original article and may not be reproduced without permission. For more learning materials, visit the 52tech forum: www.52tech.tech https://blog.csdn.net/m0_37981569/article/details/78515844
<!DOCTYPE HTML>
<html lang="en">
<head>
    <title>Listing 7-3 and 7-4, Texturing and Lighting With the Phong Reflection Model.</title>
    <script src="./lib/webgl-debug.js"></script>
    <script src="./lib/glMatrix.js"></script>
    <script src="./lib/webgl-utils.js"></script>

    <meta charset="utf-8">
    <script id="shader-vs" type="x-shader/x-vertex">
      // Vertex shader implemented to perform lighting according to the
      // Phong reflection model. Forwards texture coordinates to fragment
      // shader.
      attribute vec3 aVertexPosition;
      attribute vec3 aVertexNormal;
      attribute vec2 aTextureCoordinates;

      uniform mat4 uMVMatrix;
      uniform mat4 uPMatrix;
      uniform mat3 uNMatrix;

      uniform vec3 uLightPosition;
      uniform vec3 uAmbientLightColor;
      uniform vec3 uDiffuseLightColor;
      uniform vec3 uSpecularLightColor;

      varying vec2 vTextureCoordinates;
      varying vec3 vLightWeighting;

      const float shininess = 32.0;
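
      // The lighting computed in main() combines the three Phong terms per vertex:
      //   vLightWeighting = uAmbientLightColor
      //                   + uDiffuseLightColor  * max(dot(n, l), 0.0)
      //                   + uSpecularLightColor * pow(max(dot(r, v), 0.0), shininess)
      // where n is the normal, l the direction to the light, r the reflection of -l
      // about n, and v the direction to the viewer, all expressed in eye coordinates.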

      void main() {
        // Get the vertex position in eye coordinates
        vec4 vertexPositionEye4 = uMVMatrix * vec4(aVertexPosition, 1.0);
        vec3 vertexPositionEye3 = vertexPositionEye4.xyz / vertexPositionEye4.w;

        // Calculate the vector (l) to the light source
        vec3 vectorToLightSource = normalize(uLightPosition - vertexPositionEye3);

        // Transform the normal (n) to eye coordinates
        vec3 normalEye = normalize(uNMatrix * aVertexNormal);

        // Calculate n dot l for diffuse lighting
        float diffuseLightWeighting = max(dot(normalEye,
                                          vectorToLightSource), 0.0);

        // Calculate the reflection vector (r) that is needed for specular light
        vec3 reflectionVector = normalize(reflect(-vectorToLightSource,
                                                   normalEye));

        // The camera in eye coordinates is located at the origin and is pointing
        // along the negative z-axis. Calculate viewVector (v) in eye coordinates as:
        // (0.0, 0.0, 0.0) - vertexPositionEye3
        vec3 viewVectorEye = -normalize(vertexPositionEye3);

        float rdotv = max(dot(reflectionVector, viewVectorEye), 0.0);

        float specularLightWeighting = pow(rdotv, shininess);

        // Sum up all three reflection components and send to the fragment shader
        vLightWeighting = uAmbientLightColor +
                          uDiffuseLightColor * diffuseLightWeighting +
                          uSpecularLightColor * specularLightWeighting;

         // Finally transform the geometry
         gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);
         vTextureCoordinates = aTextureCoordinates;
      }
    </script>

    <!-- Fragment shader -->
    <script id="shader-fs" type="x-shader/x-fragment">
      precision mediump float;

      varying vec2 vTextureCoordinates;
      varying vec3 vLightWeighting;
      uniform sampler2D uSampler;

      void main() {
        vec4 texelColor = texture2D(uSampler, vTextureCoordinates);

        // Combine texture and lighting (vLightWeighting holds the ambient, diffuse and
        // specular contributions already computed in the vertex shader)
        gl_FragColor = vec4(vLightWeighting.rgb * texelColor.rgb, texelColor.a);
      }
    </script>

    <script type="text/javascript">
        // globals
        var gl;
        var pwgl = {};
        // Keep track of ongoing image loads to be able to handle lost context
        pwgl.ongoingImageLoads = [];
        var canvas;

        function createGLContext(canvas) {
            var names = ["webgl", "experimental-webgl"];
            var context = null;
            for (var i=0; i < names.length; i++) {
                try {
                    context = canvas.getContext(names[i]);
                } catch(e) {}
                if (context) {
                    break;
                }
            }
            if (context) {
                context.viewportWidth = canvas.width;
                context.viewportHeight = canvas.height;
            } else {
                alert("Failed to create WebGL context!");
            }
            return context;
        }

        function loadShaderFromDOM(id) {
            var shaderScript = document.getElementById(id);

            // If we don't find an element with the specified id
            // we do an early exit
            if (!shaderScript) {
                return null;
            }

            // Loop through the children for the found DOM element and
            // build up the shader source code as a string
            var shaderSource = "";
            var currentChild = shaderScript.firstChild;
            while (currentChild) {
                if (currentChild.nodeType == 3) { // 3 corresponds to TEXT_NODE
                    shaderSource += currentChild.textContent;
                }
                currentChild = currentChild.nextSibling;
            }

            var shader;
            if (shaderScript.type == "x-shader/x-fragment") {
                shader = gl.createShader(gl.FRAGMENT_SHADER);
            } else if (shaderScript.type == "x-shader/x-vertex") {
                shader = gl.createShader(gl.VERTEX_SHADER);
            } else {
                return null;
            }

            gl.shaderSource(shader, shaderSource);
            gl.compileShader(shader);

            if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS) &&
                !gl.isContextLost()) {
                alert(gl.getShaderInfoLog(shader));
                return null;
            }
            return shader;
        }

        function setupShaders() {
            var vertexShader = loadShaderFromDOM("shader-vs");
            var fragmentShader = loadShaderFromDOM("shader-fs");

            var shaderProgram = gl.createProgram();
            gl.attachShader(shaderProgram, vertexShader);
            gl.attachShader(shaderProgram, fragmentShader);
            gl.linkProgram(shaderProgram);

            if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS) &&
                !gl.isContextLost()) {
                alert("Failed to link shaders: " + gl.getProgramInfoLog(shaderProgram));
            }

            gl.useProgram(shaderProgram);
            pwgl.vertexPositionAttributeLoc =
                gl.getAttribLocation(shaderProgram, "aVertexPosition");

            pwgl.vertexNormalAttributeLoc =
                gl.getAttribLocation(shaderProgram, "aVertexNormal");

            pwgl.vertexTextureAttributeLoc =
                gl.getAttribLocation(shaderProgram, "aTextureCoordinates");

            pwgl.uniformMVMatrixLoc =
                gl.getUniformLocation(shaderProgram, "uMVMatrix");

            pwgl.uniformProjMatrixLoc =
                gl.getUniformLocation(shaderProgram, "uPMatrix");

            pwgl.uniformNormalMatrixLoc =
                gl.getUniformLocation(shaderProgram, "uNMatrix");

            pwgl.uniformSamplerLoc =
                gl.getUniformLocation(shaderProgram, "uSampler");

            pwgl.uniformLightPositionLoc =
                gl.getUniformLocation(shaderProgram, "uLightPosition");

            pwgl.uniformAmbientLightColorLoc =
                gl.getUniformLocation(shaderProgram, "uAmbientLightColor");

            pwgl.uniformDiffuseLightColorLoc =
                gl.getUniformLocation(shaderProgram, "uDiffuseLightColor");

            pwgl.uniformSpecularLightColorLoc =
                gl.getUniformLocation(shaderProgram, "uSpecularLightColor");


            gl.enableVertexAttribArray(pwgl.vertexPositionAttributeLoc);
            gl.enableVertexAttribArray(pwgl.vertexNormalAttributeLoc);
            gl.enableVertexAttribArray(pwgl.vertexTextureAttributeLoc);

            pwgl.modelViewMatrix = mat4.create();
            pwgl.projectionMatrix = mat4.create();
            pwgl.modelViewMatrixStack = [];
        }
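
        // pushModelViewMatrix()/popModelViewMatrix() implement a simple matrix stack:
        // a copy of the current model-view matrix is saved before local transforms are
        // applied (e.g. for the table top and each leg) and restored afterwards, so the
        // transforms of one object do not leak into the next draw call.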

        function pushModelViewMatrix() {
            var copyToPush = mat4.create(pwgl.modelViewMatrix);
            pwgl.modelViewMatrixStack.push(copyToPush);
        }

        function popModelViewMatrix() {
            if (pwgl.modelViewMatrixStack.length == 0) {
                throw "Error popModelViewMatrix() - Stack was empty ";
            }
            pwgl.modelViewMatrix = pwgl.modelViewMatrixStack.pop();
        }

        function setupFloorBuffers() {
            pwgl.floorVertexPositionBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexPositionBuffer);

            var floorVertexPosition = [
                // Plane in y=0
                5.0,   0.0,  5.0,  //v0
                5.0,   0.0, -5.0,  //v1
                -5.0,   0.0, -5.0,  //v2
                -5.0,   0.0,  5.0]; //v3

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexPosition),
                gl.STATIC_DRAW);

            pwgl.FLOOR_VERTEX_POS_BUF_ITEM_SIZE = 3;
            pwgl.FLOOR_VERTEX_POS_BUF_NUM_ITEMS = 4;

            // Specify the floor normals (all pointing straight up) to be able to do lighting calculations
            pwgl.floorVertexNormalBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexNormalBuffer);

            var floorVertexNormals = [
                0.0,   1.0,  0.0,  //v0
                0.0,   1.0,  0.0,  //v1
                0.0,   1.0,  0.0,  //v2
                0.0,   1.0,  0.0]; //v3

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexNormals),
                gl.STATIC_DRAW);

            pwgl.FLOOR_VERTEX_NORMAL_BUF_ITEM_SIZE = 3;
            pwgl.FLOOR_VERTEX_NORMAL_BUF_NUM_ITEMS = 4;

            // Setup texture coordinates buffer
            pwgl.floorVertexTextureCoordinateBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexTextureCoordinateBuffer);
            var floorVertexTextureCoordinates = [
                2.0, 0.0,
                2.0, 2.0,
                0.0, 2.0,
                0.0, 0.0
            ];
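
            // Texture coordinates up to 2.0 make the ground texture tile twice in each
            // direction across the floor; how the tiling repeats is controlled by the
            // MIRRORED_REPEAT wrap mode set in textureFinishedLoading().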

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(floorVertexTextureCoordinates),
                gl.STATIC_DRAW);

            pwgl.FLOOR_VERTEX_TEX_COORD_BUF_ITEM_SIZE = 2;
            pwgl.FLOOR_VERTEX_TEX_COORD_BUF_NUM_ITEMS = 4;

            // Setup index buffer
            pwgl.floorVertexIndexBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.floorVertexIndexBuffer);
            var floorVertexIndices = [0, 1, 2, 3];

            gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(floorVertexIndices),
                gl.STATIC_DRAW);

            pwgl.FLOOR_VERTEX_INDEX_BUF_ITEM_SIZE = 1;
            pwgl.FLOOR_VERTEX_INDEX_BUF_NUM_ITEMS = 4;
        }

        function setupCubeBuffers() {
            pwgl.cubeVertexPositionBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexPositionBuffer);
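
            // The cube is defined with 24 vertices (4 per face) instead of 8 shared
            // corners, so that every face can carry its own flat normal and its own
            // texture coordinates.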

            var cubeVertexPosition = [
                // Front face
                1.0,  1.0,  1.0, //v0
                -1.0,  1.0,  1.0, //v1
                -1.0, -1.0,  1.0, //v2
                1.0, -1.0,  1.0, //v3

                // Back face
                1.0,  1.0, -1.0, //v4
                -1.0,  1.0, -1.0, //v5
                -1.0, -1.0, -1.0, //v6
                1.0, -1.0, -1.0, //v7

                // Left face
                -1.0,  1.0,  1.0, //v8
                -1.0,  1.0, -1.0, //v9
                -1.0, -1.0, -1.0, //v10
                -1.0, -1.0,  1.0, //v11

                // Right face
                1.0,  1.0,  1.0, //12
                1.0, -1.0,  1.0, //13
                1.0, -1.0, -1.0, //14
                1.0,  1.0, -1.0, //15

                // Top face
                1.0,  1.0,  1.0, //v16
                1.0,  1.0, -1.0, //v17
                -1.0,  1.0, -1.0, //v18
                -1.0,  1.0,  1.0, //v19

                // Bottom face
                1.0, -1.0,  1.0, //v20
                1.0, -1.0, -1.0, //v21
                -1.0, -1.0, -1.0, //v22
                -1.0, -1.0,  1.0, //v23
            ];

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertexPosition),
                gl.STATIC_DRAW);

            pwgl.CUBE_VERTEX_POS_BUF_ITEM_SIZE = 3;
            pwgl.CUBE_VERTEX_POS_BUF_NUM_ITEMS = 24;

            // Specify normals to be able to do lighting calculations
            pwgl.cubeVertexNormalBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexNormalBuffer);

            // Specify the normal for each face of the cube
            var cubeVertexNormals = [
                // Front face
                0.0,  0.0,  1.0, //v0
                0.0,  0.0,  1.0, //v1
                0.0,  0.0,  1.0, //v2
                0.0,  0.0,  1.0, //v3

                // Back face
                0.0,  0.0, -1.0, //v4
                0.0,  0.0, -1.0, //v5
                0.0,  0.0, -1.0, //v6
                0.0,  0.0, -1.0, //v7

                // Left face
                -1.0,  0.0,  0.0, //v8
                -1.0,  0.0,  0.0, //v9
                -1.0,  0.0,  0.0, //v10
                -1.0,  0.0,  0.0, //v11

                // Right face
                1.0,  0.0,  0.0, //12
                1.0,  0.0,  0.0, //13
                1.0,  0.0,  0.0, //14
                1.0,  0.0,  0.0, //15

                // Top face
                0.0,  1.0,  0.0, //v16
                0.0,  1.0,  0.0, //v17
                0.0,  1.0,  0.0, //v18
                0.0,  1.0,  0.0, //v19

                // Bottom face
                0.0, -1.0,  0.0, //v20
                0.0, -1.0,  0.0, //v21
                0.0, -1.0,  0.0, //v22
                0.0, -1.0,  0.0, //v23
            ];

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(cubeVertexNormals),
                gl.STATIC_DRAW);

            pwgl.CUBE_VERTEX_NORMAL_BUF_ITEM_SIZE = 3;
            pwgl.CUBE_VERTEX_NORMAL_BUF_NUM_ITEMS = 24;



            // Setup buffer with texture coordinates
            pwgl.cubeVertexTextureCoordinateBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexTextureCoordinateBuffer);
            var textureCoordinates = [
                //Front face
                0.0, 0.0, //v0
                1.0, 0.0, //v1
                1.0, 1.0, //v2
                0.0, 1.0, //v3

                // Back face
                0.0, 1.0, //v4
                1.0, 1.0, //v5
                1.0, 0.0, //v6
                0.0, 0.0, //v7

                // Left face
                0.0, 1.0, //v8
                1.0, 1.0, //v9
                1.0, 0.0, //v10
                0.0, 0.0, //v11

                // Right face
                0.0, 1.0, //v12
                1.0, 1.0, //v13
                1.0, 0.0, //v14
                0.0, 0.0, //v15

                // Top face
                0.0, 1.0, //v16
                1.0, 1.0, //v17
                1.0, 0.0, //v18
                0.0, 0.0, //v19

                // Bottom face
                0.0, 1.0, //v20
                1.0, 1.0, //v21
                1.0, 0.0, //v22
                0.0, 0.0, //v23
            ];

            gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(textureCoordinates),gl.STATIC_DRAW);
            pwgl.CUBE_VERTEX_TEX_COORD_BUF_ITEM_SIZE = 2;
            pwgl.CUBE_VERTEX_TEX_COORD_BUF_NUM_ITEMS = 24;

            pwgl.cubeVertexIndexBuffer = gl.createBuffer();
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.cubeVertexIndexBuffer);
            var cubeVertexIndices = [
                0, 1, 2,      0, 2, 3,    // Front face
                4, 6, 5,      4, 7, 6,    // Back face
                8, 9, 10,     8, 10, 11,  // Left face
                12, 13, 14,   12, 14, 15, // Right face
                16, 17, 18,   16, 18, 19, // Top face
                20, 22, 21,   20, 23, 22  // Bottom face
            ];
            gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(cubeVertexIndices),
                gl.STATIC_DRAW);
            pwgl.CUBE_VERTEX_INDEX_BUF_ITEM_SIZE = 1;
            pwgl.CUBE_VERTEX_INDEX_BUF_NUM_ITEMS = 36;
        }

        function textureFinishedLoading(image, texture) {
            gl.bindTexture(gl.TEXTURE_2D, texture);
            gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);

            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE,
                image);
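
            // Note: in WebGL 1, generateMipmap() and the REPEAT/MIRRORED_REPEAT wrap
            // modes only work for power-of-two textures, so every image loaded here
            // (e.g. wood_128x128.jpg, wood_floor_256.jpg) should have power-of-two
            // dimensions.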

            gl.generateMipmap(gl.TEXTURE_2D);

            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.MIRRORED_REPEAT);
            gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.MIRRORED_REPEAT);
            gl.bindTexture(gl.TEXTURE_2D, null);
        }

        function loadImageForTexture(url, texture) {
            var image = new Image();
            image.onload = function() {
                pwgl.ongoingImageLoads.splice(pwgl.ongoingImageLoads.indexOf(image), 1);
                textureFinishedLoading(image, texture);
            }
            pwgl.ongoingImageLoads.push(image);
            image.src = url;
        }

        function setupTextures() {
            // Texture for the table
            pwgl.woodTexture = gl.createTexture();
            loadImageForTexture("./resources/wood_128x128.jpg", pwgl.woodTexture);

            // Texture for the floor
            pwgl.groundTexture = gl.createTexture();
            loadImageForTexture("./resources/wood_floor_256.jpg", pwgl.groundTexture);

            // Texture for the box on the table
            pwgl.boxTexture = gl.createTexture();
            loadImageForTexture("./resources/wicker_256.jpg", pwgl.boxTexture);

            // Additional texture for a colored cube (pwgl.colorCube is not drawn in this listing)
            pwgl.colorCube = gl.createTexture();
            loadImageForTexture("./resources/xiuxiuba.bmp", pwgl.colorCube);
        }

        function setupBuffers() {
            setupFloorBuffers();
            setupCubeBuffers();
        }

        // Set the light position and the ambient, diffuse and specular light colors
        function setupLights() {
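            // The vertex shader interprets uLightPosition in eye coordinates; it is
            // uploaded once here and never transformed by the view matrix, so the
            // light stays fixed relative to the viewer while the scene rotates.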
            gl.uniform3fv(pwgl.uniformLightPositionLoc, [0.0, 20.0, 0.0]);
            gl.uniform3fv(pwgl.uniformAmbientLightColorLoc, [0.2, 0.2, 0.2]);
            gl.uniform3fv(pwgl.uniformDiffuseLightColorLoc, [0.7, 0.7, 0.7]);
            gl.uniform3fv(pwgl.uniformSpecularLightColorLoc, [0.8, 0.8, 0.8]);
        }

        function uploadModelViewMatrixToShader() {
            gl.uniformMatrix4fv(pwgl.uniformMVMatrixLoc, false, pwgl.modelViewMatrix);
        }

        function uploadProjectionMatrixToShader() {
            gl.uniformMatrix4fv(pwgl.uniformProjMatrixLoc,
                false, pwgl.projectionMatrix);
        }
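
        // Background: normals cannot simply be transformed with the model-view matrix
        // once it contains non-uniform scaling (drawTable() scales the table top by
        // [2.0, 0.1, 2.0]); the correct transform is the transpose of the inverse of
        // the upper-left 3x3 of the model-view matrix, which is computed below.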

        // Upload the normal matrix to the shader
        function uploadNormalMatrixToShader() {
            var normalMatrix = mat3.create();
            // Compute the inverse of the model-view matrix (upper-left 3x3)
            mat4.toInverseMat3(pwgl.modelViewMatrix, normalMatrix);
            // Transpose it to obtain the normal matrix
            mat3.transpose(normalMatrix);
            // Pass the normal matrix to the shader
            gl.uniformMatrix3fv(pwgl.uniformNormalMatrixLoc, false, normalMatrix);
        }

        function drawFloor() {
            // Bind position buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexPositionBuffer);
            gl.vertexAttribPointer(pwgl.vertexPositionAttributeLoc,
                pwgl.FLOOR_VERTEX_POS_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);

            // Bind normal buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexNormalBuffer);
            gl.vertexAttribPointer(pwgl.vertexNormalAttributeLoc,
                pwgl.FLOOR_VERTEX_NORMAL_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);

            // Bind texture coordinate buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.floorVertexTextureCoordinateBuffer);
            gl.vertexAttribPointer(pwgl.vertexTextureAttributeLoc,
                pwgl.FLOOR_VERTEX_TEX_COORD_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);

            gl.activeTexture(gl.TEXTURE0);
            gl.bindTexture(gl.TEXTURE_2D, pwgl.groundTexture);

            // Bind index buffer and draw the floor
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.floorVertexIndexBuffer);
            gl.drawElements(gl.TRIANGLE_FAN, pwgl.FLOOR_VERTEX_INDEX_BUF_NUM_ITEMS,
                gl.UNSIGNED_SHORT, 0);
        }

        function drawCube(texture) {
            // Bind position buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexPositionBuffer);
            gl.vertexAttribPointer(pwgl.vertexPositionAttributeLoc,
                pwgl.CUBE_VERTEX_POS_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);

            // Bind normal buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexNormalBuffer);
            gl.vertexAttribPointer(pwgl.vertexNormalAttributeLoc,
                pwgl.CUBE_VERTEX_NORMAL_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);


            // Bind texture coordinate buffer
            gl.bindBuffer(gl.ARRAY_BUFFER, pwgl.cubeVertexTextureCoordinateBuffer);
            gl.vertexAttribPointer(pwgl.vertexTextureAttributeLoc,
                pwgl.CUBE_VERTEX_TEX_COORD_BUF_ITEM_SIZE,
                gl.FLOAT, false, 0, 0);

            gl.activeTexture(gl.TEXTURE0);
            gl.bindTexture(gl.TEXTURE_2D, texture);

            // Bind index buffer and draw cube
            gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, pwgl.cubeVertexIndexBuffer);

            gl.drawElements(gl.TRIANGLES, pwgl.CUBE_VERTEX_INDEX_BUF_NUM_ITEMS,
                gl.UNSIGNED_SHORT, 0);
        }

        function drawTable(){
            // Draw a simple table by modifying the modelview matrix
            // (translate and scale) and then use the function drawCube()
            // to draw a table top and four table legs.

            pushModelViewMatrix();
            mat4.translate(pwgl.modelViewMatrix, [0.0, 1.0, 0.0], pwgl.modelViewMatrix);
            mat4.scale(pwgl.modelViewMatrix, [2.0, 0.1, 2.0], pwgl.modelViewMatrix);
            uploadModelViewMatrixToShader();
            uploadNormalMatrixToShader();
            // Draw the actual cube (now scaled to a cuboid) with woodTexture
            drawCube(pwgl.woodTexture);
            popModelViewMatrix();

            // Draw the table legs
            for (var i=-1; i<=1; i+=2) {
                for (var j= -1; j<=1; j+=2) {
                    pushModelViewMatrix();
                    mat4.translate(pwgl.modelViewMatrix, [i*1.9, -0.1, j*1.9], pwgl.modelViewMatrix);
                    mat4.scale(pwgl.modelViewMatrix, [0.1, 1.0, 0.1], pwgl.modelViewMatrix);
                    uploadModelViewMatrixToShader();
                    uploadNormalMatrixToShader();
                    drawCube(pwgl.woodTexture);
                    popModelViewMatrix();
                }
            }
        }


        pwgl.yRot = 0;
        // requestAnimFrame passes a timestamp, but Date.now() is used for all timing
        // here so that it is directly comparable with previousFrameTimeStamp and
        // animationStartTime, which are also initialized with Date.now().
        function draw() {
            pwgl.requestId = requestAnimFrame(draw);
            var currentTime = Date.now();

            // Update FPS if a second or more has passed since last FPS update
            if(currentTime - pwgl.previousFrameTimeStamp >= 1000) {
                pwgl.fpsCounter.innerHTML = pwgl.nbrOfFramesForFPS;
                pwgl.nbrOfFramesForFPS = 0;
                pwgl.previousFrameTimeStamp = currentTime;
            }

            gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight);
            gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
            mat4.perspective(60, gl.viewportWidth / gl.viewportHeight,
                1, 100.0, pwgl.projectionMatrix);
            mat4.identity(pwgl.modelViewMatrix);
            mat4.lookAt([8, 12, 8],[0, 0, 0], [0, 1,0], pwgl.modelViewMatrix);
            mat4.rotateY(pwgl.modelViewMatrix, pwgl.yRot, pwgl.modelViewMatrix);
            pwgl.yRot += 0.01;

            uploadModelViewMatrixToShader();
            uploadProjectionMatrixToShader();
            uploadNormalMatrixToShader();
            gl.uniform1i(pwgl.uniformSamplerLoc, 0);

            drawFloor();

            // Draw table
            pushModelViewMatrix();
            mat4.translate(pwgl.modelViewMatrix, [0.0, 1.1, 0.0], pwgl.modelViewMatrix);
            uploadModelViewMatrixToShader();
            uploadNormalMatrixToShader();
            drawTable();
            popModelViewMatrix();

            // Calculate the position for the box that is initially
            // on top of the table but will then be moved during animation
            pushModelViewMatrix();
            if (pwgl.animationStartTime === undefined) {
                pwgl.animationStartTime = currentTime;
            }
            // Update the position of the box
            if (pwgl.y < 5) {
                // First move the box vertically from its original position on top of
                // the table (where y = 2.7) to 5 units above the floor (y = 5).
                // Let this movement take 3 seconds
                pwgl.y = 2.7 + (currentTime - pwgl.animationStartTime)/3000 * (5.0-2.7);
            }
            else {
                // Then move the box in a circle where one revolution takes 2 seconds
                pwgl.angle = (currentTime - pwgl.animationStartTime)/2000*2*Math.PI % (2*Math.PI);

                pwgl.x = Math.cos(pwgl.angle) * pwgl.circleRadius;
                pwgl.z = Math.sin(pwgl.angle) * pwgl.circleRadius;
            }

            mat4.translate(pwgl.modelViewMatrix, [pwgl.x, pwgl.y, pwgl.z], pwgl.modelViewMatrix);
            mat4.scale(pwgl.modelViewMatrix, [0.5, 0.5, 0.5], pwgl.modelViewMatrix);
            uploadModelViewMatrixToShader();
            uploadNormalMatrixToShader();
            drawCube(pwgl.boxTexture);
            popModelViewMatrix();

            // Update number of drawn frames to be able to count fps
            pwgl.nbrOfFramesForFPS++;
        }

        function handleContextLost(event) {
            event.preventDefault();
            cancelRequestAnimFrame(pwgl.requestId);

            // Ignore all ongoing image loads by removing
            // their onload handler
            for (var i = 0; i < pwgl.ongoingImageLoads.length; i++) {
                pwgl.ongoingImageLoads[i].onload = undefined;
            }
            pwgl.ongoingImageLoads = [];
        }

        function init() {
            // Initialization that is performed both during first startup and when
            // the webglcontextrestored event is received.
            setupShaders();
            setupBuffers();
            setupLights();
            setupTextures();
            gl.clearColor(0.0, 0.0, 0.0, 1.0);
            gl.enable(gl.DEPTH_TEST);

            // Initialize some variables for the moving box
            pwgl.x = 0.0;
            pwgl.y = 2.7;
            pwgl.z = 0.0;
            pwgl.circleRadius = 4.0;
            pwgl.angle = 0;
            // Initialize some variables related to the animation
            pwgl.animationStartTime = undefined;
            pwgl.nbrOfFramesForFPS = 0;
            pwgl.previousFrameTimeStamp = Date.now();
        }

        function handleContextRestored(event) {
            init();
            pwgl.requestId = requestAnimFrame(draw,canvas);
        }

        function startup() {
            canvas = document.getElementById("myGLCanvas");
            canvas = WebGLDebugUtils.makeLostContextSimulatingContext(canvas);

            canvas.addEventListener('webglcontextlost', handleContextLost, false);
            canvas.addEventListener('webglcontextrestored', handleContextRestored, false);

            gl = createGLContext(canvas);
            init();

            pwgl.fpsCounter = document.getElementById("fps");

            // Uncomment the three lines of code below to be able to test lost context
            // window.addEventListener('mousedown', function() {
            //   canvas.loseContext();
            // });

            // Draw the complete scene
            draw();
        }
    </script>

</head>

<body onload="startup();">
<canvas id="myGLCanvas" width="500" height="500"></canvas>
<div id="fps-counter">
    FPS: <span id="fps">--</span>
</div>
</body>

</html>