Week 7 Tutorial Solutions

Question 1:

Discuss the differences between the fixed function pipeline and the programmable pipeline.

The main difference is that we bypass the vertex transformation, vertex illumination and projection stages of the fixed function pipeline and do these calculations ourselves in vertex shaders. We also bypass the fixed function texturing stage and write our own fragment shaders. Writing our own shaders gives us more control over exactly what calculations are done and allows us to explore techniques not 'pre-defined' in the fixed function pipeline. For example, it allows us to do Phong shading (per-fragment lighting). In modern OpenGL the fixed function pipeline is deprecated.

Question 2:

Consider lighting a quad on the surface of a cylindrical mesh with radius 1. You are approximating the cylinder with a six-sided prism, as shown below. The position and distance to the light source and camera are labelled.

Compute the diffuse and specular intensity at the point P on the midpoint of the edge, using:

  1. Flat shading
  2. Gouraud shading
  3. Phong shading
  4. The actual surface of the cylinder.

Assume the source intensity and reflection coefficients are all 1. The Phong exponent is 1. Assume that for the specular calculations you are using the actual reflection vector from the Phong lighting model, not the halfway vector or any other approximation.
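
The actual numbers depend on the geometry labelled in the figure, but every case uses the same Phong lighting equations with $I_s = k_d = k_s = 1$ and Phong exponent $n = 1$. The cases differ only in where the lighting is evaluated: flat shading uses the face normal of the quad, Gouraud shading evaluates the lighting at the vertices (using averaged vertex normals) and interpolates the resulting intensities to P, Phong shading interpolates the vertex normals to P and evaluates the lighting there, and the actual cylinder uses the true surface normal at P. With $\mathbf{L}$ the unit vector to the light, $\mathbf{V}$ the unit vector to the camera and $\mathbf{N}$ the unit normal:

$$I_{diffuse} = I_s\, k_d\, \max(\mathbf{N} \cdot \mathbf{L}, 0), \qquad I_{specular} = I_s\, k_s\, \max(\mathbf{R} \cdot \mathbf{V}, 0)^{n}, \qquad \mathbf{R} = 2(\mathbf{N} \cdot \mathbf{L})\mathbf{N} - \mathbf{L}.$$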

Question 3:

Write a fragment of code that sets up a light whose diffuse and specular components are all set to 0.5 for all color components, and that is

  1. a point light that is positioned at, and moves along with, the camera.

    Assuming we have enabled lighting

       float[] lightIntensities = {0.5f,0.5f,0.5f,1f};
       float[] lightPosition = {0,0,0,1}; //w = 1 for a point (positional) light
    
       gl.glEnable(GL2.GL_LIGHT0);
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_DIFFUSE,lightIntensities,0);
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_SPECULAR,lightIntensities,0);
    
       gl.glMatrixMode(GL2.GL_MODELVIEW);
       gl.glLoadIdentity(); 
    
       //When we set the light's position, the MODELVIEW matrix contains
       //the identity matrix, so the light is always at position
       //(0,0,0) in camera co-ordinates and follows the camera/viewpoint around.
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_POSITION,lightPosition,0);
    
       //Set the camera after the light's position has been set
       glu.gluLookAt(0, 0, 5, 0.0, 0.0, 0.0, 0, 1, 0);
    
  2. a directional light in the world. The direction TO the light is (0.5,1,-0.2) in world-coordinates.

    Assuming we have enabled lighting

       float[] lightIntensities = {0.5f,0.5f,0.5f,1f};
       float[] lightPosition = {0.5f,1f,-0.2f,0f}; //w = 0 for a directional light
    
       gl.glEnable(GL2.GL_LIGHT0);
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_DIFFUSE,lightIntensities,0);
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_SPECULAR,lightIntensities,0);
    
       gl.glMatrixMode(GL2.GL_MODELVIEW);
       gl.glLoadIdentity();
    
       //Set the camera before the light's position/direction is set
       glu.gluLookAt(0, 0, 5, 0.0, 0.0, 0.0, 0, 1, 0);
    
       //When we set the light's position/direction, the MODELVIEW matrix
       //contains the camera transformation, so the position/direction is
       //fixed with respect to the world.
       gl.glLightfv(GL2.GL_LIGHT0,GL2.GL_POSITION,lightPosition,0);
    

Question 4:

Suppose we have a light set up with the following diffuse and specular components. Assume ambient light is set to 0 both for the light and globally.

float[] lightDiffuse = {1f,0f,0f,1f};
float[] lightSpecular = {1f,1f,1f,1f};

Assume this light is shining on an object with the following material properties

float[] materialDiffuse = {0f,1f,0f,1f};
float[] materialSpecular = {1f,1f,1f,1f};

Describe the color of the object.

We have a light giving off red diffuse light and white (red, green and blue) specular light. We have an object that reflects green diffuse light and white (red, green and blue) specular light. This means that the object can't reflect any of the red diffuse light. It can reflect the white specular light. So the object will appear black with white specular highlights.
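
This follows from the component-wise (per color channel) products in the lighting equation; writing just the color factors and omitting the geometric terms:

$$I_{diffuse} = L_{diffuse} \otimes M_{diffuse} = (1,0,0) \otimes (0,1,0) = (0,0,0)$$
$$I_{specular} = L_{specular} \otimes M_{specular} = (1,1,1) \otimes (1,1,1) = (1,1,1)$$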

Question 5:

  1. Suppose we have the following JOGL fragment of code, and suppose we have already loaded, compiled and linked our vertex and fragment shaders, and the resulting shader program is in a variable called shader1.

    void display(GLAutoDrawable drawable){
      GL2 gl = drawable.getGL().getGL2();
    
      gl.glClear(GL2.GL_COLOR_BUFFER_BIT | GL2.GL_DEPTH_BUFFER_BIT);
      gl.glMatrixMode(GL2.GL_MODELVIEW);
      gl.glLoadIdentity(); 
      glu.gluLookAt (0, 0, 5, 0.0, 0.0, 0.0, 0, 1, 0);
      
      glut.glutSolidTeapot(0.5);    
    
      gl.glTranslated(1,0,0);
      glut.glutSolidSphere(0.5,20,20);   
    }
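
    Part 4 below assumes the teapot is drawn with shader1 while the sphere is left on the fixed function pipeline; a minimal sketch of that modification (the same code with the two glUseProgram calls added) is:

    void display(GLAutoDrawable drawable){
      GL2 gl = drawable.getGL().getGL2();
    
      gl.glClear(GL2.GL_COLOR_BUFFER_BIT | GL2.GL_DEPTH_BUFFER_BIT);
      gl.glMatrixMode(GL2.GL_MODELVIEW);
      gl.glLoadIdentity();
      glu.gluLookAt(0, 0, 5, 0.0, 0.0, 0.0, 0, 1, 0);
    
      //Draw the teapot with our shader program
      gl.glUseProgram(shader1);
      glut.glutSolidTeapot(0.5);
    
      //Switch back to the fixed function pipeline for the sphere
      gl.glUseProgram(0);
      gl.glTranslated(1,0,0);
      glut.glutSolidSphere(0.5,20,20);
    }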
    
  2. Find and fix the errors in the following shaders that are trying to do per fragment lighting calculations, considering only ambient light and diffuse reflections from one point light.
    //Vertex Shader
    
    in vec3 N;
    in vec4 v;
    
    void main(void){
        v = gl_ModelViewMatrix * gl_Vertex;
        N = vec3(normalize(gl_NormalMatrix * gl_Normal));
        gl_Position = gl_ModelViewMatrix;
    }
    
    //Fragment Shader
    in vec3 N;
    in vec4 v;
    
    void main(void){
       vec3 normal, lightDir;
       float diffuse, globalAmbient,ambient;
       float NdotL;
    
       globalAmbient = gl_LightModel.ambient * gl_FrontMaterial.ambient;
       ambient = gl_LightSource[0].ambient * gl_FrontMaterial.ambient;
    
       normal = normalize(N);
       
       //Direction from fragment to the light
       lightDir = normalize(vec3(v - gl_LightSource[0].position));
    
       float diffuse = max(dot(normal,lightDir),0.0);
    
       gl_FragColor = globalAmbient + ambient + diffuse;
    
    }
    
    //Vertex Shader
    out vec3 N; //these are outputs not inputs.
    out vec4 v;
    
    void main(void){
        v = gl_ModelViewMatrix * gl_Vertex;
        N = vec3(normalize(gl_NormalMatrix * gl_Normal));
    
        //output needs to be in CVV coordinates - 
        //ie multiplied by projection matrix
        gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
    }
    
    //Fragment Shader
    in vec3 N;
    in vec4 v;
    
    void main(void){
       vec3 normal, lightDir;
    
       //These all have RGBA components.
       vec4 diffuse, globalAmbient,ambient;
       float NdotL;
    
       globalAmbient = gl_LightModel.ambient * gl_FrontMaterial.ambient;
       ambient = gl_LightSource[0].ambient * gl_FrontMaterial.ambient;
    
       normal = normalize(N);
       
       //Direction from fragment to the light
       lightDir = normalize(vec3(gl_LightSource[0].position - v));
    
       diffuse =  gl_LightSource[0].diffuse * gl_FrontMaterial.diffuse * max(dot(normal,lightDir),0.0);
    
      
       gl_FragColor = globalAmbient + ambient + diffuse;
    
    }
    
  3. Half Lambert lighting is a technique designed to prevent the rear of an object (with respect to the light) from losing its shape when it is lit only by a constant ambient term.

    In the standard lighting model, the dot product of the normal and light direction lies within the range -1 to 1, which is then clamped to the range 0 to 1 with the max function.

    In Half Lambert shading we scale our dot product by 0.5 and then add 0.5 so it lies within the 0..1 range.
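
    As a formula, using the same N and L as in the shader code below:

    $$d_{half} = 0.5\,(\mathbf{N} \cdot \mathbf{L}) + 0.5$$

    which maps the full range [-1, 1] of the dot product linearly onto [0, 1].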

    Modify the code above to implement this technique.

    //Fragment Shader
    in vec3 N;
    in vec4 v;
    
    void main(void){
       vec3 normal, lightDir;
       vec4 diffuse, globalAmbient,ambient;
       float NdotL;
    
       globalAmbient = gl_LightModel.ambient * gl_FrontMaterial.ambient;
       ambient = gl_LightSource[0].ambient * gl_FrontMaterial.ambient;
    
       normal = normalize(N);
       
       //Direction from fragment to the light
       lightDir = normalize(vec3(gl_LightSource[0].position - v));
    
       
       diffuse =  gl_LightSource[0].diffuse * gl_FrontMaterial.diffuse * (0.5 * dot(normal,lightDir) + 0.5);
       
    
       gl_FragColor = globalAmbient + ambient + diffuse;
    
    }
    
  4. Suppose we want to model light attenuation using the following equation
    attenuation = 1/(1 + kd^2)
    

    where k is an attenuation factor passed as a uniform into the shader and d is the distance between the fragment and the light.

    Modify the JOGL code from part 1 to pass in a uniform variable k, and modify the relevant shader(s). Note: we only want to apply attenuation to the diffuse lighting calculations.

    //Fragment Shader
    in vec3 N;
    in vec4 v;
    
    uniform float k;
    
    void main(void){
       vec3 normal, lightDir;
       vec4 diffuse, globalAmbient,ambient;
       float NdotL;
    
       globalAmbient = gl_LightModel.ambient * gl_FrontMaterial.ambient;
       ambient = gl_LightSource[0].ambient * gl_FrontMaterial.ambient;
    
       normal = normalize(N);
       
       //Direction from fragment to the light
       lightDir = normalize(vec3(gl_LightSource[0].position - v));
    
       
       diffuse =  gl_LightSource[0].diffuse * gl_FrontMaterial.diffuse * (0.5 * dot(normal,lightDir) + 0.5);
       
    
       float distanceToLight = length(vec3(gl_LightSource[0].position - v));
       float attenuation = 1.0 / (1.0 + k * distanceToLight * distanceToLight);
       gl_FragColor = globalAmbient + ambient + attenuation * diffuse;
    
    }
    
    //jogl program
    void display(GLAutoDrawable drawable){
      GL2 gl = drawable.getGL().getGL2();
    
      gl.glClear(GL2.GL_COLOR_BUFFER_BIT | GL2.GL_DEPTH_BUFFER_BIT);
      gl.glMatrixMode(GL2.GL_MODELVIEW);
      gl.glLoadIdentity(); 
      glu.gluLookAt (0, 0, 5, 0.0, 0.0, 0.0, 0, 1, 0);
      gl.glUseProgram(shader1);
      
      float k = 0.1f; //or whatever value you want
      int loc = gl.glGetUniformLocation(shader1, "k");
      gl.glUniform1f(loc, k);
      
      glut.glutSolidTeapot(0.5);    
      
      gl.glUseProgram(0);
      gl.glTranslated(1,0,0);
      glut.glutSolidSphere(0.5,20,20);   
    }
    
    
Question 6:

The model of lighting we are using (diffuse + specular + ambient) is very limited. Discuss what kinds of lighting effects are being neglected (there are many). How important are they?

We will discuss some extensions in later lectures.

These algorithms ignore:

  * shadows cast when one object blocks the light from reaching another
  * reflections of other objects in the scene (mirrors and other shiny surfaces)
  * refraction and transparency
  * indirect illumination: light that bounces off other surfaces before reaching the object (color bleeding)
  * area light sources and the soft shadows they produce

and many more effects. All of these involve interactions between objects, which our local model ignores; how important they are depends on how much realism the application needs.

We will look at ways to create shadows and reflections of the environment by using textures; however, solving these problems properly requires more computationally expensive techniques such as radiosity or ray-tracing, which we will look at later in the course.