Tags: c++, opengl, glsl, freeglut, glu

OpenGL 3.1 lighting messed up, using Phong shading


After many painful hours of trying to figure out why my lighting is messed up, I am still at a loss.

The triangles are all facing the right way (backface culling does not cause any of my triangles to disappear).

I calculate my own normals in order to interpolate them for lighting; all the triangles on the same face also get the same normal.
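For reference, the per-face normal I am going for is just the cross product of two edges of the triangle. A minimal sketch with glm (faceNormal is only an illustrative helper; my real code works on the raw GLfloat buffers in calculateCross below and currently does not normalize the result):

    #include <glm/glm.hpp>

    // Face normal shared by all three vertices of triangle (a, b, c).
    glm::vec3 faceNormal(const glm::vec3& a, const glm::vec3& b, const glm::vec3& c)
    {
        glm::vec3 e1 = b - a;                      // first edge of the triangle
        glm::vec3 e2 = c - a;                      // second edge of the triangle
        return glm::normalize(glm::cross(e1, e2)); // unit-length face normal
    }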

If anyone has any thoughts, they would be appreciated.

I am definitely new to OpenGL, so that is probably obvious in my code.

(Screenshot: incorrect lighting)

Here are my shaders:

and some more....

void display()
{

    setMatrices(); // initialize matrices
    // Use our shader
    //glUseProgram(programID);

    glClearColor(0.0f, 0.0f, 0.3f, 0.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 


    // 2nd attribute buffer : colors
    glEnableVertexAttribArray(1);
    glBindBuffer(GL_ARRAY_BUFFER, colorbuffer);
    glVertexAttribPointer(
        1,                                // attribute. No particular reason for 1, but must match the layout in the shader.
        3,                                // size
        GL_FLOAT,                         // type
        GL_FALSE,                         // normalized?
        0,                                // stride
        (void*)0                          // array buffer offset
    );

    glEnableVertexAttribArray(0); // 1st attribute buffer : vertices

    // enum platosShapes{tet, cube, octah, dodec, icos, sphere};
    switch(shapeInUse)
    {
        case tet:
            {

                glBindBuffer(GL_ARRAY_BUFFER, TetraVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );

                glDrawArrays(GL_TRIANGLES, 0, 4*3); // tetrahedron: 4 triangles * 3 vertices
            }
            break;
        case cube:
            {

                //GLuint cubeNormal = glGetAttribLocation( programID, "vNormal" ); 
                glEnableVertexAttribArray( cubeNormal );
                glVertexAttribPointer( cubeNormal, 3, GL_FLOAT, GL_FALSE, 0,
                (const GLvoid *) (sizeof(CubeNormalBufferData)) );
                //glDisableVertexAttribArray( cubeNormal );



                glBindBuffer(GL_ARRAY_BUFFER, CubeVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );

                glDrawArrays(GL_TRIANGLES, 0, 12*3); // cube: 12 triangles * 3 vertices
            }
            break;
        case octah:
            {
                glBindBuffer(GL_ARRAY_BUFFER, OctaVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );

                glDrawArrays(GL_TRIANGLES, 0, 8*3); // octahedron: 8 triangles * 3 vertices
            }
            break;
        case dodec:
            {
                glBindBuffer(GL_ARRAY_BUFFER, DodecaVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );

                glDrawArrays(GL_TRIANGLE_FAN, 0, 5 * 6); // dodecahedron faces drawn as triangle fans
                glDrawArrays(GL_TRIANGLE_FAN, (5 * 6) + 1, 30);
                //glutSolidDodecahedron();
                //glDrawArrays(GL_TRIANGLE_STRIP,0,5*12);
            }
            break;
        case icos:
            {
                glBindBuffer(GL_ARRAY_BUFFER, icosaVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );

                glDrawArrays(GL_TRIANGLES, 0, 3*20); // icosahedron: 20 triangles * 3 vertices
            }
            break;
        case sphere:
            {
                glBindBuffer(GL_ARRAY_BUFFER, sphereVertexbuffer);
                glVertexAttribPointer(
                    0,                  // attribute 0. No particular reason for 0, but must match the layout in the shader.
                    3,                  // size
                    GL_FLOAT,           // type
                    GL_FALSE,           // normalized?
                    0,                  // stride
                    (void*)0            // array buffer offset
                );
                //glDrawElements(GL_TRIANGLES, cnt2, GL_UNSIGNED_INT, 0)
                glDrawArrays(GL_TRIANGLE_FAN, 0, 100);
            }
    }

    glDisableVertexAttribArray(0);
    glFlush();

}

and some more........

void calculateNormals(GLfloat bufData[], GLfloat normBufData[], int size) // probably works
{
    int count = 0;
    GLfloat temp[9];

    for(int i = 0; i < size; i++)
    {

        temp[count] = bufData[i];
        count++;

        if((i+1) % 9 == 0)
        {
            count = 0;

            //for(int i = 0; i < 9; i++)
            //{
            //  cout << temp[i] << "!,";
            //  if((i + 1) % 3 == 0)
            //      cout << "\n";
            //}

            calculateCross(temp, normBufData);
        }
    }

    printNormals(normBufData, size);
}
void calculateCross(GLfloat bufData[], GLfloat normBufData[]) // probably works
{
    static int counter = 0; // need to reset in between new buffers

    glm::vec3 C1;
    glm::vec3 C2;
    glm::vec3 normal;

    //cout << bufData[0] << "," << bufData[1] << "," << bufData[2] << " buf 1 \n"; 
    //cout << bufData[3] << "," << bufData[4] << "," << bufData[5] << " buf 2 \n"; 
    //cout << bufData[6] << "," << bufData[7] << "," << bufData[8] << " buf 3 \n\n"; 



    //C1.x = bufData[3] - bufData[0];
    //C1.y = bufData[4] - bufData[1];
    //C1.z = bufData[5] - bufData[2];

    //C2.x = bufData[6] - bufData[0];
    //C2.y = bufData[7] - bufData[1];
    //C2.z = bufData[8] - bufData[2];

    C1.x = bufData[0] - bufData[3];
    C1.y = bufData[1] - bufData[4];
    C1.z = bufData[2] - bufData[5];

    C2.x = bufData[0] - bufData[6];
    C2.y = bufData[1] - bufData[7];
    C2.z = bufData[2] - bufData[8];

    //C2.x = bufData[6] - bufData[0];
    //C2.y = bufData[7] - bufData[1];
    //C2.z = bufData[8] - bufData[2];

    //cout << C1.x << " 1x \n";
    //cout << C1.y << " 1y \n";
    //cout << C1.z << " 1z \n";

    //cout << C2.x << " 2x \n";
    //cout << C2.y << " 2y \n";
    //cout << C2.z << " 2z \n";

    normal = glm::cross(C1, C2);

    //cout << "\nNORMAL : " << normal.x << "," << normal.y << "," << normal.z << " counter = " << counter << "\n";

    // write the (unnormalized) face normal once for each of the triangle's three vertices
    for(int j = 0; j < 3; j++)
    {
        normBufData[counter]     = normal.x;
        normBufData[counter + 1] = normal.y;
        normBufData[counter + 2] = normal.z;

        counter += 3;
    }
}

and main.....

int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGB);
    glutInitWindowSize(700, 700);    // Window Size

    glutCreateWindow("Michael - Lab 3");
    glutDisplayFunc(display);
    glutTimerFunc(10, timeFucn, 10);
    glutIdleFunc(Idle);
    glutKeyboardFunc(keyboard);
    glewExperimental = GL_TRUE;
    glewInit();

    glEnable(GL_CULL_FACE);
    glEnable(GL_DEPTH_TEST); // Enable depth test
    glDepthFunc(GL_LESS); // Accept fragment if it is closer to the camera than the former one

    GenerateSphere(); // this function generates points for the sphere

    programID = LoadShader( "VertexShader.glsl", "FragmentShader.glsl" ); // Create and compile our GLSL program from the shaders

    setBuffers(); // initialize buffers

    calculateNormals(CubeBufferData, CubeNormalBufferData, 108); // calculate normals
    //printNormals(CubeNormalBufferData);

    glutMainLoop();
}

Solution

  • You forgot to bind the buffer object that holds the normals before calling glVertexAttribPointer( cubeNormal, 3, ... );. As a result, the normal data is actually read from whatever buffer is still bound (here, the color buffer), which produces the strange Phong shading you are seeing. See the sketch below.

    BTW, nice coding style :)
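
    A minimal sketch of what the cube branch should look like, assuming the normals live in their own VBO (CubeNormalbuffer is a hypothetical name; use whatever buffer setBuffers() actually fills from CubeNormalBufferData):

        // Bind the VBO that actually holds the normals BEFORE describing the attribute.
        glBindBuffer(GL_ARRAY_BUFFER, CubeNormalbuffer);  // hypothetical VBO filled from CubeNormalBufferData
        glEnableVertexAttribArray(cubeNormal);
        glVertexAttribPointer(cubeNormal, 3, GL_FLOAT, GL_FALSE, 0, (void*)0); // offset 0 into that VBO, not sizeof(...)

        // Then bind the position VBO and describe attribute 0 as before.
        glBindBuffer(GL_ARRAY_BUFFER, CubeVertexbuffer);
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);

        glDrawArrays(GL_TRIANGLES, 0, 12 * 3);

    Each glVertexAttribPointer call captures whatever buffer is bound to GL_ARRAY_BUFFER at that moment, which is why the bind has to come first.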