I'm running this on Visual Studio 2022, with the Win32 (x86) platform. The same bug occurs on x64, though, I've tested it.
I have found no errors in my code, but still I've tried adding compile error checks and printing errors to the console using std::cout
yet nothing pops up on it (the console does work with cout
).
When I remove EVERYTHING from // compile shaders
to // create buffers n stuff
and remove EVERYTHING relating to the VBOs/VAOs, it works perfectly fine! But this is clearly a problem, because I'm pretty sure I can't render a triangle without them.
Why this is happening and how can it be solved?
main.cpp:
#include <windows.h>
#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <iostream>
using std::cout;
const char* vshSrc =
"#version 330 core\n"
"layout (location = 0) in vec3 aPos;\n"
"void main()\n"
"{\n"
" gl_Position = vec4(aPos.x, aPos.y, aPos.z, 1.0);\n"
"}\0";
const char* fshSrc =
"#version 330 core\n"
"out vec4 FragColor;\n"
"void main()\n"
"{\n"
" FragColor = vec4(1.0f, 0.8f, 0.4f, 1.0f);\n"
"}\0";
static void debugMode()
{
AllocConsole();
FILE* file;
freopen_s(&file, "CONOUT$", "w", stdout);
freopen_s(&file, "CONOUT$", "w", stderr);
}
static void processInput(GLFWwindow* window)
{
if (glfwGetKey(window, GLFW_KEY_ESCAPE) == GLFW_PRESS)
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
static void framebufferSize(GLFWwindow* window, int width, int height)
{
glViewport(0, 0, width, height);
}
_Use_decl_annotations_ int APIENTRY WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, PSTR lpCmdLine, int nShowCmd)
{
if (!glfwInit())
return -1;
// window size and position
const int vWidth = 1280;
const int vHeight = 720;
const int vX = (GetSystemMetrics(SM_CXSCREEN) / 2) - (vWidth / 2);
const int vY = (GetSystemMetrics(SM_CYSCREEN) / 2) - (vHeight / 2);
glfwWindowHint(GLFW_POSITION_X, vX);
glfwWindowHint(GLFW_POSITION_Y, vY);
// create window
GLFWwindow* window = glfwCreateWindow(vWidth, vHeight, "Game", nullptr, nullptr);
if (!window)
{
glfwTerminate();
return -1;
}
// buncha settings
glfwMakeContextCurrent(window);
glfwSetFramebufferSizeCallback(window, framebufferSize);
glfwSwapInterval(1);
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_LESS);
// comment below to remove console obv
debugMode();
// compile shaders
unsigned int vsh = glCreateShader(GL_VERTEX_SHADER);
glShaderSource(vsh, 1, &vshSrc, NULL);
glCompileShader(vsh);
unsigned int fsh = glCreateShader(GL_FRAGMENT_SHADER);
glShaderSource(fsh, 1, &fshSrc, NULL);
glCompileShader(fsh);
// create shader program
unsigned int sProgram = glCreateProgram();
glAttachShader(sProgram, vsh);
glAttachShader(sProgram, fsh);
glLinkProgram(sProgram);
glDeleteShader(vsh);
glDeleteShader(fsh);
// get verts
float verts[] =
{
-0.5f, -0.5f, 0.0f,
0.5f, -0.5f, 0.0f,
0.0f, 0.5f, 0.0f
};
// create buffers n stuff
unsigned int VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glBindVertexArray(VAO);
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
glEnableVertexAttribArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindVertexArray(0);
// the loop ig
while (!glfwWindowShouldClose(window))
{
processInput(window);
// render
glClearColor(0.1f, 0.35f, 0.35f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(sProgram);
glBindVertexArray(VAO);
glDrawArrays(GL_TRIANGLES, 0, 3);
// swap buffers and poll events
glfwSwapBuffers(window);
glfwPollEvents();
}
// kill everyone and leave without elaborating
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
glDeleteProgram(sProgram);
glfwTerminate();
FreeConsole();
return 0;
}
The problem is that GLEW is never initialized. Therefore, the function pointers to OpenGL >1.1 functions are not set. In this case, the program tries to call glCreateShader
, which is an uninitialized function pointer, and crashes.
The solution is to call glewInit()
after glfwMakeContextCurrent(window);
. Preferably with error checking, as in this example from the GLEW documentation:
GLenum err = glewInit();
if (GLEW_OK != err)
{
/* Problem: glewInit failed, something is seriously wrong. */
fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
...
}
Some GPU drivers consider some OpenGL functions to be experimental and do not expose them by default. If this happens to be problem, add glewExperimental = GL_TRUE;
before calling glewInit()
.
The program should now render the background color, but not the triangle. This is due to depth testing being enabled, while the depth buffer is never cleared. To clear the depth buffer as well as the color buffer, use this call:
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
Or in this case, you could also disable depth testing altogether. Now the triangle renders as well: