I'm trying to draw a terrain from a heightmap. Using std::vector, I build an array of vertex positions and an array of indices into that array, so that I can draw the terrain with glDrawElements(GL_TRIANGLE_STRIP, ...). However, for some reason my simple code is not working. Please note that I am using GLM as the math library.
Here is the setup of the VAO, VBO, and EBO. I double-checked the vertex data and the index data, and they are both correct.
// positions and indices
vector<glm::vec3> positions;
vector<int> indices;
for (int z = 0; z < gridZNum; z++) {
    for (int x = 0; x < gridXNum; x++) {
        positions.push_back(glm::vec3(x, _terrain->getHeight(x, z), z));
        //normals.push_back(_terrain->getNormal(x, z));
        indices.push_back(z*gridXNum + (x+1));
        indices.push_back(z*gridXNum + (x+1) + gridXNum);
        if (x == gridXNum-1 && z != gridZNum-1) {
            indices.push_back(z*gridXNum + 2*gridXNum);
            indices.push_back((z+1)*gridXNum + 1);
        }
    }
}
// VAO, VBO configuration
unsigned int VAO, VBO, EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindBuffer(GL_ARRAY_BUFFER, VAO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ARRAY_BUFFER, positions.size() * sizeof(glm::vec3), &positions[0], GL_STATIC_DRAW);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(int), &indices[0], GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), 0);
Finally, this is my draw function:
//Inside the render loop I call
drawScene(shader, VAO, indices.size());
//......
void drawScene(Shader &shader, unsigned int VAO, unsigned int numIndices) // draw the scene with the parameter data
{
    glm::mat4 model;
    //model = glm::translate(model, glm::vec3(-(_terrain->width())/2, 0.0f, -(_terrain->length())/2));
    glm::mat4 view = camera.GetViewMatrix();
    glm::mat4 projection = glm::perspective(glm::radians(camera.Zoom), (float)SCR_WIDTH / (float)SCR_HEIGHT, 0.1f, 100.0f);
    shader.use();
    shader.setMat4("model", model);
    shader.setMat4("view", view);
    shader.setMat4("projection", projection);
    glBindVertexArray(VAO);
    glDrawElements(GL_TRIANGLE_STRIP, numIndices, GL_UNSIGNED_INT, 0);
    glBindVertexArray(0);
}
I'm assuming something is wrong with one of the parameters, such as the size argument, but I can't find anything wrong at the moment.
You have to bind the vertex array object before you define an array of generic vertex attribute data, because glVertexAttribPointer stores the attribute specification, together with a reference to the buffer currently bound to GL_ARRAY_BUFFER, in the VAO that is currently bound. See Vertex Array Object:
glBindVertexArray(VAO);
Furthermore, the name of the vertex buffer object is VBO, not VAO:
glBindBuffer(GL_ARRAY_BUFFER, VBO);
Change your code like this:
unsigned int VAO, VBO, EBO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);                     // bind the VAO first; the state set below is stored in it
glBindBuffer(GL_ARRAY_BUFFER, VBO);         // bind the vertex buffer (VBO, not VAO)
glBufferData(GL_ARRAY_BUFFER, positions.size() * sizeof(glm::vec3), &positions[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO); // the GL_ELEMENT_ARRAY_BUFFER binding becomes part of the VAO state
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(int), &indices[0], GL_STATIC_DRAW);
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), 0);
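With that change, your drawScene can stay as it is: binding the VAO with glBindVertexArray(VAO) at draw time restores the attribute setup and the element buffer binding, so glDrawElements finds everything it needs. If you want to unbind the objects after the setup, mind the order; a minimal sketch, using the same names as above:

// Optional cleanup after the setup. Unbind the VAO first: unbinding
// GL_ELEMENT_ARRAY_BUFFER while the VAO is still bound would be recorded
// in the VAO and break the later glDrawElements call.
glBindVertexArray(0);
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);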