My model looks stretched because of corrupted data in vertex shader attribute location
Here's the vertex shader code:
#version 330 core
layout (location = 0) in vec3 vertPos;
layout (location = 1) in vec3 vertNormal;
layout (location = 2) in vec2 texCoord;
layout (location = 3) in vec4 boneWeigths;
layout (location = 4) in ivec4 boneIDs;
out vec3 vNormal;
out vec3 fragPos;
out vec2 fragTexCoord;
const int MAX_BONES = 100;
uniform mat4 MVP;
uniform mat4 M;
uniform mat4 boneTransforms[MAX_BONES];
void main()
{
mat4 boneTx = boneTransforms[boneIDs[0]] * boneWeigths[0]
+ boneTransforms[boneIDs[1]] * boneWeigths[1]
+ boneTransforms[boneIDs[2]] * boneWeigths[2]
+ boneTransforms[boneIDs[3]] * boneWeigths[3];
vec4 pos = boneTx * vec4(vertPos, 1.0f);
gl_Position = MVP * pos;
vec4 normal = boneTx * vec4(vertNormal, 0.0f);
vNormal = normalize(vec3(M * normal));
fragPos = vec3(M * pos);
fragTexCoord = vec2(texCoord.x, texCoord.y);
}
The problem seems to be corrupted data in boneIDs (boneIDs data is fine on CPU, but getting corrupted data in shader). I tried hard-coding boneIDs data in shader, and that works fine.
Here's the code for VAO:
// create buffers/arrays
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
glGenBuffers(1, &EBO);
glBindVertexArray(VAO);
// load data into vertex buffers
glBindBuffer(GL_ARRAY_BUFFER, VBO);
unsigned int sz = sizeof(BoneVertex);
glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(BoneVertex), &vertices[0], GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size() * sizeof(unsigned int), &indices[0], GL_STATIC_DRAW);
// set the vertex attribute pointers
// vertex Positions
glEnableVertexAttribArray(0);
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)0);
// vertex normals
glEnableVertexAttribArray(1);
glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(3 * sizeof(float)));
// vertex texture coords
glEnableVertexAttribArray(2);
glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(6 * sizeof(float)));
// bone weights
glEnableVertexAttribArray(3);
glVertexAttribPointer(3, 4, GL_FLOAT, GL_FALSE, sizeof(BoneVertex), (void*)(8 * sizeof(float)));
// bone ids
glEnableVertexAttribArray(4);
glVertexAttribPointer(4, 4, GL_INT, GL_FALSE, sizeof(BoneVertex), (void*)(12 * sizeof(float)));
glBindVertexArray(0);
BoneVertex structure:
struct BoneVertex
{
glm::vec3 position;
glm::vec3 normal;
glm::vec2 textureCoords;
glm::vec4 boneWeights;
glm::ivec4 boneIDs;
}
This is weird, because the first 3 attributes data seems fine. The problem is with boneIDs and boneWeights Is this somehow related to padding, and how data is arranged in a structure? Or am I missing something else?
Thanks
boneIDs
is a vertex shader input with an integral data type:
layout (location = 4) in ivec4 boneIDs;
If you want to specify the generic vertex attribute data for an integral attribute, then you have to use glVertexAttribIPointer
(focus on I
) rather than glVertexAttribPointer
(see glVertexAttribPointer
).
Note, the type argument doesn't specify the type of the target attribute, it specifies the element type of the source data array. glVertexAttribPointer
converts the source data array to floating point values, but glVertexAttribIPointer
specifies the array for integral target attributes.
glVertexAttribPointer(4, 4, GL_INT, GL_FALSE, sizeof(BoneVertex), (void*)(12 * sizeof(float)));
glVertexAttribIPointer(4, 4, GL_INT, sizeof(BoneVertex), (void*)(12 * sizeof(float)));