I am making a game with OpenGL and I want to build the world out of little tiles. Each tile is an std::map stored in a 2D array, and the map holds the tile's color. Every frame the game loops through the array and draws all the tiles.
However, when the tiles are drawn, it creates a weird effect:
This is the code:
/*
* GL01Hello.cpp: Test OpenGL C/C++ Setup
*/
#include <iostream>
#include <windows.h> // For MS Windows
#include <GL/glew.h>
#include <GL/glut.h> // GLUT, includes glu.h and gl.h
// has to be included before gl.h, or any header that includes gl.h
#include <GL/freeglut.h>
#include "pixel.cpp"
/* Handler for window-repaint event. Call back when the window first appears and
whenever the window needs to be re-painted. */
float X = 0;
//creates the game loop
void Timer(int) {
    //std::cout << "No need to store this string";
    X = X + 0.001;
    glutPostRedisplay();
    glutTimerFunc(15, Timer, 0);
}
//draws the graphics
void display(void) {
    glClearColor(0.25, 0.25, 1, 0);
    glClear(GL_COLOR_BUFFER_BIT);
    DrawPixels();
    glutSwapBuffers();
}
/* Main function: GLUT runs as a console application starting at main() */
int main(int argc, char** argv) {
    createLevel();
    glutInit(&argc, argv);
    int winW = glutGet(GLUT_WINDOW_WIDTH);
    int winH = glutGet(GLUT_WINDOW_HEIGHT);
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE);
    glutInitWindowSize(winW, winH);
    glutInitWindowPosition(100, 100);
    glutCreateWindow("OpenGL Setup Test");
    glutFullScreen();
    glutDisplayFunc(display);
    glutTimerFunc(15, Timer, 0);
    glutMainLoop();
    return 0;
}
// pixel.cpp (included by the main file above)
#include <map>
#include <string>
//gets the data needed
GLfloat PixelWidth = 0.005;
GLfloat PixelHeight = 0.005;
GLint columbAmount = 2/PixelWidth;
GLint rowAmount = 2/PixelHeight;
//array of tiles
std::map<std::string,GLfloat> level[400][400];
//generates the array of little squares
void createLevel() {
    //color of the top row
    GLfloat colorIntensity = 1;
    //how much the color changes per row
    GLfloat DeltaColorIntensity = 0.0025;
    //2 for loops to add the tiles to the array
    for (int i = 0; i < columbAmount/2; i++) {
        for (int j = 0; j < rowAmount; j++) {
            level[i + 200][j]["color"] = colorIntensity;
        }
        //subtracts from the color variable after each row to create a gradient
        colorIntensity = colorIntensity - DeltaColorIntensity;
    }
}
//draws them onscreen
void DrawPixels(void) {
    //loops through the array
    for (int i = 0; i < columbAmount/2; i++) {
        for (int j = 0; j < rowAmount; j++) {
            //calculates the tile's x and y position
            GLfloat CalculateX = j*PixelWidth;
            GLfloat CalculateY = i*PixelHeight;
            //sets the tile to the correct color
            glColor3f(level[i + 200][j]["color"], level[i + 200][j]["color"], 0);
            //draws the squares
            glBegin(GL_POLYGON);
            glVertex3f(-1 + CalculateX, 0 - CalculateY, 0.0);
            glVertex3f(-0.995 + CalculateX, 0.0, 0.0);
            glVertex3f(-0.995 + CalculateX, 0.005 - CalculateY, 0.0);
            glVertex3f(0.0, 0.005 - CalculateY, 0.0);
            glEnd();
        }
    }
}
I want the tiles to have a clean gradient, where each row of tiles is darker than the previous one, but instead it creates this weird look.
For every tile you draw, two of the eight coordinates are constant:
glVertex3f(-1 + CalculateX, 0 - CalculateY , 0.0);
glVertex3f(-0.995 + CalculateX, 0.0, 0.0); // <---- Y is constant
glVertex3f(-0.995 + CalculateX, 0.005 - CalculateY, 0.0);
glVertex3f(0.0, 0.005 - CalculateY, 0.0); // <---- X is constant
This results in some warped polygons being drawn rather than rectangular tiles. You should fix it by correctly calculating the coordinates. I'd usually do it as follows:
// calculate the tile top-left and bottom-right corners:
GLfloat x0 = -1 + j*PixelWidth, y0 = -i*PixelHeight;
GLfloat x1 = -1 + (j+1)*PixelWidth, y1 = -(i+1)*PixelHeight;
Then you can use these neatly in the glVertex calls (here in CCW order):
glVertex2f(x0, y0);
glVertex2f(x0, y1);
glVertex2f(x1, y1);
glVertex2f(x1, y0);
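For completeness, here is a sketch of what DrawPixels could look like with these corners plugged in. It keeps your level array, PixelWidth/PixelHeight and the i + 200 row offset; the only other change is drawing with GL_QUADS instead of GL_POLYGON, since each tile is a plain rectangle (GL_POLYGON would work just as well here):
//draws the tiles using the corrected corner calculation
void DrawPixels(void) {
    for (int i = 0; i < columbAmount/2; i++) {
        for (int j = 0; j < rowAmount; j++) {
            //top-left (x0, y0) and bottom-right (x1, y1) corners of the tile
            GLfloat x0 = -1 + j*PixelWidth,     y0 = -i*PixelHeight;
            GLfloat x1 = -1 + (j+1)*PixelWidth, y1 = -(i+1)*PixelHeight;
            //same gradient color as before
            GLfloat c = level[i + 200][j]["color"];
            glColor3f(c, c, 0);
            //one rectangular tile, vertices in CCW order
            glBegin(GL_QUADS);
            glVertex2f(x0, y0);
            glVertex2f(x0, y1);
            glVertex2f(x1, y1);
            glVertex2f(x1, y0);
            glEnd();
        }
    }
}
With that change every tile is an axis-aligned rectangle and each row lines up with the one above it, so the per-row colorIntensity from createLevel shows up as the clean top-to-bottom gradient you were after.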