I have a game project coded in C++ and Lua (I know the title mentions 'Python'; don't worry about it), and in it I apply a fragment (not vertex!) shader to the game screen. Since part of my project was not made by me (it's open source), the system for applying fragment shaders with OpenGL was already written by others, but not the 2xSal fragment shader itself. I consider myself a beginner in this subject. The only piece of code I "made" was the fragment shader (I don't know if it's necessary to mention, but I based my code on the code at the following link: https://github.com/libretro/glsl-shaders/blob/master/xsal/shaders/2xsal-level2-pass2.glsl).
These are my changes to make the 2xSal fragment shader work (in my game):
#define COMPAT_VARYING varying
#define FragColor gl_FragColor
#define COMPAT_TEXTURE texture2D
#ifdef GL_ES
#ifdef GL_FRAGMENT_PRECISION_HIGH
precision highp float;
#else
precision mediump float;
#endif
#define COMPAT_PRECISION mediump
#else
#define COMPAT_PRECISION
#endif
// uniform COMPAT_PRECISION int FrameDirection; // Not in use
// uniform COMPAT_PRECISION int FrameCount; // Not in use
// uniform COMPAT_PRECISION vec2 OutputSize; // Not in use
// uniform COMPAT_PRECISION vec2 TextureSize;
// uniform COMPAT_PRECISION vec2 InputSize;
uniform sampler2D Texture;
//COMPAT_VARYING vec4 TEX0;
// in variables go here as COMPAT_VARYING whatever
// fragment compatibility #defines
#define Source Texture
//#define vTexCoord TEX0.xy
COMPAT_VARYING vec2 v_TexCoord;
#define InputSize vec2(800.0, 608.0) // Width and height in pixels of game screen
#define SourceSize vec4(InputSize, 1.0 / InputSize) //either TextureSize or InputSize
// #define outsize vec4(OutputSize, 1.0 / OutputSize) // Not in use
void main()
{
    vec2 tex = v_TexCoord;
    //vec2 texsize = IN.texture_size;
    float dx = 0.25*SourceSize.z;
    float dy = 0.25*SourceSize.w;
    vec3 dt = vec3(1.0, 1.0, 1.0);
    vec4 yx = vec4(dx, dy, -dx, -dy);
    vec4 xh = yx*vec4(3.0, 1.0, 3.0, 1.0);
    vec4 yv = yx*vec4(1.0, 3.0, 1.0, 3.0);
    vec3 c11 = COMPAT_TEXTURE(Source, tex).xyz;
    vec3 s00 = COMPAT_TEXTURE(Source, tex + yx.zw).xyz;
    vec3 s20 = COMPAT_TEXTURE(Source, tex + yx.xw).xyz;
    vec3 s22 = COMPAT_TEXTURE(Source, tex + yx.xy).xyz;
    vec3 s02 = COMPAT_TEXTURE(Source, tex + yx.zy).xyz;
    vec3 h00 = COMPAT_TEXTURE(Source, tex + xh.zw).xyz;
    vec3 h20 = COMPAT_TEXTURE(Source, tex + xh.xw).xyz;
    vec3 h22 = COMPAT_TEXTURE(Source, tex + xh.xy).xyz;
    vec3 h02 = COMPAT_TEXTURE(Source, tex + xh.zy).xyz;
    vec3 v00 = COMPAT_TEXTURE(Source, tex + yv.zw).xyz;
    vec3 v20 = COMPAT_TEXTURE(Source, tex + yv.xw).xyz;
    vec3 v22 = COMPAT_TEXTURE(Source, tex + yv.xy).xyz;
    vec3 v02 = COMPAT_TEXTURE(Source, tex + yv.zy).xyz;
    float m1 = 1.0/(dot(abs(s00 - s22), dt) + 0.00001);
    float m2 = 1.0/(dot(abs(s02 - s20), dt) + 0.00001);
    float h1 = 1.0/(dot(abs(s00 - h22), dt) + 0.00001);
    float h2 = 1.0/(dot(abs(s02 - h20), dt) + 0.00001);
    float h3 = 1.0/(dot(abs(h00 - s22), dt) + 0.00001);
    float h4 = 1.0/(dot(abs(h02 - s20), dt) + 0.00001);
    float v1 = 1.0/(dot(abs(s00 - v22), dt) + 0.00001);
    float v2 = 1.0/(dot(abs(s02 - v20), dt) + 0.00001);
    float v3 = 1.0/(dot(abs(v00 - s22), dt) + 0.00001);
    float v4 = 1.0/(dot(abs(v02 - s20), dt) + 0.00001);
    vec3 t1 = 0.5*(m1*(s00 + s22) + m2*(s02 + s20))/(m1 + m2);
    vec3 t2 = 0.5*(h1*(s00 + h22) + h2*(s02 + h20) + h3*(h00 + s22) + h4*(h02 + s20))/(h1 + h2 + h3 + h4);
    vec3 t3 = 0.5*(v1*(s00 + v22) + v2*(s02 + v20) + v3*(v00 + s22) + v4*(v02 + s20))/(v1 + v2 + v3 + v4);
    float k1 = 1.0/(dot(abs(t1 - c11), dt) + 0.00001);
    float k2 = 1.0/(dot(abs(t2 - c11), dt) + 0.00001);
    float k3 = 1.0/(dot(abs(t3 - c11), dt) + 0.00001);
    FragColor = vec4((k1*t1 + k2*t2 + k3*t3)/(k1 + k2 + k3), 1.0);
}
That said, what I need now has nothing to do with my game, but with the code itself that I showed you. I need to apply this fragment shader to an input image read from a given path and save the output to another (new) image file, using Python, but I DON'T WANT TO DISPLAY THEM inside a window (like with GLFW or GLUT).
The idea sounds simple, but I can't figure out what to do or how to do it. I just need to save the output of what the fragment shader changed in the input image.
I don't need a vertex shader. I don't need a window to display the result; I don't need a window at all. I just want to save the result to an image file.
I found some Python code which displays an input image in a GLFW window. I thought that, little by little, I would get to where I want, but I can't figure out how.
Also, I noticed that the Python code has an issue where it stretches or shrinks the image, because the vertices form a square and the input image texture is drawn inside it. So I think the current code would only work correctly for square images (see the aspect-ratio sketch after the code below).
# # Requirements # #
# Execute these commands on terminal:
# pip install glfw
# pip install pyopengl
# pip install pyrr
# pip install pillow
import glfw
from OpenGL.GL import *
import OpenGL.GL.shaders
import ctypes
import numpy
from PIL import Image
def main():
    # initialize glfw
    if not glfw.init():
        return
    window = glfw.create_window(800, 600, "My OpenGL window", None, None)
    if not window:
        glfw.terminate()
        return
    glfw.make_context_current(window)

    #        positions        colors          texture coords
    quad = [-0.5, -0.5, 0.0,  1.0, 0.0, 0.0,  0.0, 0.0,
             0.5, -0.5, 0.0,  0.0, 1.0, 0.0,  1.0, 0.0,
             0.5,  0.5, 0.0,  0.0, 0.0, 1.0,  1.0, 1.0,
            -0.5,  0.5, 0.0,  1.0, 1.0, 1.0,  0.0, 1.0]
    quad = numpy.array(quad, dtype=numpy.float32)

    indices = [0, 1, 2,
               2, 3, 0]
    indices = numpy.array(indices, dtype=numpy.uint32)

    print(quad.itemsize * len(quad))
    print(indices.itemsize * len(indices))
    print(quad.itemsize * 8)

    vertex_shader = """
    #version 330
    layout(location = 0) in vec3 position;
    layout(location = 1) in vec3 color;
    layout(location = 2) in vec2 inTexCoords;
    out vec3 newColor;
    out vec2 outTexCoords;
    void main()
    {
        gl_Position = vec4(position, 1.0f);
        newColor = color;
        outTexCoords = inTexCoords;
    }
    """

    fragment_shader = """
    #version 330
    in vec3 newColor;
    in vec2 outTexCoords;
    out vec4 outColor;
    uniform sampler2D samplerTex;
    void main()
    {
        outColor = texture(samplerTex, outTexCoords) * vec4(newColor, 1.0f);
    }
    """

    shader = OpenGL.GL.shaders.compileProgram(
        OpenGL.GL.shaders.compileShader(vertex_shader, GL_VERTEX_SHADER),
        OpenGL.GL.shaders.compileShader(fragment_shader, GL_FRAGMENT_SHADER))

    VBO = glGenBuffers(1)
    glBindBuffer(GL_ARRAY_BUFFER, VBO)
    glBufferData(GL_ARRAY_BUFFER, quad.itemsize * len(quad), quad, GL_STATIC_DRAW)

    EBO = glGenBuffers(1)
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, EBO)
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.itemsize * len(indices), indices, GL_STATIC_DRAW)

    # position = glGetAttribLocation(shader, "position")
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, quad.itemsize * 8, ctypes.c_void_p(0))
    glEnableVertexAttribArray(0)
    # color = glGetAttribLocation(shader, "color")
    glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, quad.itemsize * 8, ctypes.c_void_p(12))
    glEnableVertexAttribArray(1)
    # texture_coords = glGetAttribLocation(shader, "inTexCoords")
    glVertexAttribPointer(2, 2, GL_FLOAT, GL_FALSE, quad.itemsize * 8, ctypes.c_void_p(24))
    glEnableVertexAttribArray(2)

    texture = glGenTextures(1)
    glBindTexture(GL_TEXTURE_2D, texture)
    # texture wrapping params
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT)
    # texture filtering params
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)

    image = Image.open("res/test.jpg")
    # img_data = numpy.array(list(image.getdata()), numpy.uint8)
    flipped_image = image.transpose(Image.FLIP_TOP_BOTTOM)
    img_data = flipped_image.convert("RGBA").tobytes()
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width, image.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, img_data)
    # print(image.width, image.height)

    glUseProgram(shader)
    glClearColor(0.2, 0.3, 0.2, 1.0)

    while not glfw.window_should_close(window):
        glfw.poll_events()
        glClear(GL_COLOR_BUFFER_BIT)
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, None)
        glfw.swap_buffers(window)

    glfw.terminate()


if __name__ == "__main__":
    main()
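Regarding the stretching mentioned above, here is a minimal sketch (not part of the found code) of how the quad could be scaled in normalized device coordinates so the image keeps its aspect ratio inside the 800x600 window; the names win_w, win_h, sx and sy are introduced here only for illustration:
# scale factors that preserve the image's aspect ratio inside the window
win_w, win_h = 800, 600
img_aspect = image.width / image.height
win_aspect = win_w / win_h
if img_aspect > win_aspect:
    sx, sy = 1.0, win_aspect / img_aspect   # image is wider: shrink the quad vertically
else:
    sx, sy = img_aspect / win_aspect, 1.0   # image is taller: shrink the quad horizontally
# multiply the x and y positions of each vertex in `quad` by (sx, sy) before uploading the VBO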
You have to render to a Framebuffer Object.
Create a framebuffer with a render target that has the same size as the image:
# create render buffer with size (image.width x image.height)
rb_obj = glGenRenderbuffers(1)
glBindRenderbuffer(GL_RENDERBUFFER, rb_obj)
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA, image.width, image.height)

# create framebuffer
fb_obj = glGenFramebuffers(1)
glBindFramebuffer(GL_FRAMEBUFFER, fb_obj)
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, rb_obj)

# check framebuffer (that simple buffer should not be an issue)
status = glCheckFramebufferStatus(GL_FRAMEBUFFER)
if status != GL_FRAMEBUFFER_COMPLETE:
    print("incomplete framebuffer object")
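As an alternative (a sketch, not part of the answer above), a texture can be attached as the color target instead of the renderbuffer, which would also let you sample the result later; fb_texture is a name introduced here for illustration:
# attach an empty texture of the same size as the color target of the framebuffer
fb_texture = glGenTextures(1)
glBindTexture(GL_TEXTURE_2D, fb_texture)
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width, image.height, 0,
             GL_RGBA, GL_UNSIGNED_BYTE, None)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glBindFramebuffer(GL_FRAMEBUFFER, fb_obj)
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fb_texture, 0)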
[...] I don't need a vertex shader. [...]
You'll need a vertex shader anyway, because you have to draw a quad over the entire viewport. The shader program needs both a vertex shader and a fragment shader. Since the geometry has to cover the entire viewport, the vertex coordinates have to be in the range [-1, 1]. Furthermore, you have to change the texture coordinates, otherwise the output would be flipped:
#        positions        colors          texture coords
quad = [-1.0, -1.0, 0.0,  1.0, 0.0, 0.0,  0.0, 1.0,
         1.0, -1.0, 0.0,  0.0, 1.0, 0.0,  1.0, 1.0,
         1.0,  1.0, 0.0,  0.0, 0.0, 1.0,  1.0, 0.0,
        -1.0,  1.0, 0.0,  1.0, 1.0, 1.0,  0.0, 0.0]
[...] I don't need a window [...]
You still need the window to create the OpenGL context, although it can be created invisible, as sketched after the links below. See:
Minimal Windowless OpenGL Context Initialization
Windowless OpenGL
How to render offscreen on OpenGL?
How to use GLUT/OpenGL to render to a file?
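If the window must never appear on screen, one option (a sketch using the glfw bindings already imported above) is to create it invisible before making the context current:
glfw.init()
glfw.window_hint(glfw.VISIBLE, glfw.FALSE)   # the window is created but never shown
window = glfw.create_window(640, 480, "offscreen", None, None)
glfw.make_context_current(window)
# the framebuffer object determines the output size, so the window size does not matter here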
Bind the framebuffer, set the viewport rectangle to the size of the framebuffer, install the shader program, bind the texture and render the quad:
# bind texture
glBindTexture(GL_TEXTURE_2D, texture)
# install program
glUseProgram(shader)
# bind framebuffer and set viewport size
glBindFramebuffer(GL_FRAMEBUFFER, fb_obj)
glViewport(0, 0, image.width, image.height)
# draw the quad which covers the entire viewport
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, None)
Finally use glReadPixels to read the data from the framebuffer:
# read the data and create the image
image_buffer = glReadPixels(0, 0, image.width, image.height, GL_RGBA, GL_UNSIGNED_BYTE)
imageout = numpy.frombuffer(image_buffer, dtype=numpy.uint8)
imageout = imageout.reshape(image.height, image.width, 4)
img = Image.fromarray(imageout, 'RGBA')
img.save(r"image_out.png")
# bind default framebuffer (0) and set viewport rectangle to window size
glBindFramebuffer(GL_FRAMEBUFFER, 0)
glViewport(0, 0, 800, 600)
The code above generates a PNG file. If you want to generate a JPEG instead, it changes to:
image_buffer = glReadPixels(0, 0, image.width, image.height, GL_RGB, GL_UNSIGNED_BYTE)
imageout = numpy.frombuffer(image_buffer, dtype=numpy.uint8)
imageout = imageout.reshape(image.height, image.width, 3)
img = Image.fromarray(imageout, 'RGB')
img.save(r"image_out.jpg")
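One precaution that is not shown above: RGB pixels are 3 bytes and the default pack alignment is 4, so for image widths that are not a multiple of 4 the rows returned by glReadPixels would be padded and the reshape would produce a skewed image. Relaxing the pack alignment before the glReadPixels call avoids this:
glPixelStorei(GL_PACK_ALIGNMENT, 1)   # call before glReadPixels when reading GL_RGB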
You do not need a render loop at all; after binding the default framebuffer again it would still be possible to render to the window, but it is not required.
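For completeness, a sketch of how the end of main() could look when only the file output is wanted (assuming the framebuffer, shader and texture setup from above); everything is rendered exactly once and no loop is entered:
# render once into the framebuffer object, read it back, save, clean up
glBindFramebuffer(GL_FRAMEBUFFER, fb_obj)
glViewport(0, 0, image.width, image.height)
glClear(GL_COLOR_BUFFER_BIT)
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, None)

image_buffer = glReadPixels(0, 0, image.width, image.height, GL_RGBA, GL_UNSIGNED_BYTE)
imageout = numpy.frombuffer(image_buffer, dtype=numpy.uint8).reshape(image.height, image.width, 4)
Image.fromarray(imageout, 'RGBA').save("image_out.png")

glfw.terminate()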