I'm working on a macOS app using the Syphon SDK.
Syphon is a service that "publishes" render data, making it available to video-handling tools as an input source.
The sample project (available here: http://syphon.v002.info/) gets the rendering data from a Quartz Composer file and publishes it.
In my project I have to render a static NSImage, which means going through OpenGL, and then publish the resulting texture.
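For the publishing side, creating the Syphon server itself is the easy part. A minimal sketch, assuming you already have an NSOpenGLContext in a variable called glContext (the variable and the server name are my own, not from the sample project):

#import <Syphon/Syphon.h>

// Create the server once; other Syphon-aware apps will list it under this name.
// Assumes glContext is the NSOpenGLContext used to create the texture below.
SyphonServer *syphonServer =
    [[SyphonServer alloc] initWithName:@"Static Image"
                               context:[glContext CGLContextObj]
                               options:nil];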
Here's the method that turns the NSImage into an OpenGL texture:
- (void)textureFromImage:(NSImage *)theImg textureName:(GLuint *)texName
{
    NSSize imgSize = [theImg size];

    // Capture the image's pixels into a bitmap rep.
    [theImg lockFocus];
    NSBitmapImageRep *bitmap = [[NSBitmapImageRep alloc] initWithFocusedViewRect:
        NSMakeRect(0.0, 0.0, imgSize.width, imgSize.height)];
    [theImg unlockFocus];

    // Set proper unpacking row length for the bitmap.
    glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)[bitmap pixelsWide]);

    // Set byte-aligned unpacking (needed for 3-byte-per-pixel bitmaps).
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);

    // Generate a new texture name if one was not provided.
    if (*texName == 0)
        glGenTextures(1, texName);

    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, *texName);

    // Non-mipmap filtering (rectangle textures have no mipmaps anyway).
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

    NSInteger samplesPerPixel = [bitmap samplesPerPixel];

    // Nonplanar, RGB 24-bit bitmap, or RGBA 32-bit bitmap.
    if (![bitmap isPlanar] &&
        (samplesPerPixel == 3 || samplesPerPixel == 4))
    {
        glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0,
                     samplesPerPixel == 4 ? GL_RGBA8 : GL_RGB8,
                     (GLsizei)[bitmap pixelsWide],
                     (GLsizei)[bitmap pixelsHigh],
                     0,
                     samplesPerPixel == 4 ? GL_RGBA : GL_RGB,
                     GL_UNSIGNED_BYTE,
                     [bitmap bitmapData]);
    }
    else
    {
        // Handle other bitmap formats here if needed.
    }

    // Clean up (manual reference counting).
    [bitmap release];
}
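Putting it together, publishing a frame looks roughly like this. A sketch only: myImage is a hypothetical NSImage, syphonServer is the server created above, and I'm assuming the same GL context is current when the texture is created and published:

// Create (or update) the texture from the image.
// myImage is a placeholder for whatever NSImage you want to publish.
GLuint texture = 0;
[self textureFromImage:myImage textureName:&texture];

// Publish it through the Syphon server.
NSSize size = [myImage size];
[syphonServer publishFrameTexture:texture
                    textureTarget:GL_TEXTURE_RECTANGLE_EXT
                      imageRegion:NSMakeRect(0, 0, size.width, size.height)
                textureDimensions:size
                          flipped:NO];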
Hope it helps someone else.