Problem description
I'm using OpenGL ES to render some special effects. I don't want to show this to the user; I just want to save the result as a UIImage. Can anybody please help me?
This is the code I'm using. I can get an image that contains the red clear color I use, but none of the geometry I draw shows up.
#import "RendererGL.h"
#import <GLKit/GLKit.h>
#import <UIKit/UIKit.h>
#import <OpenGLES/EAGL.h>
#import <OpenGLES/EAGLDrawable.h>
#import <OpenGLES/ES2/glext.h>
#import <QuartzCore/QuartzCore.h>
static NSInteger WIDTH_IN_PIXEL = 400;
static NSInteger HEIGHT_IN_PIXEL = 300;
typedef struct {
    GLKVector3 positionCoords;
} SceneVertex;
static const SceneVertex vertices[] =
{
{{-0.5f, -0.5f, 0.0}}, // lower left corner
{{ 0.5f, -0.5f, 0.0}}, // lower right corner
{{-0.5f, 0.5f, 0.0}} // upper left corner
};
@implementation RendererGL
{
EAGLContext* _myContext;
GLuint _framebuffer;
GLuint _colorRenderbuffer;
GLuint _depthRenderbuffer;
GLuint _vertexBufferID;
GLKBaseEffect *_baseEffect;
}
- (id) init
{
self = [super init];
if (self)
{
_myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
[EAGLContext setCurrentContext:_myContext];
[self setupOffscreenBuffer];
[self setUpEffect];
[self renderImage];
[self saveImage]; // this does work, in that I get an image, but the image only contains the red color I used to clear
}
return self;
}
-(void)setUpEffect
{
_baseEffect = [[GLKBaseEffect alloc] init];
_baseEffect.useConstantColor = GL_TRUE;
_baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 1.0f, 1.0f);
}
// this code is from Apple's documentation
-(void)setupOffscreenBuffer
{
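// create an offscreen FBO: an RGBA4 color renderbuffer plus a 16-bit depth renderbuffer; no CAEAGLLayer is attached, so nothing is ever presented on screen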
glGenFramebuffers(1, &_framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer);
glGenRenderbuffers(1, &_colorRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderbuffer);
glGenRenderbuffers(1, &_depthRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderbuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderbuffer);
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER) ;
if(status != GL_FRAMEBUFFER_COMPLETE) {
NSLog(@"failed to make complete framebuffer object %x", status);
}
}
- (void) renderImage
{
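// clear the offscreen framebuffer to red, then draw a single triangle with the constant-color GLKBaseEffect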
GLenum error = GL_NO_ERROR;
glClearColor(1, 0, 0, 1); // red clear color; this is visible in the saved image
glClear(GL_COLOR_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
[_baseEffect prepareToDraw];
glGenBuffers(1, &_vertexBufferID);
glBindBuffer(GL_ARRAY_BUFFER, _vertexBufferID);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happend, error is %d, line %d",error,__LINE__);
}
glEnableVertexAttribArray(GLKVertexAttribPosition);
glVertexAttribPointer(GLKVertexAttribPosition,3,GL_FLOAT, GL_FALSE, sizeof(SceneVertex), NULL);
glDrawArrays(GL_TRIANGLES,0,3);
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happend, error is %d, line %d",error,__LINE__);
}
glFinish();
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happend, error is %d, line %d",error,__LINE__);
}
}
-(void)saveImage
{
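// read the framebuffer contents back with glReadPixels, wrap the raw pixels in a CGImage, draw that into a bitmap context to get a UIImage, and write it out as a JPEG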
GLenum error = GL_NO_ERROR;
NSInteger x = 0, y = 0;
NSInteger dataLength = WIDTH_IN_PIXEL * HEIGHT_IN_PIXEL * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, GL_RGBA, GL_UNSIGNED_BYTE, data);
NSData *pixelsRead = [NSData dataWithBytes:data length:dataLength];
error = glGetError();
if (error != GL_NO_ERROR) {
NSLog(@"error happend, error is %d, line %d",error,__LINE__);
}
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL, 8, 32, WIDTH_IN_PIXEL * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, WIDTH_IN_PIXEL, HEIGHT_IN_PIXEL), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
NSData *d = UIImageJPEGRepresentation(image, 1);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
static NSInteger imageNO = 1;
imageNO++;
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%ld.jpg", (long)imageNO]];
BOOL succ = [d writeToFile:savingPath atomically:NO]; // this succeeds
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
}
@end
Recommended answer
I had a very similar problem: render some lines and get a UIImage. I used OpenGL ES 1.1 and multisampling. I removed some additional code that is not related to rendering, as well as some OpenGL error checks. You can find the full code here: OSPRendererGL. Also, sorry for my one-for-all method.
@interface OSPRendererGL : NSObject
{
EAGLContext* myContext;
GLuint framebuffer;
GLuint colorRenderbuffer;
GLuint depthRenderbuffer;
GLuint _vertexArray;
GLuint _vertexBuffer;
GLuint resolveFramebuffer;
GLuint msaaFramebuffer, msaaRenderbuffer, msaaDepthbuffer;
int width;
int height;
}
@end
@implementation OSPRendererGL
- (id) init
{
self = [super init];
if (self)
{
myContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES1];
[EAGLContext setCurrentContext:myContext];
[self setupOpenGL];
[EAGLContext setCurrentContext:nil];
width = 256;
height = 256;
}
return self;
}
-(void) setupOpenGL
{
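// build two FBOs: a plain (resolve) framebuffer with RGBA8 color and 16-bit depth renderbuffers, and a 4x multisampled framebuffer that the actual drawing goes into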
glGenFramebuffersOES(1, &framebuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, framebuffer);
glGenRenderbuffersOES(1, &colorRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, colorRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, colorRenderbuffer);
glGenRenderbuffersOES(1, &depthRenderbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);
glGenFramebuffersOES(1, &msaaFramebuffer);
glGenRenderbuffersOES(1, &msaaRenderbuffer);
glBindFramebufferOES(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_RGBA8_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, msaaRenderbuffer);
glGenRenderbuffersOES(1, &msaaDepthbuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaDepthbuffer);
glRenderbufferStorageMultisampleAPPLE(GL_RENDERBUFFER_OES, 4, GL_DEPTH_COMPONENT16_OES, width, height);
glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, msaaDepthbuffer);
}
-(UIImage *) renderImageAtZoom:(int)zoom
{
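// render into the multisampled FBO, resolve it into the plain FBO, then read the pixels back and convert them to a UIImage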
CGRect b = CGRectMake(0, 0, width, height);
OSPCoordinateRect r = OSPRectForMapAreaInRect([self mapArea], b);
double _scale = b.size.width / r.size.x;
double scale = 1.0/_scale;
[EAGLContext setCurrentContext:myContext];
glBindFramebuffer(GL_FRAMEBUFFER_OES, msaaFramebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER_OES, msaaRenderbuffer);
glViewport(0, 0, width, height);
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrthof(0.0f, 256.0f, 256.0f, 0.0f, 1.0f, -1.0f);
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glScalef(_scale, _scale, 1);
glTranslatef(-r.origin.x, -r.origin.y, 0);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_LINE_SMOOTH);
glEnable(GL_POINT_SMOOTH);
glEnable(GL_BLEND);
glClearColor(1, 1, 1, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// rendering here
glPopMatrix();
// msaa
glBindFramebufferOES(GL_READ_FRAMEBUFFER_APPLE, msaaFramebuffer);
glBindFramebufferOES(GL_DRAW_FRAMEBUFFER_APPLE, framebuffer);
glResolveMultisampleFramebufferAPPLE();
glBindFramebuffer(GL_FRAMEBUFFER_OES, framebuffer);
glBindRenderbufferOES(GL_RENDERBUFFER, colorRenderbuffer);
// grabbing image from FBO
GLint backingWidth, backingHeight;
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);
NSInteger x = 0, y = 0;
NSInteger dataLength = width * height * 4;
GLubyte *data = (GLubyte*)malloc(dataLength * sizeof(GLubyte));
glPixelStorei(GL_PACK_ALIGNMENT, 4);
glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, data);
CGDataProviderRef ref = CGDataProviderCreateWithData(NULL, data, dataLength, NULL);
CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
CGImageRef iref = CGImageCreate(width, height, 8, 32, width * 4, colorspace, kCGBitmapByteOrder32Big | kCGImageAlphaPremultipliedLast,
ref, NULL, true, kCGRenderingIntentDefault);
UIGraphicsBeginImageContext(CGSizeMake(width, height));
CGContextRef cgcontext = UIGraphicsGetCurrentContext();
CGContextSetBlendMode(cgcontext, kCGBlendModeCopy);
CGContextDrawImage(cgcontext, CGRectMake(0.0, 0.0, width, height), iref);
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
free(data);
CFRelease(ref);
CFRelease(colorspace);
CGImageRelease(iref);
[EAGLContext setCurrentContext:nil];
return image;
}
@end
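For reference, here is a minimal sketch of how a caller might drive this renderer, assuming the rest of OSPRendererGL (for example, setting the map area that renderImageAtZoom: reads) has been configured as in the full source; the zoom level and file name below are purely illustrative:
// Hypothetical caller: render offscreen and persist the result as a PNG in Documents.
OSPRendererGL *renderer = [[OSPRendererGL alloc] init];
UIImage *image = [renderer renderImageAtZoom:15]; // zoom value is only an example
NSData *pngData = UIImagePNGRepresentation(image);
NSString *documentDirPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0];
NSString *savingPath = [documentDirPath stringByAppendingPathComponent:@"offscreen.png"];
[pngData writeToFile:savingPath atomically:YES];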