Become a MacRumors Supporter for $50/year with no ads, ability to filter front page stories, and private forums.

niralipandey

macrumors newbie
Original poster
May 15, 2015
5
0
I am working on an iOS app using OpenGL ES. I have an issue regarding the view renderbuffer: I want to save and preserve its current state. I currently render a UIImage with OpenGL, and on every touch-move the image gets twirled; I use a twirl shader to achieve the effect.
What I want is to save the current renderbuffer state — meaning wherever the user touches should be saved in the buffer, so that on the next touch-move the previous twirl remains and drawing continues on the same buffer.
Currently everything works, but on the next touch-move the previous effect gets reset.
Please guide me on this issue.
You can check complete running project Twirl-on-touch - Github
Here is my code for opengl....

Code:
//
//  PaintingView.m
//  TouchTest4
//


#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGLDrawable.h>
#import <GLKit/GLKit.h>

#import "PaintingView.h"

enum {
    ATTRIB_VERTEX,
    NUM_ATTRIBS
};

// Private state for PaintingView: GL object names, shader handles, and
// per-touch twirl parameters.
@interface PaintingView()
{
    // The pixel dimensions of the backbuffer
    GLint backingWidth;
    GLint backingHeight;
    
    // OpenGL ES 2.0 context created in -initWithCoder:; all GL calls target it.
    EAGLContext *context;
    
    // OpenGL names for the renderbuffer and framebuffers used to render to this view
    GLuint viewRenderbuffer, viewFramebuffer, texture;
    
    // OpenGL name for the depth buffer that is attached to viewFramebuffer, if it exists (0 if it does not exist)
    GLuint depthRenderbuffer;
    
    // Set to YES in -initWithCoder: but never read anywhere in this file —
    // presumably intended to trigger a first clear. TODO(review): confirm.
    Boolean needsErase;
    
    // Shader objects
//    GLuint vertexShader;
//    GLuint fragmentShader;
//    GLuint shaderProgram;
    
    // Buffer Objects
    GLuint vboId;  // VBO name; data is only ever uploaded in -renderLineFromPoint:toPoint:
    
    BOOL initialized;  // YES once -initGL has run (guarded by -layoutSubviews)
    
    // Shader handles resolved in -setupShaders / -initGL.
    GLint inputImageTexture2Uniform, filterPositionAttribute, filterTextureCoordinateAttribute;
    
    // Base names of the bundle shader files ("<name>.vsh" / "<name>.fsh").
    // NOTE(review): these shadow the commented-out GLuint shader ivars above.
    NSString *vertexShader, *fragmentShader;
    
    // Never set to YES anywhere in this file (the code that would set it in
    // -touchesBegan: is commented out), so the firstTouch branch in
    // -touchesMoved: is effectively dead. TODO(review): confirm intent.
    Boolean	firstTouch;
    
    // Twirl center in normalized coordinates, updated on every touch-move.
    CGPoint twirlCenter;
}

// Program Handle
@property (assign, nonatomic, readonly) GLuint program;

// Attribute Handles
@property (assign, nonatomic, readonly) GLuint aPosition;

// Current and previous touch locations, flipped to GL's bottom-left origin.
@property(nonatomic, readwrite) CGPoint location;
@property(nonatomic, readwrite) CGPoint previousLocation;


@end

@implementation PaintingView

// Implement this to override the default layer class (which is [CALayer class]).
// We do this so that our view will be backed by a layer that is capable of OpenGL ES rendering.
// Implement this to override the default layer class (which is [CALayer class]).
// We do this so that our view will be backed by a layer that is capable of
// OpenGL ES rendering (an EAGLDrawable).
+ (Class)layerClass
{
    return [CAEAGLLayer class];
}

// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:.
// Configures the backing CAEAGLLayer (RGBA8, retained backing so the drawable's
// contents survive -presentRenderbuffer:) and creates the OpenGL ES 2.0 context.
// Returns nil if the context cannot be created or made current.
- (id)initWithCoder:(NSCoder*)coder {
    if ((self = [super initWithCoder:coder])) {
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
        
        eaglLayer.opaque = NO;
        // In this application, we want to retain the EAGLDrawable contents after a call to presentRenderbuffer.
        eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                        [NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
        
        // ES 2.0 is required for the shader-based twirl effect.
        context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        
        //        context = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 100, 100)];
        
        if (!context || ![EAGLContext setCurrentContext:context]) {
            return nil;
        }
        
        // Set the view's scale factor as you wish
        self.contentScaleFactor = [[UIScreen mainScreen] scale];
        
        // Make sure to start with a cleared buffer
        needsErase = YES;
    }
    return self;
}

// If our view is resized, we'll be asked to layout subviews.
// This is the perfect opportunity to also update the framebuffer so that it is
// the same size as our display area.
// If our view is resized, we'll be asked to layout subviews.
// This is the perfect opportunity to also update the framebuffer so that it is
// the same size as our display area.
-(void)layoutSubviews
{
    // A -layoutSubviews override must call super; the original implementation
    // omitted this, which can leave genuine subviews (if any are added) unlaid.
    [super layoutSubviews];
    
    [EAGLContext setCurrentContext:context];
    
    if (!initialized) {
        // First layout pass: create the framebuffer/renderbuffer and load shaders.
        initialized = [self initGL];
    }
    else {
        // Subsequent passes: reallocate the color buffer to match the new layer size.
        [self resizeFromLayer:(CAEAGLLayer*)self.layer];
    }
}

// One-time GL setup: creates the framebuffer/color renderbuffer pair backed by
// the view's CAEAGLLayer, loads the shader program, and uploads the source
// image as a texture. Returns NO if the framebuffer is incomplete.
- (BOOL)initGL
{
    // Generate IDs for a framebuffer object and a color renderbuffer
    glGenFramebuffers(1, &viewFramebuffer);
    glGenRenderbuffers(1, &viewRenderbuffer);
    
    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
    // This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
    // allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view).
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:(id<EAGLDrawable>)self.layer];
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, viewRenderbuffer);
    
    // Query the pixel dimensions the drawable was actually allocated with.
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    
    // For this sample, we do not need a depth buffer. If you do, this is how you can create one and attach it to the framebuffer:
    //    glGenRenderbuffers(1, &depthRenderbuffer);
    //    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
    //    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, backingWidth, backingHeight);
    //    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);
    
    if(glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
        return NO;
    }
    
    // Setup the view port in Pixels
    glViewport(0, 0, backingWidth, backingHeight);
    
    // Create a Vertex Buffer Object to hold our data
    glGenBuffers(1, &vboId);
    
    // Load the brush texture
    //    brushTexture = [self textureFromName:@"transprnt.png"];
    
    // Load shaders
    [self setupShaders];
    
    // Cache the sampler uniform location (valid now that _program is linked),
    // then upload the bundled source image as the GL texture to be twirled.
    inputImageTexture2Uniform = [self uniformIndex:@"inputImageTexture"];
    UIImage *uyet = [UIImage imageNamed:@"kerala.jpg"];
    [self setImage:uyet];
    
    
    
    
    // Enable blending and set a blending function appropriate for premultiplied alpha pixel data
    glEnable(GL_BLEND);
    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    
    
    return YES;
}

// Compiles/links the RWTBase vertex + TwirlShader fragment pair into _program
// and wires up the full-screen quad attribute state used by -drawEffects.
- (void)setupShaders
{
    // Base names of the bundle shader files ("RWTBase.vsh", "TwirlShader.fsh").
    vertexShader = @"RWTBase";
    fragmentShader = @"TwirlShader";
    // Program
    _program = [self programWithVertexShader:vertexShader fragmentShader:fragmentShader];
    
    // Attributes
    filterPositionAttribute = glGetAttribLocation(_program, "aPosition");
    filterTextureCoordinateAttribute = glGetAttribLocation(_program, "inputTextureCoordinate");
    
    glEnableVertexAttribArray(filterPositionAttribute);
    glEnableVertexAttribArray(filterTextureCoordinateAttribute);
    
    // NOTE(review): glBindAttribLocation only takes effect on the NEXT
    // glLinkProgram call, so binding after the program is already linked is a
    // no-op here — the locations queried above are what's actually used.
    glBindAttribLocation(_program,
                         filterPositionAttribute,
                         [@"aPosition" UTF8String]);
    glBindAttribLocation(_program,
                         filterTextureCoordinateAttribute,
                         [@"inputTextureCoordinate" UTF8String]);
    
    
    
    // Program
    glUseProgram(_program);
    
    // Full-screen quad as a triangle strip, with matching texture coordinates.
    static const GLfloat textureCoordinates[] = {
        0.0f, 0.0f,
        1.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
    };
    static const GLfloat vertices[] = {
        -1.0f, -1.0f,
        1.0f, -1.0f,
        -1.0f,  1.0f,
        1.0f,  1.0f,
    };
    
    // No VBO is bound at this point, so these register client-side pointers
    // into the static arrays above; -drawEffects draws with this state.
    glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
    
}

// Reallocates the color renderbuffer storage from the (possibly resized) layer
// and updates the cached backing dimensions and viewport.
// Returns NO if the framebuffer is incomplete afterwards.
- (BOOL)resizeFromLayer:(CAEAGLLayer *)layer
{
    // Allocate color buffer backing based on the current layer size
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
    [context renderbufferStorage:GL_RENDERBUFFER fromDrawable:layer];
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &backingWidth);
    glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &backingHeight);
    
    // For this sample, we do not need a depth buffer. If you do, this is how you can allocate depth buffer backing:
    //    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderbuffer);
    //    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, backingWidth, backingHeight);
    //    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderbuffer);
    
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    {
        NSLog(@"Failed to make complete framebuffer objectz %x", glCheckFramebufferStatus(GL_FRAMEBUFFER));
        return NO;
    }
    
    
    // Update viewport
    glViewport(0, 0, backingWidth, backingHeight);
    
    return YES;
}

// Releases GL resources when they are no longer needed.
- (void)dealloc
{
    // Our context must be current before the glDelete* calls below; otherwise
    // they would target whatever context happens to be current (possibly none)
    // and silently leak our framebuffer, renderbuffers, texture, and VBO.
    [EAGLContext setCurrentContext:context];
    
    // Destroy framebuffers and renderbuffers
    if (viewFramebuffer) {
        glDeleteFramebuffers(1, &viewFramebuffer);
        viewFramebuffer = 0;
    }
    if (viewRenderbuffer) {
        glDeleteRenderbuffers(1, &viewRenderbuffer);
        viewRenderbuffer = 0;
    }
    if (depthRenderbuffer)
    {
        glDeleteRenderbuffers(1, &depthRenderbuffer);
        depthRenderbuffer = 0;
    }
    // Source-image texture (was leaked by the original implementation)
    if (texture) {
        glDeleteTextures(1, &texture);
        texture = 0;
    }
    // vbo
    if (vboId) {
        glDeleteBuffers(1, &vboId);
        vboId = 0;
    }
    
    // tear down context
    if ([EAGLContext currentContext] == context)
        [EAGLContext setCurrentContext:nil];
}

// Custom Methods....
/*
 ***********
 *         *
 *   ATG   *
 *         *
 ***********
 */
#pragma mark - Compile & Link
// Compiles the named vertex and fragment shaders, links them into a new GL
// program, and returns the program handle. On link failure the GLSL info log
// is printed and the (unusable) handle is still returned, mirroring the
// compile-stage behavior of -shaderWithName:type:.
- (GLuint)programWithVertexShader:(NSString*)vsh fragmentShader:(NSString*)fsh {
    // Compile both stages from their bundle files.
    GLuint vs = [self shaderWithName:vsh type:GL_VERTEX_SHADER];
    GLuint fs = [self shaderWithName:fsh type:GL_FRAGMENT_SHADER];
    
    // Create the program object and attach the compiled stages.
    GLuint programHandle = glCreateProgram();
    glAttachShader(programHandle, vs);
    glAttachShader(programHandle, fs);
    
    // Link, then report any errors via the program info log.
    glLinkProgram(programHandle);
    GLint linkSuccess = GL_FALSE;
    glGetProgramiv(programHandle, GL_LINK_STATUS, &linkSuccess);
    if (linkSuccess == GL_FALSE) {
        GLchar messages[1024];
        glGetProgramInfoLog(programHandle, sizeof(messages), 0, &messages[0]);
        NSLog(@"%@:- GLSL Program Error: %s", [self class], messages);
    }
    
    // The shader objects are no longer needed once linked into the program;
    // flag them for deletion (GL frees them when the program is deleted).
    glDeleteShader(vs);
    glDeleteShader(fs);
    
    return programHandle;
}

// Loads, compiles, and returns a shader of the given type from the main bundle
// ("<name>.vsh" for GL_VERTEX_SHADER, "<name>.fsh" for GL_FRAGMENT_SHADER).
// Returns 0 if the file cannot be found or read; on compile failure the GLSL
// info log is printed and the (unusable) handle is still returned.
- (GLuint)shaderWithName:(NSString*)name type:(GLenum)type {
    // Load the shader file
    NSString* file;
    if (type == GL_VERTEX_SHADER) {
        file = [[NSBundle mainBundle] pathForResource:name ofType:@"vsh"];
    } else if (type == GL_FRAGMENT_SHADER) {
        file = [[NSBundle mainBundle] pathForResource:name ofType:@"fsh"];
    }
    
    // Read the shader source. The original code passed a possibly-NULL C string
    // straight to glShaderSource when the file was missing; guard against that.
    NSError *readError = nil;
    NSString *sourceString = file ? [NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:&readError] : nil;
    if (sourceString == nil) {
        NSLog(@"%@:- Could not load shader '%@' (%@)", [self class], name, readError);
        return 0;
    }
    const GLchar* source = (GLchar*)[sourceString UTF8String];
    
    // Create the shader object
    GLuint shaderHandle = glCreateShader(type);
    
    // Load the shader source
    glShaderSource(shaderHandle, 1, &source, 0);
    
    // Compile the shader
    glCompileShader(shaderHandle);
    
    // Check for errors
    GLint compileSuccess;
    glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess);
    if (compileSuccess == GL_FALSE) {
        GLchar messages[1024];
        glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]);
        NSLog(@"%@:- GLSL Shader Error: %s", [self class], messages);
    }
    
    return shaderHandle;
}

// Touch Methiods..
#pragma mark - Touches

// Handles the start of a touch. The body is entirely commented out, so
// `firstTouch` is never set to YES and `_location` is not seeded here;
// -touchesMoved: therefore always takes its non-firstTouch branch.
// NOTE(review): the commented code references self.view, which does not exist
// on a UIView — if restored it would need [self bounds] / locationInView:self.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event {
    
//    CGRect				bounds = [self.view bounds];
//    UITouch*            touch = [[event touchesForView:self.view] anyObject];
//    //    NSLog(@"Hellossss");
//    firstTouch = YES;
//    _location = [touch locationInView:self.view];
//    _location.y = bounds.size.height - _location.y;
}

// Handles the continuation of a touch: computes the normalized twirl center,
// uploads the twirl uniforms (-applyEffect), updates the flipped current and
// previous locations, then redraws the quad (-drawEffects).
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    
    CGRect				bounds = [self bounds];
    UITouch*			touch = [[event touchesForView:self] anyObject];

    
    // Normalize the touch point for the shader's `center` uniform.
    UITouch* touchEvent = [touches anyObject];
    CGPoint locationInView = [touchEvent locationInView:self];
    twirlCenter = getNormalizedPoint(self, locationInView);

    
//    [EAGLContext setCurrentContext:context];
//    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
    
    // Render the stroke
    [self applyEffect];

//    [self drawEffects];
    // NOTE(review): firstTouch is never set to YES in this file (see
    // -touchesBegan:), so only the else branch below ever runs. Both branches
    // flip Y to GL's bottom-left origin.
    if (firstTouch) {
        firstTouch = NO;
        _previousLocation = [touch previousLocationInView:self];
        _previousLocation.y = bounds.size.height - _previousLocation.y;
    } else {
        _location = [touch locationInView:self];
        _location.y = bounds.size.height - _location.y;
        _previousLocation = [touch previousLocationInView:self];
        _previousLocation.y = bounds.size.height - _previousLocation.y;
    }
    [self drawEffects];
    // Render the stroke
//    [self renderLineFromPoint:_previousLocation toPoint:_location];
}

// Draws the full-screen quad with the twirl shader and presents the result.
//
// NOTE(review): every call re-samples the ORIGINAL image texture uploaded in
// -setImage:, so each new twirl replaces — rather than composites with — the
// previous one. This is why earlier effects reset on the next touch-move even
// though the drawable uses retained backing (retained backing only preserves
// what presentRenderbuffer showed; it does not feed back into the shader).
// Persisting effects requires render-to-texture: draw into an offscreen FBO
// texture and use that texture as the sampler source for the next frame.
-(void)drawEffects {
//    [EAGLContext setCurrentContext:context];
//    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
    
    // Draw (attribute state was set up in -setupShaders; binding vboId here
    // does not change the already-registered client-side attribute pointers)
    glBindBuffer(GL_ARRAY_BUFFER, vboId);
    
    glUseProgram(_program);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    
    
    // Display the buffer
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER];
}

// Uploads the per-touch twirl parameters (radius, angle, center) as uniforms.
// Assumes _program is the active program and the GL context is current (both
// established earlier in -setupShaders / -layoutSubviews).
-(void)applyEffect {
    
    //        [EAGLContext setCurrentContext:context];
    //        glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
    
    // Fixed twirl radius; only the center follows the touch.
    GLint radiusIndex = [self uniformIndex:@"radius"];
    glUniform1f(radiusIndex, 0.1);
    
    // Fixed twirl angle (sign controls rotation direction).
    GLint angleIndex = [self uniformIndex:@"angle"];
    glUniform1f(angleIndex, -0.5);
    
    //    twirlCenter = CGPointMake(1.0, 0.0);
    GLint centerIndex = [self uniformIndex:@"center"];
    
    // twirlCenter was normalized from the touch point in -touchesMoved:.
    GLfloat positionArray[2];
    positionArray[0] = twirlCenter.x;
    positionArray[1] = twirlCenter.y;
    glUniform2fv(centerIndex, 1, positionArray);
    
}

// Draws a line onscreen based on where the user touches, by interpolating
// GL_POINTS between `start` and `end` roughly every 3 pixels.
// NOTE(review): currently dead code — the only call site (in -touchesMoved:)
// is commented out. It also draws with _program (the twirl shader), whose
// attribute layout may not match the ATTRIB_VERTEX stream set up here.
- (void)renderLineFromPoint:(CGPoint)start toPoint:(CGPoint)end
{
    // Grows-only scratch buffer reused across calls; intentionally never freed.
    static GLfloat*		vertexBuffer = NULL;
    static NSUInteger	vertexMax = 64;
    NSUInteger			vertexCount = 0,
    count,
    i;
    
    [EAGLContext setCurrentContext:context];
    glBindFramebuffer(GL_FRAMEBUFFER, viewFramebuffer);
    
    // Convert locations from Points to Pixels
    CGFloat scale = self.contentScaleFactor;
    start.x *= scale;
    start.y *= scale;
    end.x *= scale;
    end.y *= scale;
    
    // Allocate vertex array buffer
    if(vertexBuffer == NULL)
        vertexBuffer = malloc(vertexMax * 2 * sizeof(GLfloat));
    
    // Add points to the buffer so there are drawing points every X pixels
    count = MAX(ceilf(sqrtf((end.x - start.x) * (end.x - start.x) + (end.y - start.y) * (end.y - start.y)) / 3), 1);
    for(i = 0; i < count; ++i) {
        if(vertexCount == vertexMax) {
            // NOTE(review): realloc result is not checked for NULL; a failed
            // reallocation here would crash on the next write.
            vertexMax = 2 * vertexMax;
            vertexBuffer = realloc(vertexBuffer, vertexMax * 2 * sizeof(GLfloat));
        }
        
        // Linear interpolation from start toward end (end itself is excluded).
        vertexBuffer[2 * vertexCount + 0] = start.x + (end.x - start.x) * ((GLfloat)i / (GLfloat)count);
        vertexBuffer[2 * vertexCount + 1] = start.y + (end.y - start.y) * ((GLfloat)i / (GLfloat)count);
        vertexCount += 1;
    }
    
    // Load data to the Vertex Buffer Object
    glBindBuffer(GL_ARRAY_BUFFER, vboId);
    glBufferData(GL_ARRAY_BUFFER, vertexCount*2*sizeof(GLfloat), vertexBuffer, GL_DYNAMIC_DRAW);
    
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 0, 0);
    
    // Draw
    glBindTexture(GL_TEXTURE_2D, texture);
    
    glUseProgram(_program);
    
    
    glDrawArrays(GL_POINTS, 0, (int)vertexCount);
    
    // Display the buffer
    glBindRenderbuffer(GL_RENDERBUFFER, viewRenderbuffer);
    [context presentRenderbuffer:GL_RENDERBUFFER];
}

#pragma mark - Helpers


// Converts a touch location in view coordinates to a normalized point with a
// bottom-left origin (Y flipped for GL).
//
// NOTE(review): `(2.f - 1.f)` evaluates to 1.0f, so both expressions reduce to
// a plain 0..1 division. This looks like a precedence typo for `* 2.f - 1.f`
// (which would map to -1..1 NDC), but since the twirl reportedly appears at
// the touch location, the shader's `center` uniform presumably expects 0..1
// texture coordinates — confirm against TwirlShader.fsh before "fixing" this.
static CGPoint getNormalizedPoint(UIView* view, CGPoint locationInView)
{
    const float normalizedX = (locationInView.x / view.bounds.size.width) * (2.f - 1.f);
    const float normalizedY = ((view.bounds.size.height - locationInView.y) / view.bounds.size.height) * (2.f - 1.f);
    
    return CGPointMake(normalizedX, normalizedY);
}

// set Img...

// Decodes `image` into a BGRA bitmap — flipped vertically for GL's bottom-left
// origin — and uploads it as the source texture via
// -generateDefaultTextureWithWidth:height:data:.
- (void)setImage:(UIImage *)image
{
    // Create an RGBA bitmap context
    CGImageRef CGImage = image.CGImage;
    GLint width = (GLint)CGImageGetWidth(CGImage);
    GLint height = (GLint)CGImageGetHeight(CGImage);
    size_t bitsPerComponent = 8;
    size_t bytesPerRow = width * 4;
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    // Premultiplied-first + 32-bit little-endian yields BGRA byte order in
    // memory, matching the GL_BGRA upload in the texture helper.
    // NOTE(review): context1 is not checked for NULL; a failed bitmap
    // allocation would crash in the drawing calls below.
    CGContextRef context1 = CGBitmapContextCreate(NULL, width, height, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedFirst|kCGBitmapByteOrder32Little);
    // Invert vertically for OpenGL
    CGContextTranslateCTM(context1, 0, height);
    CGContextScaleCTM(context1, 1, -1);
    CGContextDrawImage(context1, CGRectMake(0, 0, width, height), CGImage);
    GLubyte *textureData = (GLubyte *)CGBitmapContextGetData(context1);
    
    //    [self setContentSize:CGSizeMake(width, height)];
    //    [self _setTextureData:textureData width:width height:height];
    [self generateDefaultTextureWithWidth:width height:height data:textureData];
    
    CGContextRelease(context1);
    CGColorSpaceRelease(colorSpace);
}

// Creates the source-image GL texture on texture unit 0, points the shader's
// sampler uniform at unit 0, stores the name in the `texture` ivar, and
// returns it.
// NOTE(review): glUniform1i requires a program to be in use — this relies on
// the glUseProgram call made earlier in -setupShaders. Each call also
// generates a fresh texture name without deleting any previous one.
- (GLuint)generateDefaultTextureWithWidth:(GLint)width height:(GLint)height data:(GLvoid *)data
{
    //    texture = 0;
    glActiveTexture(GL_TEXTURE0);
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_2D, texture);
    glUniform1i(inputImageTexture2Uniform, 0);
    
    
    //    glGenTextures(1, &texture);
    //    glBindTexture(GL_TEXTURE_2D, texture);
    // CLAMP_TO_EDGE + non-mipmapped filtering keeps non-power-of-two
    // textures legal under OpenGL ES 2.0.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_BGRA, GL_UNSIGNED_BYTE, data);
    //    glBindTexture(GL_TEXTURE_2D, 0);
    return texture;
}

// Returns the location of the named uniform in the painting program.
// NOTE: glGetUniformLocation actually yields a GLint (-1 when the name is not
// an active uniform); the value is passed through here as a GLuint unchanged.
- (GLuint)uniformIndex:(NSString *)uniformName
{
    const char *cName = [uniformName UTF8String];
    return glGetUniformLocation(_program, cName);
}

// Schedules a redraw of the current effect after a 0.5 s delay on the current
// run loop. NOTE(review): the name and delay look like debugging leftovers —
// confirm whether this is still needed.
- (void)setPaintLine12
{
    [self performSelector:@selector(drawEffects) withObject:nil afterDelay:0.5];
}

// Erases the screen. Currently a stub that only logs — no GL clear is issued
// and the retained drawable contents are left untouched.
- (void)erase
{
    NSLog(@"erase");
}

// Allow this view to become first responder so it can receive events directly.
- (BOOL)canBecomeFirstResponder {
    return YES;
}

@end
 

teagls

macrumors regular
May 16, 2013
202
101
It appears you are simply drawing completely over the previous frame. I assume you are trying to composite the swirls, but the sampler2D that you pass into the fragment shader is the same image you are loading from the beginning. You need to do a render to texture so when you render a frame you output that as a new OpenGL texture that you then reference and draw a swirl on top of.
 

niralipandey

macrumors newbie
Original poster
May 15, 2015
5
0
Thank you.
Could you please provide sample code, or point us to where we need to implement this functionality?
 
Register on MacRumors! This sidebar will go away, and you'll see fewer ads.