GPUImageLayer.m

@depthlove, forked from sarsonj/GPUImageLayer.m. Created October 11, 2017 17:24.

Revisions

  1. @sarsonj created this gist Nov 12, 2014 (GPUImageLayer.m: 489 additions, 0 deletions).

    #import <AVFoundation/AVFoundation.h>
    #import "GPUImageLayer.h"
    #import "GPUImageOutput.h"
    #import "GPUImageFilter.h"

    @interface GPUImageLayer() {
        GPUImageFramebuffer *inputFramebufferForDisplay;
        // Rotation applied to the incoming framebuffer; referenced by setInputRotation:
        // and the draw path below. (Assumed to belong here rather than in GPUImageLayer.h.)
        GPUImageRotationMode inputRotation;

        GLProgram *displayProgram;
        GLint displayPositionAttribute, displayTextureCoordinateAttribute;
        GLint displayInputTextureUniform;

        CGSize inputImageSize;
        GLfloat imageVertices[8];
        GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
    }



    // Initialization and teardown
    - (void)commonInit;

    // Managing the display FBOs
    - (void)createDisplayFramebuffer;
    - (void)destroyDisplayFramebuffer;

    // Handling fill mode
    - (void)recalculateViewGeometry;

    @end

    @implementation GPUImageLayer

    @synthesize sizeInPixels = _sizeInPixels;
    @synthesize fillMode = _fillMode;
    @synthesize enabled;

    #pragma mark -
    #pragma mark Initialization and teardown


    - (instancetype)init {
    self = [super init];
    if (self) {
    [self commonInit];
    }

    return self;
    }



    - (void)commonInit;
    {
        // I believe each of these views needs a separate OpenGL context, unlike on iOS where you're rendering to an FBO in a layer
        // NSOpenGLPixelFormatAttribute pixelFormatAttributes[] = {
        //     NSOpenGLPFADoubleBuffer,
        //     NSOpenGLPFAAccelerated, 0,
        //     0
        // };
        //
        // NSOpenGLPixelFormat *pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:pixelFormatAttributes];
        // if (pixelFormat == nil)
        // {
        //     NSLog(@"Error: No appropriate pixel format found");
        // }
        // // TODO: Take into account the sharegroup
        // NSOpenGLContext *context = [[NSOpenGLContext alloc] initWithFormat:pixelFormat shareContext:[[GPUImageContext sharedImageProcessingContext] context]];
        // if (context == nil)
        // {
        //     NSAssert(NO, @"Problem creating the GPUImageView context");
        // }
        // [self setOpenGLContext:context];
        // [self setOpenGLContext:[[GPUImageContext sharedImageProcessingContext] context]];
        CGLSetCurrentContext([GPUImageContext sharedImageProcessingContext].context.CGLContextObj);
        self.asynchronous = NO;

        inputRotation = kGPUImageNoRotation;
        // self.hidden = NO;

        self.enabled = YES;

        runSynchronouslyOnVideoProcessingQueue(^{
            CGLSetCurrentContext([GPUImageContext sharedImageProcessingContext].context.CGLContextObj);
            // [self.openGLContext makeCurrentContext];
            displayProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];

            // displayProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
            if (!displayProgram.initialized)
            {
                [displayProgram addAttribute:@"position"];
                [displayProgram addAttribute:@"inputTextureCoordinate"];

                if (![displayProgram link])
                {
                    NSString *progLog = [displayProgram programLog];
                    NSLog(@"Program link log: %@", progLog);
                    NSString *fragLog = [displayProgram fragmentShaderLog];
                    NSLog(@"Fragment shader compile log: %@", fragLog);
                    NSString *vertLog = [displayProgram vertexShaderLog];
                    NSLog(@"Vertex shader compile log: %@", vertLog);
                    displayProgram = nil;
                    NSAssert(NO, @"Filter shader link failed");
                }
            }

            displayPositionAttribute = [displayProgram attributeIndex:@"position"];
            displayTextureCoordinateAttribute = [displayProgram attributeIndex:@"inputTextureCoordinate"];
            displayInputTextureUniform = [displayProgram uniformIndex:@"inputImageTexture"];

            [GPUImageContext setActiveShaderProgram:displayProgram];

            // [displayProgram use];
            glEnableVertexAttribArray(displayPositionAttribute);
            glEnableVertexAttribArray(displayTextureCoordinateAttribute);

            [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];
            _fillMode = kGPUImageFillModePreserveAspectRatio;
            [self createDisplayFramebuffer];
        });
    }

    - (void)dealloc
    {
    }

    #pragma mark -
    #pragma mark Managing the display FBOs

    - (void)createDisplayFramebuffer;
    {
    // Perhaps I'll use an FBO at some time later, but for now will render directly to the screen
    _sizeInPixels.width = self.bounds.size.width;
    _sizeInPixels.height = self.bounds.size.height;

    // NSLog(@"Backing width: %d, height: %d", backingWidth, backingHeight);
    }

    - (void)destroyDisplayFramebuffer;
    {
    CGLSetCurrentContext([GPUImageContext sharedImageProcessingContext].context.CGLContextObj);
    // [self.openGLContext makeCurrentContext];
    }

    - (void)setDisplayFramebuffer;
    {
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    glBindRenderbuffer(GL_RENDERBUFFER, 0);

    glViewport(0, 0, (GLint)_sizeInPixels.width, (GLint)_sizeInPixels.height);
    }

    - (void)presentFramebuffer;
    {
    CGLSetCurrentContext([GPUImageContext sharedImageProcessingContext].context.CGLContextObj);
    // [self.openGLContext flushBuffer];
    CGLFlushDrawable([GPUImageContext sharedImageProcessingContext].context.CGLContextObj);
    }

    - (void)setFrame:(CGRect)frame {
        [super setFrame:frame];
        if ( (_sizeInPixels.width == self.bounds.size.width) && (_sizeInPixels.height == self.bounds.size.height) )
        {
            return;
        }

        _sizeInPixels.width = self.bounds.size.width;
        _sizeInPixels.height = self.bounds.size.height;
        [self recalculateViewGeometry];
        /*
        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            [self newFrameReadyAtTime:kCMTimeInvalid atIndex:0];
        });
        */
        [self setNeedsDisplay];
    }


    #pragma mark -
    #pragma mark Handling fill mode

    - (void)recalculateViewGeometry;
    {
        // runSynchronouslyOnVideoProcessingQueue(^{
        CGFloat heightScaling, widthScaling;

        CGSize currentViewSize = self.bounds.size;

        // CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height;
        // CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height;

        CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds);

        switch(_fillMode)
        {
            case kGPUImageFillModeStretch:
            {
                widthScaling = 1.0;
                heightScaling = 1.0;
            }; break;
            case kGPUImageFillModePreserveAspectRatio:
            {
                widthScaling = insetRect.size.width / currentViewSize.width;
                heightScaling = insetRect.size.height / currentViewSize.height;
            }; break;
            case kGPUImageFillModePreserveAspectRatioAndFill:
            {
                // CGFloat widthHolder = insetRect.size.width / currentViewSize.width;
                widthScaling = currentViewSize.height / insetRect.size.height;
                heightScaling = currentViewSize.width / insetRect.size.width;
            }; break;
        }

        imageVertices[0] = -widthScaling;
        imageVertices[1] = -heightScaling;
        imageVertices[2] = widthScaling;
        imageVertices[3] = -heightScaling;
        imageVertices[4] = -widthScaling;
        imageVertices[5] = heightScaling;
        imageVertices[6] = widthScaling;
        imageVertices[7] = heightScaling;
        // });

        // static const GLfloat imageVertices[] = {
        //     -1.0f, -1.0f,
        //     1.0f, -1.0f,
        //     -1.0f, 1.0f,
        //     1.0f, 1.0f,
        // };
    }
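
    // Worked example (illustrative, not from the original gist): for a 1920x1080 input
    // displayed in a 500x500 layer with kGPUImageFillModePreserveAspectRatio,
    // AVMakeRectWithAspectRatioInsideRect returns an inset rect of 500x281.25, so
    // widthScaling = 500/500 = 1.0 and heightScaling = 281.25/500 = 0.5625; the quad is
    // squeezed vertically and the background color shows above and below it.
    // With kGPUImageFillModePreserveAspectRatioAndFill the same numbers give
    // widthScaling = 500/281.25 ≈ 1.778 and heightScaling = 500/500 = 1.0, so the quad
    // overflows horizontally and is cropped by the layer bounds.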

    - (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
    {
    backgroundColorRed = redComponent;
    backgroundColorGreen = greenComponent;
    backgroundColorBlue = blueComponent;
    backgroundColorAlpha = alphaComponent;
    }

    + (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
    {
    // static const GLfloat noRotationTextureCoordinates[] = {
    // 0.0f, 0.0f,
    // 1.0f, 0.0f,
    // 0.0f, 1.0f,
    // 1.0f, 1.0f,
    // };

    static const GLfloat noRotationTextureCoordinates[] = {
    0.0f, 1.0f,
    1.0f, 1.0f,
    0.0f, 0.0f,
    1.0f, 0.0f,
    };

    static const GLfloat rotateRightTextureCoordinates[] = {
    1.0f, 1.0f,
    1.0f, 0.0f,
    0.0f, 1.0f,
    0.0f, 0.0f,
    };

    static const GLfloat rotateLeftTextureCoordinates[] = {
    0.0f, 0.0f,
    0.0f, 1.0f,
    1.0f, 0.0f,
    1.0f, 1.0f,
    };

    static const GLfloat verticalFlipTextureCoordinates[] = {
    0.0f, 0.0f,
    1.0f, 0.0f,
    0.0f, 1.0f,
    1.0f, 1.0f,
    };

    static const GLfloat horizontalFlipTextureCoordinates[] = {
    1.0f, 1.0f,
    0.0f, 1.0f,
    1.0f, 0.0f,
    0.0f, 0.0f,
    };

    static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
    1.0f, 0.0f,
    1.0f, 1.0f,
    0.0f, 0.0f,
    0.0f, 1.0f,
    };

    static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
    1.0f, 1.0f,
    1.0f, 0.0f,
    0.0f, 1.0f,
    0.0f, 0.0f,
    };

    static const GLfloat rotate180TextureCoordinates[] = {
    1.0f, 0.0f,
    0.0f, 0.0f,
    1.0f, 1.0f,
    0.0f, 1.0f,
    };

    switch(rotationMode)
    {
    case kGPUImageNoRotation: return noRotationTextureCoordinates;
    case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;
    case kGPUImageRotateRight: return rotateRightTextureCoordinates;
    case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;
    case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;
    case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;
    case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
    case kGPUImageRotate180: return rotate180TextureCoordinates;
    }
    }

    #pragma mark -
    #pragma mark GPUInput protocol


    - (void)drawInCGLContext:(CGLContextObj)ctx pixelFormat:(CGLPixelFormatObj)pf forLayerTime:(CFTimeInterval)t displayTime:(const CVTimeStamp *)ts {

        CGLSetCurrentContext(ctx);

        NSLog(@"Draw in CGLContext");
        runSynchronouslyOnVideoProcessingQueue(^{
            // [[self openGLContext] makeCurrentContext];
            [GPUImageContext setActiveShaderProgram:displayProgram];
            [self setDisplayFramebuffer];
            [displayProgram use];

            // glMatrixMode(GL_MODELVIEW);
            // glLoadIdentity();
            //
            // glMatrixMode(GL_PROJECTION);
            // glLoadIdentity();

            glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
            glClear(GL_COLOR_BUFFER_BIT);

            glActiveTexture(GL_TEXTURE4);
            glBindTexture(GL_TEXTURE_2D, [inputFramebufferForDisplay texture]);
            glUniform1i(displayInputTextureUniform, 4);

            glVertexAttribPointer(displayPositionAttribute, 2, GL_FLOAT, 0, 0, imageVertices);
            glVertexAttribPointer(displayTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageLayer textureCoordinatesForRotation:inputRotation]);

            BOOL canLockFocus = YES;

            /*
            if ([self respondsToSelector:@selector(lockFocusIfCanDraw)])
            {
                canLockFocus = [self lockFocusIfCanDraw];
            }
            else
            {
                [self lockFocus];
            }
            */

            if (canLockFocus)
            {
                glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

                [self presentFramebuffer];
                glBindTexture(GL_TEXTURE_2D, 0);
                // [self unlockFocus];
            }

            [inputFramebufferForDisplay unlock];
            inputFramebufferForDisplay = nil;
        });
        [super drawInCGLContext:ctx pixelFormat:pf forLayerTime:t displayTime:ts];
    }



    - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
    {
        NSLog(@"New frame ready at time %f", CMTimeGetSeconds(frameTime));
        [self setNeedsDisplay];
    }


    - (NSInteger)nextAvailableTextureIndex;
    {
    return 0;
    }

    - (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
    {
    inputFramebufferForDisplay = newInputFramebuffer;
    [inputFramebufferForDisplay lock];
    }

    - (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
    {
    inputRotation = newInputRotation;
    }

    - (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
    {
        runSynchronouslyOnVideoProcessingQueue(^{
            CGSize rotatedSize = newSize;

            if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
            {
                rotatedSize.width = newSize.height;
                rotatedSize.height = newSize.width;
            }

            if (!CGSizeEqualToSize(inputImageSize, rotatedSize))
            {
                inputImageSize = rotatedSize;
                [self recalculateViewGeometry];
            }
        });
    }
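
    // Illustrative note (not from the original gist): GPUImageRotationSwapsWidthAndHeight
    // is true for the 90-degree rotation modes (rotate left/right, including the
    // rotate-right-plus-flip variants), so a 1920x1080 source delivered with
    // kGPUImageRotateRight is treated as 1080x1920 here before the view geometry is
    // recalculated; kGPUImageNoRotation, the plain flips, and kGPUImageRotate180 leave
    // the size unchanged.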

    - (CGSize)maximumOutputSize;
    {
    if ([self respondsToSelector:@selector(setContentScaleFactor:)])
    {
    CGSize pointSize = self.bounds.size;
    // TODO: Account for Retina displays
    return pointSize;
    // return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);
    }
    else
    {
    return self.bounds.size;
    }
    }

    - (void)endProcessing
    {
    }

    - (BOOL)shouldIgnoreUpdatesToThisTarget;
    {
    return NO;
    }

    - (void)conserveMemoryForNextFrame;
    {

    }

    - (BOOL)wantsMonochromeInput;
    {
    return NO;
    }

    - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
    {

    }

    #pragma mark -
    #pragma mark Accessors

    - (CGSize)sizeInPixels;
    {
    if (CGSizeEqualToSize(_sizeInPixels, CGSizeZero))
    {
    return [self maximumOutputSize];
    }
    else
    {
    return _sizeInPixels;
    }
    }

    - (void)setFillMode:(GPUImageFillModeType)newValue;
    {
        _fillMode = newValue;
        [self recalculateViewGeometry];
    }



    @end
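
    // -----------------------------------------------------------------------------
    // Usage sketch (not part of the original gist). A minimal example of how a
    // CAOpenGLLayer-based target like this one might be wired into a GPUImage chain
    // on OS X. The host view name (hostView) and the movie URL (movieURL) are
    // hypothetical; GPUImageMovie and GPUImageSepiaFilter come from the stock
    // GPUImage framework.
    //
    //     GPUImageLayer *previewLayer = [[GPUImageLayer alloc] init];
    //     previewLayer.frame = hostView.bounds;                      // size the layer to its host view
    //     previewLayer.fillMode = kGPUImageFillModePreserveAspectRatio;
    //
    //     [hostView setWantsLayer:YES];                              // back the NSView with a layer
    //     [hostView.layer addSublayer:previewLayer];
    //
    //     GPUImageMovie *movie = [[GPUImageMovie alloc] initWithURL:movieURL];
    //     GPUImageSepiaFilter *sepia = [[GPUImageSepiaFilter alloc] init];
    //     [movie addTarget:sepia];
    //     [sepia addTarget:previewLayer];                            // GPUImageLayer acts as a GPUImageInput target
    //     [movie startProcessing];
    // -----------------------------------------------------------------------------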