From 407560c06433860ac844e1f7c79133984396fe9a Mon Sep 17 00:00:00 2001
From: Brad Larson
Date: Sat, 25 Feb 2012 11:37:27 -0600
Subject: [PATCH] Made the ColorObjectTracking example functional, finally.
 It's a little slow because of the CPU-bound color averaging.

---
 .../ImageFilteringBenchmarkController.m      |  10 +
 .../project.pbxproj                          |   6 +-
 .../ColorTrackingViewController.h            |   6 +-
 .../ColorTrackingViewController.m            | 103 +++++++++--
 .../ColorObjectTracking/PositionColor.fsh    |   4 +-
 framework/GPUImage.xcodeproj/project.pbxproj |   6 +-
 framework/Source/GPUImageOutput.m            |   2 +-
 framework/Source/GPUImageRawData.h           |   3 +-
 framework/Source/GPUImageRawData.m           | 172 +++++++++++++++++-
 framework/Source/GPUImageSwirlFilter.m       |   1 -
 framework/Source/GPUImageView.m              |  41 +----
 11 files changed, 292 insertions(+), 62 deletions(-)

diff --git a/examples/BenchmarkSuite/BenchmarkSuite/ImageFilteringBenchmarkController.m b/examples/BenchmarkSuite/BenchmarkSuite/ImageFilteringBenchmarkController.m
index 928accbb2..ae2ae930e 100644
--- a/examples/BenchmarkSuite/BenchmarkSuite/ImageFilteringBenchmarkController.m
+++ b/examples/BenchmarkSuite/BenchmarkSuite/ImageFilteringBenchmarkController.m
@@ -94,6 +94,16 @@ - (UIImage *)imageProcessedOnCPU:(UIImage *)imageToProcess;
 
 - (UIImage *)imageProcessedUsingCoreImage:(UIImage *)imageToProcess;
 {
+    /*
+     NSArray *filterNames = [CIFilter filterNamesInCategory:kCICategoryBuiltIn];
+
+     NSLog(@"Built in filters");
+     for (NSString *currentFilterName in filterNames)
+     {
+         NSLog(@"%@", currentFilterName);
+     }
+     */
+
     CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent();
 
     CIImage *inputImage = [[CIImage alloc] initWithCGImage:imageToProcess.CGImage];

diff --git a/examples/ColorObjectTracking/ColorObjectTracking.xcodeproj/project.pbxproj b/examples/ColorObjectTracking/ColorObjectTracking.xcodeproj/project.pbxproj
index 1ef7061d4..98581b506 100644
--- a/examples/ColorObjectTracking/ColorObjectTracking.xcodeproj/project.pbxproj
+++ b/examples/ColorObjectTracking/ColorObjectTracking.xcodeproj/project.pbxproj
@@ -60,7 +60,7 @@
 		BC245DED14DDC959009FE7EB /* ColorTrackingAppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ColorTrackingAppDelegate.h; sourceTree = "<group>"; };
 		BC245DEE14DDC959009FE7EB /* ColorTrackingAppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ColorTrackingAppDelegate.m; sourceTree = "<group>"; };
 		BC245E0C14DDCA1F009FE7EB /* GPUImage.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = GPUImage.xcodeproj; path = ../../framework/GPUImage.xcodeproj; sourceTree = "<group>"; };
-		BCB5DD8214E85CB0000AF3C2 /* PositionColor.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; path = PositionColor.fsh; sourceTree = "<group>"; };
+		BCB5DD8214E85CB0000AF3C2 /* PositionColor.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; path = PositionColor.fsh; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.glsl; };
 		BCB5E74414DDCF4F00701302 /* ColorTrackingViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ColorTrackingViewController.h; sourceTree = "<group>"; };
 		BCB5E74514DDCF4F00701302 /* ColorTrackingViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ColorTrackingViewController.m; sourceTree = "<group>"; };
 		BCB5E74714DDD1B300701302 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
@@ -68,7 +68,7 @@
 		BCB5E7A514E346F100701302 /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; };
 		BCB5E7A714E3471100701302 /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
 		BCB5E7A914E3472100701302 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
-		BCB5E7B114E460D300701302 /* Threshold.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; path = Threshold.fsh; sourceTree = "<group>"; };
+		BCB5E7B114E460D300701302 /* Threshold.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; path = Threshold.fsh; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.glsl; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -336,6 +336,7 @@
 			buildSettings = {
 				GCC_PRECOMPILE_PREFIX_HEADER = YES;
 				GCC_PREFIX_HEADER = "ColorObjectTracking/ColorObjectTracking-Prefix.pch";
+				GCC_THUMB_SUPPORT = "";
 				INFOPLIST_FILE = "ColorObjectTracking/ColorObjectTracking-Info.plist";
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				WRAPPER_EXTENSION = app;
@@ -347,6 +348,7 @@
 			buildSettings = {
 				GCC_PRECOMPILE_PREFIX_HEADER = YES;
 				GCC_PREFIX_HEADER = "ColorObjectTracking/ColorObjectTracking-Prefix.pch";
+				GCC_THUMB_SUPPORT = "";
 				INFOPLIST_FILE = "ColorObjectTracking/ColorObjectTracking-Info.plist";
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				WRAPPER_EXTENSION = app;

diff --git a/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h b/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h
index 403447ca7..ca66ede1f 100644
--- a/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h
+++ b/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h
@@ -3,12 +3,13 @@
 
 typedef enum { PASSTHROUGH_VIDEO, SIMPLE_THRESHOLDING, POSITION_THRESHOLDING, OBJECT_TRACKING} ColorTrackingDisplayMode;
 
-@interface ColorTrackingViewController : UIViewController
+@interface ColorTrackingViewController : UIViewController <GPUImageRawDataProcessor>
 {
     CALayer *trackingDot;
 
     GPUImageVideoCamera *videoCamera;
     GPUImageFilter *rotationFilter, *thresholdFilter, *positionFilter;
+    GPUImageRawData *positionRawData, *videoRawData;
     GPUImageView *filteredVideoView;
 
     ColorTrackingDisplayMode displayMode;
@@ -23,4 +24,7 @@ typedef enum { PASSTHROUGH_VIDEO, SIMPLE_THRESHOLDING, POSITION_THRESHOLDING, OB
 - (void)configureToolbar;
 - (void)configureTrackingDot;
 
+// Image processing
+- (CGPoint)centroidFromTexture:(GLubyte *)pixels ofSize:(CGSize)textureSize;
+
 @end

diff --git a/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m b/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m
index 49f2e56f7..6e5664961 100644
--- a/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m
+++ b/examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m
@@ -51,19 +51,34 @@ - (void)configureVideoFiltering;
     videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
     filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, mainScreenFrame.size.width, mainScreenFrame.size.height)];
     [self.view addSubview:filteredVideoView];
-    
+
     thresholdFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Threshold"];
-    [thresholdFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
+    [thresholdFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
     [thresholdFilter setFloatVec3:thresholdColor forUniform:@"inputColor"];
 
     positionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"PositionColor"];
     [positionFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
    [positionFilter setFloatVec3:thresholdColor forUniform:@"inputColor"];
 
     rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
-    // videoCamera -> thresholdFilter -> filteredVideoView
+//    CGSize videoPixelSize = filteredVideoView.bounds.size;
+//    videoPixelSize.width *= [filteredVideoView contentScaleFactor];
+//    videoPixelSize.height *= [filteredVideoView contentScaleFactor];
+
+    CGSize videoPixelSize = CGSizeMake(480.0, 640.0);
+
+    positionRawData = [[GPUImageRawData alloc] initWithImageSize:videoPixelSize];
+    positionRawData.delegate = self;
+
+    videoRawData = [[GPUImageRawData alloc] initWithImageSize:videoPixelSize];
+    videoRawData.delegate = self;
+
     [videoCamera addTarget:rotationFilter];
     [rotationFilter addTarget:filteredVideoView];
-    
+    [rotationFilter addTarget:videoRawData];
+//    [rotationFilter addTarget:positionFilter];
+//    [positionFilter addTarget:filteredVideoView];
+//    [positionFilter addTarget:videoRawData];
+
     [videoCamera startCameraCapture];
 }
@@ -104,6 +119,8 @@ - (void)configureTrackingDot;
 //    [glView.layer addSublayer:trackingDot];
     trackingDot.position = CGPointMake(100.0f, 100.0f);
     trackingDot.opacity = 0.0f;
+    
+    [self.view.layer addSublayer:trackingDot];
 }
 
 - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
@@ -121,12 +138,7 @@ - (void)handleSwitchOfDisplayMode:(id)sender;
     
     if (newDisplayMode != displayMode)
     {
-        switch (displayMode)
-        {
-            case SIMPLE_THRESHOLDING: [thresholdFilter removeTarget:filteredVideoView]; break;
-            case POSITION_THRESHOLDING: [positionFilter removeTarget:filteredVideoView]; break;
-            default: break;
-        }
+        displayMode = newDisplayMode;
         if (displayMode == OBJECT_TRACKING)
         {
             trackingDot.opacity = 1.0f;
@@ -136,9 +148,10 @@ - (void)handleSwitchOfDisplayMode:(id)sender;
             trackingDot.opacity = 0.0f;
         }
 
-        displayMode = newDisplayMode;
         [rotationFilter removeAllTargets];
-        
+        [positionFilter removeAllTargets];
+        [thresholdFilter removeAllTargets];
+        [rotationFilter addTarget:videoRawData];
 
         switch(displayMode)
         {
@@ -160,11 +173,74 @@ - (void)handleSwitchOfDisplayMode:(id)sender;
             {
                 [rotationFilter addTarget:filteredVideoView];
                 [rotationFilter addTarget:positionFilter];
+                [positionFilter addTarget:positionRawData];
             }; break;
         }
     }
 }
 
+#pragma mark -
+#pragma mark Image processing
+
+- (CGPoint)centroidFromTexture:(GLubyte *)pixels ofSize:(CGSize)textureSize;
+{
+    CGFloat currentXTotal = 0.0f, currentYTotal = 0.0f, currentPixelTotal = 0.0f;
+    
+    for (NSUInteger currentPixel = 0; currentPixel < (textureSize.width * textureSize.height); currentPixel++)
+    {
+        currentXTotal += (CGFloat)pixels[currentPixel * 4] / 255.0f;
+        currentYTotal += (CGFloat)pixels[(currentPixel * 4) + 1] / 255.0f;
+        currentPixelTotal += (CGFloat)pixels[(currentPixel * 4) + 3] / 255.0f;
+    }
+    
+    return CGPointMake(currentXTotal / currentPixelTotal, currentYTotal / currentPixelTotal);
+}
+
+#pragma mark -
+#pragma mark GPUImageRawDataProcessor protocol
+
+- (void)newImageFrameAvailableFromDataSource:(GPUImageRawData *)rawDataSource;
+{
+    if (rawDataSource == positionRawData)
+    {
+        GLubyte *bytesForPositionData = rawDataSource.rawBytesForImage;
+        CGPoint currentTrackingLocation = [self centroidFromTexture:bytesForPositionData ofSize:[rawDataSource maximumOutputSize]];
+        CGSize currentViewSize = self.view.bounds.size;
+        trackingDot.position = CGPointMake(currentTrackingLocation.x * currentViewSize.width, currentTrackingLocation.y * currentViewSize.height);
+    }
+    else
+    {
+        if (shouldReplaceThresholdColor)
+        {
+            CGSize currentViewSize = self.view.bounds.size;
+            CGSize rawPixelsSize = [rawDataSource maximumOutputSize];
+            
+            
+            CGPoint scaledTouchPoint;
+            scaledTouchPoint.x = (currentTouchPoint.x / currentViewSize.width) * rawPixelsSize.width;
+            scaledTouchPoint.y = (currentTouchPoint.y / currentViewSize.height) * rawPixelsSize.height;
+            
+            GPUByteColorVector colorAtTouchPoint = [rawDataSource colorAtLocation:scaledTouchPoint];
+            
+            thresholdColor[0] = (float)colorAtTouchPoint.red / 255.0;
+            thresholdColor[1] = (float)colorAtTouchPoint.green / 255.0;
+            thresholdColor[2] = (float)colorAtTouchPoint.blue / 255.0;
+            
+//            NSLog(@"Color at touch point: %d, %d, %d, %d", colorAtTouchPoint.red, colorAtTouchPoint.green, colorAtTouchPoint.blue, colorAtTouchPoint.alpha);
+            
+            [[NSUserDefaults standardUserDefaults] setFloat:thresholdColor[0] forKey:@"thresholdColorR"];
+            [[NSUserDefaults standardUserDefaults] setFloat:thresholdColor[1] forKey:@"thresholdColorG"];
+            [[NSUserDefaults standardUserDefaults] setFloat:thresholdColor[2] forKey:@"thresholdColorB"];
+            
+            [thresholdFilter setFloatVec3:thresholdColor forUniform:@"inputColor"];
+            [positionFilter setFloatVec3:thresholdColor forUniform:@"inputColor"];
+            
+            shouldReplaceThresholdColor = NO;
+        }
+    }
+
+}
+
 #pragma mark -
 #pragma mark Touch handling
@@ -181,6 +257,9 @@ - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event;
     
     thresholdSensitivity = distanceMoved / 160.0f;
     [[NSUserDefaults standardUserDefaults] setFloat:thresholdSensitivity forKey:@"thresholdSensitivity"];
+    
+    [thresholdFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
+    [positionFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
 }
 
 - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event

diff --git a/examples/ColorObjectTracking/ColorObjectTracking/PositionColor.fsh b/examples/ColorObjectTracking/ColorObjectTracking/PositionColor.fsh
index 95587826f..39556ffaa 100644
--- a/examples/ColorObjectTracking/ColorObjectTracking/PositionColor.fsh
+++ b/examples/ColorObjectTracking/ColorObjectTracking/PositionColor.fsh
@@ -29,7 +29,7 @@ vec4 maskPixel(vec3 pixelColor, vec3 maskColor)
 vec4 coordinateMask(vec4 maskColor, vec2 coordinate)
 {
     // Return this vector weighted by the mask value
-    return maskColor * vec4(coordinate, vec2(1.0));
+    return maskColor * vec4(coordinate, vec2(0.0, 1.0));
 }
 
 void main()
@@ -40,6 +40,6 @@ void main()
     pixelColor = texture2D(inputImageTexture, textureCoordinate);
     maskedColor = maskPixel(pixelColor.rgb, inputColor);
     coordinateColor = coordinateMask(maskedColor, textureCoordinate);
-    
+
     gl_FragColor = coordinateColor;
 }
\ No newline at end of file

diff --git a/framework/GPUImage.xcodeproj/project.pbxproj b/framework/GPUImage.xcodeproj/project.pbxproj
index eefee87ed..55a482c37 100644
--- a/framework/GPUImage.xcodeproj/project.pbxproj
+++ b/framework/GPUImage.xcodeproj/project.pbxproj
@@ -275,7 +275,7 @@
 			name = Sources;
 			sourceTree = "<group>";
 		};
-		BCB5E78214E232D600701302 /* Display */ = {
+		BCB5E78214E232D600701302 /* Outputs */ = {
 			isa = PBXGroup;
 			children = (
 				BCB5E75A14E2086300701302 /* GPUImageView.h */,
@@ -283,7 +283,7 @@
 				BC1B715514F49DAA00ACA2AB /* GPUImageRawData.h */,
 				BC1B715614F49DAA00ACA2AB /* GPUImageRawData.m */,
 			);
-			name = Display;
+			name = Outputs;
 			sourceTree = "<group>";
 		};
 		BCF1A32914DDB1EC00852800 = {
@@ -332,7 +332,7 @@
 				BCB5E76B14E20AD700701302 /* GPUImageOpenGLESContext.m */,
 				BCB5E78114E232BC00701302 /* Sources */,
 				BC245DC314DDBE6B009FE7EB /* Filters */,
-				BCB5E78214E232D600701302 /* Display */,
+				BCB5E78214E232D600701302 /* Outputs */,
 				BCF1A33A14DDB1EC00852800 /* Supporting Files */,
 			);
 			path = GPUImage;

diff --git a/framework/Source/GPUImageOutput.m b/framework/Source/GPUImageOutput.m
index ccefcba7c..968f26f9f 100644
--- a/framework/Source/GPUImageOutput.m
+++ b/framework/Source/GPUImageOutput.m
@@ -50,8 +50,8 @@ - (void)removeAllTargets;
     {
         [targetToRemove setInputSize:CGSizeZero];
         [targetToRemove setInputTexture:0];
-        [targets removeObject:targetToRemove];
     }
+    [targets removeAllObjects];
 }
 
 #pragma mark -

diff --git a/framework/Source/GPUImageRawData.h b/framework/Source/GPUImageRawData.h
index 4f696106d..920970b9b 100644
--- a/framework/Source/GPUImageRawData.h
+++ b/framework/Source/GPUImageRawData.h
@@ -5,7 +5,7 @@ struct GPUByteColorVector {
     GLubyte red;
     GLubyte green;
     GLubyte blue;
-    GLubyte alphe;
+    GLubyte alpha;
 };
 typedef struct GPUByteColorVector GPUByteColorVector;
 
@@ -15,7 +15,6 @@ typedef struct GPUByteColorVector GPUByteColorVector;
 @property(readwrite, unsafe_unretained, nonatomic) id delegate;
 
 @property(readonly) GLubyte *rawBytesForImage;
-@property(readonly) GLint openGLTexture;
 
 // Initialization and teardown
 - (id)initWithImageSize:(CGSize)newImageSize;

diff --git a/framework/Source/GPUImageRawData.m b/framework/Source/GPUImageRawData.m
index c576316d9..ab3a0e8c3 100644
--- a/framework/Source/GPUImageRawData.m
+++ b/framework/Source/GPUImageRawData.m
@@ -1,10 +1,44 @@
 #import "GPUImageRawData.h"
+
+#import "GPUImageOpenGLESContext.h"
+#import "GLProgram.h"
+#import "GPUImageFilter.h"
+
+NSString *const kGPUImageDataFragmentShaderString = SHADER_STRING
+(
+ varying highp vec2 textureCoordinate;
+ 
+ uniform sampler2D inputImageTexture;
+ 
+ void main()
+ {
+     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+ }
+);
+
 
 @interface GPUImageRawData ()
 {
     CGSize imageSize;
     BOOL hasReadFromTheCurrentFrame;
+    
+    GLuint dataFramebuffer, dataRenderbuffer;
+    
+    GLuint inputTextureForDisplay;
+    
+    GLProgram *dataProgram;
+    GLint dataPositionAttribute, dataTextureCoordinateAttribute;
+    GLint dataInputTextureUniform;
 }
+
+// Frame rendering
+- (void)createDataFBO;
+- (void)destroyDataFBO;
+- (void)setFilterFBO;
+- (void)presentFramebuffer;
+
+- (void)renderAtInternalSize;
+
 @end
 
 @implementation GPUImageRawData
@@ -20,10 +54,35 @@ - (id)initWithImageSize:(CGSize)newImageSize;
     }
     
     imageSize = newImageSize;
-    
     hasReadFromTheCurrentFrame = NO;
     _rawBytesForImage = NULL;
 
+    [GPUImageOpenGLESContext useImageProcessingContext];
+    dataProgram = [[GLProgram alloc] initWithVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageDataFragmentShaderString];
+    
+    [dataProgram addAttribute:@"position"];
+    [dataProgram addAttribute:@"inputTextureCoordinate"];
+    
+    if (![dataProgram link])
+    {
+        NSString *progLog = [dataProgram programLog];
+        NSLog(@"Program link log: %@", progLog);
+        NSString *fragLog = [dataProgram fragmentShaderLog];
+        NSLog(@"Fragment shader compile log: %@", fragLog);
+        NSString *vertLog = [dataProgram vertexShaderLog];
+        NSLog(@"Vertex shader compile log: %@", vertLog);
+        dataProgram = nil;
+        NSAssert(NO, @"Filter shader link failed");
+    }
+    
+    dataPositionAttribute = [dataProgram attributeIndex:@"position"];
+    dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"];
+    dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"];
+    
+    [dataProgram use];
+    glEnableVertexAttribArray(dataPositionAttribute);
+    glEnableVertexAttribArray(dataTextureCoordinateAttribute);
+    
     return self;
 }
@@ -36,18 +95,119 @@ - (void)dealloc
     {
     }
 }
 
+#pragma mark -
+#pragma mark Frame rendering
+
+- (void)createDataFBO;
+{
+    glActiveTexture(GL_TEXTURE1);
+    glGenFramebuffers(1, &dataFramebuffer);
+    glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer);
+
+    glGenRenderbuffers(1, &dataRenderbuffer);
+    glBindRenderbuffer(GL_RENDERBUFFER, dataRenderbuffer);
+
+    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, (int)imageSize.width, (int)imageSize.height);
+    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, dataRenderbuffer);
+
+    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
+
+    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
+}
+
+- (void)destroyDataFBO;
+{
+    if (dataFramebuffer)
+    {
+        glDeleteFramebuffers(1, &dataFramebuffer);
+        dataFramebuffer = 0;
+    }
+
+    if (dataRenderbuffer)
+    {
+        glDeleteRenderbuffers(1, &dataRenderbuffer);
+        dataRenderbuffer = 0;
+    }
+}
+
+- (void)setFilterFBO;
+{
+    if (!dataFramebuffer)
+    {
+        [self createDataFBO];
+    }
+
+    glBindFramebuffer(GL_FRAMEBUFFER, dataFramebuffer);
+
+    glViewport(0, 0, (int)imageSize.width, (int)imageSize.height);
+}
+
+- (void)presentFramebuffer;
+{
+    glBindRenderbuffer(GL_RENDERBUFFER, dataRenderbuffer);
+    [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] presentBufferForDisplay];
+}
+
 #pragma mark -
 #pragma mark Data access
 
+- (void)renderAtInternalSize;
+{
+    [GPUImageOpenGLESContext useImageProcessingContext];
+    [self setFilterFBO];
+
+    [dataProgram use];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+
+    static const GLfloat squareVertices[] = {
+        -1.0f, -1.0f,
+        1.0f, -1.0f,
+        -1.0f,  1.0f,
+        1.0f,  1.0f,
+    };
+
+    static const GLfloat textureCoordinates[] = {
+        0.0f, 1.0f,
+        1.0f, 1.0f,
+        0.0f, 0.0f,
+        1.0f, 0.0f,
+    };
+
+    glActiveTexture(GL_TEXTURE4);
+    glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay);
+    glUniform1i(dataInputTextureUniform, 4);
+
+    glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
+    glVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
+
+    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+
+    [self presentFramebuffer];
+}
+
 - (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
 {
     GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage;
+//    NSLog(@"Row start");
+//    for (unsigned int currentXPosition = 0; currentXPosition < (imageSize.width * 2.0); currentXPosition++)
+//    {
+//        GPUByteColorVector byteAtPosition = imageColorBytes[currentXPosition];
+//        NSLog(@"%d - %d, %d, %d", currentXPosition, byteAtPosition.red, byteAtPosition.green, byteAtPosition.blue);
+//    }
+//    NSLog(@"Row end");
+
+//    GPUByteColorVector byteAtOne = imageColorBytes[1];
+//    GPUByteColorVector byteAtWidth = imageColorBytes[(int)imageSize.width - 3];
+//    GPUByteColorVector byteAtHeight = imageColorBytes[(int)(imageSize.height - 1) * (int)imageSize.width];
+//    NSLog(@"Byte 1: %d, %d, %d, byte 2: %d, %d, %d, byte 3: %d, %d, %d", byteAtOne.red, byteAtOne.green, byteAtOne.blue, byteAtWidth.red, byteAtWidth.green, byteAtWidth.blue, byteAtHeight.red, byteAtHeight.green, byteAtHeight.blue);
 
     CGPoint locationToPickFrom = CGPointZero;
     locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));
-    locationToPickFrom.y = MIN(MAX(locationInImage.y, 0.0), (imageSize.height - 1.0));
+    locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));
 
-    return imageColorBytes[(int)(round(locationToPickFrom.x * locationToPickFrom.y))];
+    return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
 }
 
 #pragma mark -
@@ -62,12 +222,11 @@ - (void)newFrameReady;
 
 - (void)setInputTexture:(GLuint)newInputTexture;
 {
-    _openGLTexture = newInputTexture;
+    inputTextureForDisplay = newInputTexture;
 }
 
 - (void)setInputSize:(CGSize)newSize;
 {
-    
 }
 
 - (CGSize)maximumOutputSize;
@@ -79,7 +238,6 @@ - (CGSize)maximumOutputSize;
 #pragma mark Accessors
 
 @synthesize rawBytesForImage = _rawBytesForImage;
-@synthesize openGLTexture = _openGLTexture;
 @synthesize delegate = _delegate;
 
 - (GLubyte *)rawBytesForImage;
@@ -97,7 +255,7 @@ - (GLubyte *)rawBytesForImage;
     else
     {
         [GPUImageOpenGLESContext useImageProcessingContext];
-        // This might require a re-rendering of the previous frame in order for the reading of the pixels to work
+        [self renderAtInternalSize];
         glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);
 
         return _rawBytesForImage;

diff --git a/framework/Source/GPUImageSwirlFilter.m b/framework/Source/GPUImageSwirlFilter.m
index e47baae7b..f5d0818f9 100644
--- a/framework/Source/GPUImageSwirlFilter.m
+++ b/framework/Source/GPUImageSwirlFilter.m
@@ -20,7 +20,6 @@ void main()
 if (dist < radius)
 {
     highp float percent = (radius - dist) / radius;
-//     highp float theta = percent * angle;
     highp float theta = percent * percent * angle * 8.0;
     highp float s = sin(theta);
     highp float c = cos(theta);

diff --git a/framework/Source/GPUImageView.m b/framework/Source/GPUImageView.m
index 6d2e611be..7adc2c04b 100644
--- a/framework/Source/GPUImageView.m
+++ b/framework/Source/GPUImageView.m
@@ -4,31 +4,17 @@
 #import "GPUImageOpenGLESContext.h"
 #import "GPUImageFilter.h"
 
-/* Display fragment shader string
-
-varying highp vec2 textureCoordinate;
-
-uniform sampler2D inputImageTexture;
-
-void main()
-{
-    gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
-}
-
-*/
+NSString *const kGPUImageDisplayFragmentShaderString = SHADER_STRING
+(
+ varying highp vec2 textureCoordinate;
 
-//NSString *const kGPUImageDisplayFragmentShaderString = @"varying highp vec2 textureCoordinate;\nuniform sampler2D inputImageTexture;\nvoid main()\n{\ngl_FragColor = vec4(textureCoordinate.r,textureCoordinate.g,0.0,1.0);\n}";
-//@"varying highp vec2 textureCoordinate;\nuniform sampler2D inputImageTexture;\nvoid main()\n{\ngl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n}";
-NSString *const kGPUImageDisplayFragmentShaderString =
-@"varying highp vec2 textureCoordinate;\
-\
-uniform sampler2D inputImageTexture;\
-\
-void main()\
-{\
-    gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\
-}";
-
+ uniform sampler2D inputImageTexture;
+ 
+ void main()
+ {
+     gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
+ }
+);
 
 #pragma mark -
 #pragma mark Private methods and instance variables
@@ -216,13 +202,6 @@ - (void)newFrameReady;
         1.0f, 0.0f,
     };
 
-//    static const GLfloat textureCoordinates[] = {
-//        1.0f, 1.0f,
-//        1.0f, 0.0f,
-//        0.0f, 1.0f,
-//        0.0f, 0.0f,
-//    };
-    
     glActiveTexture(GL_TEXTURE4);
     glBindTexture(GL_TEXTURE_2D, inputTextureForDisplay);
     glUniform1i(displayInputTextureUniform, 4);