Skip to content

Commit

Permalink
Refined cache purging, protected against threading issues, and made the camera rotate itself internally.
Browse files Browse the repository at this point in the history
  • Loading branch information
BradLarson committed Mar 18, 2014
1 parent ec7981e commit 137d59b
Show file tree
Hide file tree
Showing 22 changed files with 389 additions and 183 deletions.
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -588,9 +588,12 @@ There are a couple of ways to process a still image and create a result. The fir
GPUImageSepiaFilter *stillImageFilter = [[GPUImageSepiaFilter alloc] init];

[stillImageSource addTarget:stillImageFilter];
[stillImageFilter useNextFrameForImageCapture];
[stillImageSource processImage];

UIImage *currentFilteredVideoFrame = [stillImageFilter imageFromCurrentlyProcessedOutput];
UIImage *currentFilteredVideoFrame = [stillImageFilter imageFromCurrentFramebuffer];

Note that for a manual capture of an image from a filter, you need to set -useNextFrameForImageCapture in order to tell the filter that you'll be needing to capture from it later. By default, GPUImage reuses framebuffers within filters to conserve memory, so if you need to hold on to a filter's framebuffer for manual image capture, you need to let it know ahead of time.

For single filters that you wish to apply to an image, you can simply do the following:

Expand Down
1 change: 1 addition & 0 deletions examples/iOS/CubeExample/Classes/ES2Renderer.m
Original file line number Diff line number Diff line change
Expand Up @@ -484,6 +484,7 @@ - (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureO
textureForCubeFace = callbackTextureOutput.texture;

[self renderByRotatingAroundX:0.0 rotatingAroundY:0.0];
[callbackTextureOutput doneWithTexture];
});
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1422,7 +1422,6 @@ - (void)setupFilter;
}
else if (filterType == GPUIMAGE_BUFFER)
{

GPUImageDifferenceBlendFilter *blendFilter = [[GPUImageDifferenceBlendFilter alloc] init];

[videoCamera removeTarget:filter];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
BCC1E6CD1523E3C50006EFA5 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCC1E6C91523E3C50006EFA5 /* QuartzCore.framework */; };
BCC1E6D01523E4780006EFA5 /* PhotoViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = BCC1E6CF1523E4780006EFA5 /* PhotoViewController.m */; };
BCC1E6DA1523E9DC0006EFA5 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = BCC1E6D91523E9DB0006EFA5 /* AVFoundation.framework */; };
BCC9004B18D7D4F100A5A5C7 /* Lambeau.jpg in Resources */ = {isa = PBXBuildFile; fileRef = BCC9004A18D7D4F100A5A5C7 /* Lambeau.jpg */; };
BCF867371725A72000912E34 /* libGPUImage.a in Frameworks */ = {isa = PBXBuildFile; fileRef = BCF867321725A70500912E34 /* libGPUImage.a */; };
E5066F561855AA1A008C7682 /* [email protected] in Resources */ = {isa = PBXBuildFile; fileRef = E5066F551855AA1A008C7682 /* [email protected] */; };
/* End PBXBuildFile section */
Expand Down Expand Up @@ -78,6 +79,7 @@
BCC1E6CE1523E4780006EFA5 /* PhotoViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = PhotoViewController.h; sourceTree = "<group>"; };
BCC1E6CF1523E4780006EFA5 /* PhotoViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = PhotoViewController.m; sourceTree = "<group>"; };
BCC1E6D91523E9DB0006EFA5 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
BCC9004A18D7D4F100A5A5C7 /* Lambeau.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; name = Lambeau.jpg; path = ../../SimpleImageFilter/SimpleImageFilter/Lambeau.jpg; sourceTree = "<group>"; };
E5066F551855AA1A008C7682 /* [email protected] */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "[email protected]"; sourceTree = "<group>"; };
/* End PBXFileReference section */

Expand Down Expand Up @@ -152,6 +154,7 @@
BCC1E6A71523E3620006EFA5 /* Supporting Files */ = {
isa = PBXGroup;
children = (
BCC9004A18D7D4F100A5A5C7 /* Lambeau.jpg */,
BCB3C42816BCA81D003D26B0 /* Icon-72.png */,
BCB3C42916BCA81D003D26B0 /* [email protected] */,
BCB3C42A16BCA81D003D26B0 /* Icon.png */,
Expand Down Expand Up @@ -251,6 +254,7 @@
buildActionMask = 2147483647;
files = (
BCC1E6AB1523E3620006EFA5 /* InfoPlist.strings in Resources */,
BCC9004B18D7D4F100A5A5C7 /* Lambeau.jpg in Resources */,
BCB3C42D16BCA81D003D26B0 /* Icon-72.png in Resources */,
BCB3C42E16BCA81D003D26B0 /* [email protected] in Resources */,
BCB3C42F16BCA81D003D26B0 /* Icon.png in Resources */,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
GPUImageOutput<GPUImageInput> *filter, *secondFilter, *terminalFilter;
UISlider *filterSettingsSlider;
UIButton *photoCaptureButton;

GPUImagePicture *memoryPressurePicture1, *memoryPressurePicture2;
}

- (IBAction)updateSliderValue:(id)sender;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,9 @@ - (void)viewDidLoad
// [terminalFilter prepareForImageCapture];

[stillCamera addTarget:filter];

GPUImageView *filterView = (GPUImageView *)self.view;
// [filter addTarget:filterView];
[filter addTarget:filterView];
// [terminalFilter addTarget:filterView];

Expand All @@ -78,6 +80,11 @@ - (void)viewDidLoad
// [stillCamera.inputCamera unlockForConfiguration];

[stillCamera startCameraCapture];

// UIImage *inputImage = [UIImage imageNamed:@"Lambeau.jpg"];
// memoryPressurePicture1 = [[GPUImagePicture alloc] initWithImage:inputImage];
//
// memoryPressurePicture2 = [[GPUImagePicture alloc] initWithImage:inputImage];
}

- (void)viewDidUnload
Expand All @@ -101,16 +108,13 @@ - (IBAction)takePhoto:(id)sender;
{
[photoCaptureButton setEnabled:NO];

// [stillCamera capturePhotoAsJPEGProcessedUpToFilter:terminalFilter withCompletionHandler:^(NSData *processedJPEG, NSError *error){
[stillCamera capturePhotoAsJPEGProcessedUpToFilter:filter withCompletionHandler:^(NSData *processedJPEG, NSError *error){

// Save to assets library
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
// report_memory(@"After asset library creation");

[library writeImageDataToSavedPhotosAlbum:processedJPEG metadata:stillCamera.currentCaptureMetadata completionBlock:^(NSURL *assetURL, NSError *error2)
{
// report_memory(@"After writing to library");
if (error2) {
NSLog(@"ERROR: the image failed to be written");
}
Expand All @@ -119,7 +123,6 @@ - (IBAction)takePhoto:(id)sender;
}

runOnMainQueueWithoutDeadlocking(^{
// report_memory(@"Operation completed");
[photoCaptureButton setEnabled:YES];
});
}];
Expand Down
68 changes: 36 additions & 32 deletions framework/Source/GPUImageAverageColor.m
Original file line number Diff line number Diff line change
Expand Up @@ -169,38 +169,42 @@ - (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteg

// Reads back the final reduction stage's pixels and reports the normalized
// average RGBA color via the completion block.
// NOTE: the rendered diff contained both the old unsynchronized body and the
// new queue-synchronized body back to back, which would perform the readback
// and averaging twice per frame; only the synchronized version is kept.
- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
{
    // Run on the shared video-processing queue so the GL context is not
    // touched concurrently from multiple threads.
    runSynchronouslyOnVideoProcessingQueue(^{
        // we need a normal color texture for averaging the color values
        NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture internal format for this filter must be GL_RGBA.");
        NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
        
        NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);
        
        // Lazily allocate the CPU-side readback buffer once and reuse it on
        // subsequent frames (4 bytes per pixel for RGBA8).
        if (rawImagePixels == NULL)
        {
            rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);
        }
        
        // Make sure the correct context and framebuffer are bound before the
        // glReadPixels call — callers may arrive here from arbitrary threads.
        [GPUImageContext useImageProcessingContext];
        [outputFramebuffer activateFramebuffer];
        glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
        
        // Accumulate per-channel sums over every pixel of the final stage.
        NSUInteger redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
        NSUInteger byteIndex = 0;
        for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
        {
            redTotal += rawImagePixels[byteIndex++];
            greenTotal += rawImagePixels[byteIndex++];
            blueTotal += rawImagePixels[byteIndex++];
            alphaTotal += rawImagePixels[byteIndex++];
        }
        
        // Normalize each channel sum to [0, 1]: divide by the pixel count,
        // then by the 255 maximum of an 8-bit channel.
        CGFloat normalizedRedTotal = (CGFloat)redTotal / (CGFloat)totalNumberOfPixels / 255.0;
        CGFloat normalizedGreenTotal = (CGFloat)greenTotal / (CGFloat)totalNumberOfPixels / 255.0;
        CGFloat normalizedBlueTotal = (CGFloat)blueTotal / (CGFloat)totalNumberOfPixels / 255.0;
        CGFloat normalizedAlphaTotal = (CGFloat)alphaTotal / (CGFloat)totalNumberOfPixels / 255.0;
        
        if (_colorAverageProcessingFinishedBlock != NULL)
        {
            _colorAverageProcessingFinishedBlock(normalizedRedTotal, normalizedGreenTotal, normalizedBlueTotal, normalizedAlphaTotal, frameTime);
        }
    });
}

@end
9 changes: 9 additions & 0 deletions framework/Source/GPUImageFilter.m
Original file line number Diff line number Diff line change
Expand Up @@ -362,6 +362,15 @@ - (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
// Release our hold so it can return to the cache immediately upon processing
[[self framebufferForOutput] unlock];

if (usingNextFrameForImageCapture)
{
// usingNextFrameForImageCapture = NO;
}
else
{
[self removeOutputFramebuffer];
}

// Trigger processing last, so that our unlock comes first in serial execution, avoiding the need for a callback
for (id<GPUImageInput> currentTarget in targets)
{
Expand Down
1 change: 1 addition & 0 deletions framework/Source/GPUImageFramebuffer.h
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ typedef struct GPUTextureOptions {
// Initialization and teardown
- (id)initWithSize:(CGSize)framebufferSize;
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;

// Usage
- (void)activateFramebuffer;
Expand Down
80 changes: 54 additions & 26 deletions framework/Source/GPUImageFramebuffer.m
Original file line number Diff line number Diff line change
Expand Up @@ -45,13 +45,12 @@ - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fbo
referenceCountingDisabled = NO;
_missingFramebuffer = onlyGenerateTexture;

NSLog(@"Creating framebuffer: %@ at size %f, %f", self, _size.width, _size.height);

if (_missingFramebuffer)
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];
[self generateTexture];
framebuffer = 0;
});
}
else
Expand All @@ -61,6 +60,32 @@ - (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fbo
return self;
}

// Wraps an externally owned GL texture in a framebuffer object of the given
// size. Reference counting is disabled because the caller — not the cache —
// owns the texture's lifetime; no FBO or texture storage is generated here.
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
{
    self = [super init];
    if (self == nil)
    {
        return nil;
    }
    
    // Same defaults used by the other initializers: bilinear filtering,
    // edge clamping, 8-bit BGRA input uploaded as RGBA.
    GPUTextureOptions overrideTextureOptions = {
        .minFilter = GL_LINEAR,
        .magFilter = GL_LINEAR,
        .wrapS = GL_CLAMP_TO_EDGE,
        .wrapT = GL_CLAMP_TO_EDGE,
        .internalFormat = GL_RGBA,
        .format = GL_BGRA,
        .type = GL_UNSIGNED_BYTE,
    };
    
    _textureOptions = overrideTextureOptions;
    _size = framebufferSize;
    framebufferReferenceCount = 0;
    referenceCountingDisabled = YES;
    
    _texture = inputTexture;
    
    return self;
}

- (id)initWithSize:(CGSize)framebufferSize;
{
GPUTextureOptions defaultTextureOptions;
Expand All @@ -82,8 +107,6 @@ - (id)initWithSize:(CGSize)framebufferSize;

// Tears down the GL framebuffer/texture resources when the last strong
// reference goes away. The per-destruction NSLog shown in the old code is
// removed: framebuffers are created and destroyed constantly by the cache,
// so logging here floods the console in production.
- (void)dealloc
{
    [self destroyFramebuffer];
}

Expand All @@ -108,9 +131,7 @@ - (void)generateFramebuffer;
{
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

[self generateTexture];


glGenFramebuffers(1, &framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);

Expand Down Expand Up @@ -162,6 +183,8 @@ - (void)generateFramebuffer;
}
else
{
[self generateTexture];

glBindTexture(GL_TEXTURE_2D, _texture);

glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);
Expand All @@ -180,23 +203,34 @@ - (void)destroyFramebuffer;
runSynchronouslyOnVideoProcessingQueue(^{
[GPUImageContext useImageProcessingContext];

glDeleteFramebuffers(1, &framebuffer);
framebuffer = 0;

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if (renderTarget)
if (framebuffer)
{
CFRelease(renderTarget);
renderTarget = NULL;
glDeleteFramebuffers(1, &framebuffer);
framebuffer = 0;
}


if (renderTexture)
if ([GPUImageContext supportsFastTextureUpload] && (!_missingFramebuffer))
{
CFRelease(renderTexture);
renderTexture = NULL;
}

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
if (renderTarget)
{
CFRelease(renderTarget);
renderTarget = NULL;
}

if (renderTexture)
{
CFRelease(renderTexture);
renderTexture = NULL;
}
#endif
}
else
{
glDeleteTextures(1, &_texture);
}

});
}

Expand Down Expand Up @@ -229,7 +263,7 @@ - (void)unlock;
return;
}

NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer");
NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?");
framebufferReferenceCount--;
if (framebufferReferenceCount < 1)
{
Expand Down Expand Up @@ -258,7 +292,6 @@ - (void)enableReferenceCounting;
// CGDataProvider release callback for the non-texture-cache readback path:
// the pixel buffer handed to the provider was malloc'd by us, so simply free
// it when Core Graphics is done with it. The debug NSLog is removed — this
// fires for every captured image and must stay silent in production.
void dataProviderReleaseCallback (void *info, const void *data, size_t size)
{
    free((void *)data);
}

void dataProviderUnlockCallback (void *info, const void *data, size_t size)
Expand All @@ -268,7 +301,6 @@ void dataProviderUnlockCallback (void *info, const void *data, size_t size)
[framebuffer restoreRenderTarget];
[framebuffer unlock];
[[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:framebuffer];
NSLog(@"Unlock callback");
}

- (CGImageRef)newCGImageFromFramebufferContents;
Expand All @@ -291,8 +323,6 @@ - (CGImageRef)newCGImageFromFramebufferContents;
if ([GPUImageContext supportsFastTextureUpload])
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE

NSLog(@"Fast texture path");
NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;
NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;

Expand All @@ -308,7 +338,6 @@ - (CGImageRef)newCGImageFromFramebufferContents;
}
else
{
NSLog(@"Normal path");
[self activateFramebuffer];
rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
Expand Down Expand Up @@ -337,7 +366,6 @@ - (CGImageRef)newCGImageFromFramebufferContents;
});

return cgImageFromBytes;

}

- (void)restoreRenderTarget;
Expand Down
Loading

0 comments on commit 137d59b

Please sign in to comment.