doing my best to DRY up capturePhotoAs* methods of GPUImageStillCamera
brettg committed Nov 7, 2012
1 parent d921ec2 commit 9be0190
Showing 1 changed file with 47 additions and 103 deletions.
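For context, the shape of the change is roughly the following condensed sketch (assembled from the new code in the diff below, not a verbatim excerpt): each public capturePhotoAs* method now delegates the capture itself to a shared private helper and only supplies a block that converts the filtered, still-on-the-GPU result into its output format. As the new header comment notes, that block is responsible for signaling frameRenderingSemaphore.

- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
{
    // The shared helper performs the AVCaptureStillImageOutput capture and GPU upload;
    // this block only handles JPEG conversion and must signal the semaphore itself.
    [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
        NSData *dataForJPEGFile = nil;
        if (!error) {
            @autoreleasepool {
                UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
                dispatch_semaphore_signal(frameRenderingSemaphore); // caller-side responsibility
                dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto, 0.8);
            }
        } else {
            dispatch_semaphore_signal(frameRenderingSemaphore);
        }
        block(dataForJPEGFile, error);
    }];
}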
150 changes: 47 additions & 103 deletions framework/Source/GPUImageStillCamera.m
Expand Up @@ -44,6 +44,9 @@ @interface GPUImageStillCamera ()
AVCaptureStillImageOutput *photoOutput;
}

// Methods calling this are responsible for calling dispatch_semaphore_signal(frameRenderingSemaphore) somewhere inside the block
- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block;

@end

@implementation GPUImageStillCamera
@@ -107,136 +110,85 @@ - (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBuffer

- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
{
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);

[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
UIImage *filteredPhoto = nil;

if(imageSampleBuffer == NULL){
dispatch_semaphore_signal(frameRenderingSemaphore);
block(nil, error);
return;
if(!error){
filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
}

[self conserveMemoryForNextFrame];

// For now, resize photos to fit within the max texture size of the GPU
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);

CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
CGSize scaledImageSizeToFitOnGPU = [GPUImageOpenGLESContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];
if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))
{
CMSampleBufferRef sampleBuffer;
GPUImageCreateResizedSampleBuffer(cameraFrame, scaledImageSizeToFitOnGPU, &sampleBuffer);

dispatch_semaphore_signal(frameRenderingSemaphore);
[self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
CFRelease(sampleBuffer);
}
else
{
// This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches
AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageOpenGLESContext supportsFastTextureUpload]))
{
dispatch_semaphore_signal(frameRenderingSemaphore);
[self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
}
}

UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
dispatch_semaphore_signal(frameRenderingSemaphore);
block(filteredPhoto, error);

block(filteredPhoto, error);
}];

return;
}

- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
{
// reportAvailableMemoryForGPUImage(@"Before still image capture");
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
// reportAvailableMemoryForGPUImage(@"Before Capture");

[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
// reportAvailableMemoryForGPUImage(@"Before filter processing");
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForJPEGFile = nil;

if(imageSampleBuffer == NULL){
if(!error){
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
dispatch_semaphore_signal(frameRenderingSemaphore);
// reportAvailableMemoryForGPUImage(@"After UIImage generation");

dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto, 0.8);
// reportAvailableMemoryForGPUImage(@"After JPEG generation");
}

// reportAvailableMemoryForGPUImage(@"After autorelease pool");
}else{
dispatch_semaphore_signal(frameRenderingSemaphore);
block(nil, error);
return;
}

[self conserveMemoryForNextFrame];
dispatch_semaphore_signal(frameRenderingSemaphore);
block(dataForJPEGFile, error);
}];
}

// For now, resize photos to fit within the max texture size of the GPU
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
{

CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
CGSize scaledImageSizeToFitOnGPU = [GPUImageOpenGLESContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];
if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))
{
CMSampleBufferRef sampleBuffer;
[self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
NSData *dataForPNGFile = nil;

GPUImageCreateResizedSampleBuffer(cameraFrame, scaledImageSizeToFitOnGPU, &sampleBuffer);

dispatch_semaphore_signal(frameRenderingSemaphore);
[self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
CFRelease(sampleBuffer);
}
else
{
// This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches
AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageOpenGLESContext supportsFastTextureUpload]))
{
if(!error){
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
dispatch_semaphore_signal(frameRenderingSemaphore);
[self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
}
}

// reportAvailableMemoryForGPUImage(@"After filter processing");

__strong NSData *dataForJPEGFile = nil;
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
}
}else{
dispatch_semaphore_signal(frameRenderingSemaphore);

// reportAvailableMemoryForGPUImage(@"After UIImage generation");

dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto, 0.8);
// reportAvailableMemoryForGPUImage(@"After JPEG generation");
}

// reportAvailableMemoryForGPUImage(@"After autorelease pool");

block(dataForJPEGFile, error);

block(dataForPNGFile, error);
}];

return;
}

- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
#pragma mark - Private Methods

- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block
{
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);

[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {

if(imageSampleBuffer == NULL){
dispatch_semaphore_signal(frameRenderingSemaphore);
block(nil, error);
block(error);
return;
}

[self conserveMemoryForNextFrame];

// For now, resize photos to fit within the max texture size of the GPU
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);

CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
CGSize scaledImageSizeToFitOnGPU = [GPUImageOpenGLESContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];
if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))
@@ -261,18 +213,10 @@ - (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)fi
}
}

NSData *dataForPNGFile = nil;
@autoreleasepool {
UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentlyProcessedOutput];
dispatch_semaphore_signal(frameRenderingSemaphore);
dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
}

block(dataForPNGFile, error);
block(nil);
}];

return;
}



@end
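Usage is unchanged by this refactoring; a hypothetical caller (the filter choice, session preset, and output path below are illustrative, not part of this commit) still looks like:

// Hypothetical caller sketch; GPUImageSepiaFilter and the temporary file path are illustrative.
GPUImageStillCamera *stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPresetPhoto cameraPosition:AVCaptureDevicePositionBack];
GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
[stillCamera addTarget:sepiaFilter];
[stillCamera startCameraCapture];

[stillCamera capturePhotoAsJPEGProcessedUpToFilter:sepiaFilter withCompletionHandler:^(NSData *processedJPEG, NSError *error) {
    if (error) {
        NSLog(@"Still photo capture failed: %@", error);
        return;
    }
    // Persist the filtered JPEG returned by the completion handler.
    NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"filtered.jpg"];
    [processedJPEG writeToFile:outputPath atomically:YES];
}];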
