From 0e711a80e1eb5057d83e01aed50c8be4be5b63fe Mon Sep 17 00:00:00 2001
From: Brad Larson <larson@sunsetlakesoftware.com>
Date: Mon, 31 Mar 2014 22:24:04 -0500
Subject: [PATCH] Fixed photo capture on iPhone 4. Fixed data acquisition from
 raw data outputs.

---
 .../SimpleVideoFilterViewController.m         |  7 ++---
 framework/Source/GPUImageFramebuffer.m        |  7 +++--
 framework/Source/GPUImageRawDataOutput.m      |  1 +
 framework/Source/GPUImageStillCamera.m        | 13 ++--------
 framework/Source/GPUImageVideoCamera.m        |  3 ++-
 framework/Source/iOS/GPUImageMovieWriter.h    |  1 -
 framework/Source/iOS/GPUImageMovieWriter.m    | 26 +++++--------------
 7 files changed, 21 insertions(+), 37 deletions(-)

diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
index f72e71864..3ce796d82 100755
--- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
+++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
@@ -44,7 +44,6 @@ - (void)viewDidLoad
     
     [videoCamera addTarget:filter];
     GPUImageView *filterView = (GPUImageView *)self.view;
-    [filter addTarget:filterView];
 //    filterView.fillMode = kGPUImageFillModeStretch;
 //    filterView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;
     
@@ -54,10 +53,12 @@ - (void)viewDidLoad
     unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
     NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
     movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
+    movieWriter.encodingLiveVideo = YES;
 //    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(640.0, 480.0)];
 //    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(720.0, 1280.0)];
 //    movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(1080.0, 1920.0)];
     [filter addTarget:movieWriter];
+    [filter addTarget:filterView];
     
     [videoCamera startCameraCapture];
     
@@ -66,7 +67,7 @@ - (void)viewDidLoad
     dispatch_after(startTime, dispatch_get_main_queue(), ^(void){
         NSLog(@"Start recording");
         
-        videoCamera.audioEncodingTarget = movieWriter;
+//        videoCamera.audioEncodingTarget = movieWriter;
         [movieWriter startRecording];
 
 //        NSError *error = nil;
@@ -77,7 +78,7 @@ - (void)viewDidLoad
 //        [videoCamera.inputCamera setTorchMode:AVCaptureTorchModeOn];
 //        [videoCamera.inputCamera unlockForConfiguration];
 
-        double delayInSeconds = 30.0;
+        double delayInSeconds = 10.0;
         dispatch_time_t stopTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
         dispatch_after(stopTime, dispatch_get_main_queue(), ^(void){
             
diff --git a/framework/Source/GPUImageFramebuffer.m b/framework/Source/GPUImageFramebuffer.m
index 36d89c29d..3bec9fb95 100644
--- a/framework/Source/GPUImageFramebuffer.m
+++ b/framework/Source/GPUImageFramebuffer.m
@@ -371,8 +371,8 @@ - (CGImageRef)newCGImageFromFramebufferContents;
 - (void)restoreRenderTarget;
 {
 #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-    CFRelease(renderTarget);
     CVPixelBufferUnlockBaseAddress(renderTarget, 0);
+    CFRelease(renderTarget);
 #else
 #endif
 }
@@ -399,7 +399,10 @@ - (NSUInteger)bytesPerRow;
 - (GLubyte *)byteBuffer;
 {
 #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-    return (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
+    CVPixelBufferLockBaseAddress(renderTarget, 0);
+    GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);
+    CVPixelBufferUnlockBaseAddress(renderTarget, 0);
+    return bufferBytes;
 #else
     return NULL; // TODO: do more with this on the non-texture-cache side
 #endif
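These two hunks bring the render target's CVPixelBuffer handling in line with the usual Core Video locking discipline: unlock the base address before the final CFRelease, and bracket any CVPixelBufferGetBaseAddress() read with a lock/unlock pair. The general pattern, as a sketch (pixelBuffer and height are placeholders, not names from this file):

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    GLubyte *bytes = (GLubyte *)CVPixelBufferGetBaseAddress(pixelBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    // ... read up to bytesPerRow * height bytes ...
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

Returning the pointer after the unlock, as -byteBuffer does, presumably relies on these texture-cache buffers staying mapped for the framebuffer's lifetime.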
diff --git a/framework/Source/GPUImageRawDataOutput.m b/framework/Source/GPUImageRawDataOutput.m
index f2cdb785b..64a324865 100755
--- a/framework/Source/GPUImageRawDataOutput.m
+++ b/framework/Source/GPUImageRawDataOutput.m
@@ -260,6 +260,7 @@ - (GLubyte *)rawBytesForImage;
             {
                 glFinish();
                 _rawBytesForImage = [outputFramebuffer byteBuffer];
+                NSLog(@"Output framebuffer: %@", outputFramebuffer);
             }
             else
             {
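The added NSLog is diagnostic output for the raw-data path. For context, a caller reads the returned buffer along these lines (a sketch; bytesPerRowInOutput is assumed to be this class's row-stride accessor, not shown in the hunk):

    GLubyte *outputBytes = [rawDataOutput rawBytesForImage];
    NSUInteger bytesPerRow = [rawDataOutput bytesPerRowInOutput]; // assumed accessor
    // BGRA pixel at (x, y):
    GLubyte blue  = outputBytes[y * bytesPerRow + x * 4];
    GLubyte alpha = outputBytes[y * bytesPerRow + x * 4 + 3];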
diff --git a/framework/Source/GPUImageStillCamera.m b/framework/Source/GPUImageStillCamera.m
index 1d0cbc2bb..998aecc9f 100755
--- a/framework/Source/GPUImageStillCamera.m
+++ b/framework/Source/GPUImageStillCamera.m
@@ -37,6 +37,7 @@ void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize fina
     CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
     
     CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer);
+    CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
     CFRelease(videoInfo);
     CVPixelBufferRelease(pixel_buffer);
 }
@@ -77,7 +78,7 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
    
     // Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device
 //    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
-    if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
+    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
     {
         BOOL supportsFullYUVRange = NO;
         NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes;
@@ -105,16 +106,6 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
         [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
     }
     
-//    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
-//    {
-//        // TODO: Check for full range output and use that if available
-//        [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
-//    }
-//    else
-//    {
-//        [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
-//    }
-
     [self.captureSession addOutput:photoOutput];
     
     [self.captureSession commitConfiguration];
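GPUImageCreateResizedSampleBuffer now unlocks the camera frame's base address once the resized sample buffer exists, balancing the lock taken earlier outside this hunk, and the init path keys the YUV decision on red-texture support: devices without it (the iPhone 4 class this commit targets) fall back to BGRA for both photo and video output, since YUV resizing isn't implemented for them yet. Ignoring the full-range probe above, the selection reduces to this sketch:

    OSType pixelFormat = (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
        ? kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
        : kCVPixelFormatType_32BGRA;
    [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:pixelFormat]
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey]];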
diff --git a/framework/Source/GPUImageVideoCamera.m b/framework/Source/GPUImageVideoCamera.m
index 9481b8b4d..3eeff7ae7 100644
--- a/framework/Source/GPUImageVideoCamera.m
+++ b/framework/Source/GPUImageVideoCamera.m
@@ -719,7 +719,7 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
 
     [GPUImageContext useImageProcessingContext];
 
-    if ([GPUImageContext supportsFastTextureUpload])
+    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
     {
         CVOpenGLESTextureRef luminanceTextureRef = NULL;
         CVOpenGLESTextureRef chrominanceTextureRef = NULL;
@@ -727,6 +727,7 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
 //        if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
         if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
         {
+            CVPixelBufferLockBaseAddress(cameraFrame, 0);
             
             if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
             {
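Two fixes here: the texture-cache fast path is now also conditional on captureAsYUV, so BGRA capture falls through to the plain glTexImage2D upload path instead of the YUV-planar one, and the planar branch locks the camera frame before reading its planes (the matching unlock is presumably later in the method, outside this hunk). The resulting structure, sketched:

    if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0);
        // upload plane 0 (luminance) and plane 1 (chrominance) through the
        // texture cache, then convert YUV -> RGB on the GPU
        // ... CVPixelBufferUnlockBaseAddress(cameraFrame, 0) once uploaded
    }
    else
    {
        // lock, upload BGRA bytes with glTexImage2D, unlock
    }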
diff --git a/framework/Source/iOS/GPUImageMovieWriter.h b/framework/Source/iOS/GPUImageMovieWriter.h
index 5f12204f0..dee0bfb76 100755
--- a/framework/Source/iOS/GPUImageMovieWriter.h
+++ b/framework/Source/iOS/GPUImageMovieWriter.h
@@ -24,7 +24,6 @@ extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;
     AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
 	dispatch_queue_t movieWritingQueue;
     
-    CVOpenGLESTextureCacheRef coreVideoTextureCache;
     CVPixelBufferRef renderTarget;
     CVOpenGLESTextureRef renderTexture;
 
diff --git a/framework/Source/iOS/GPUImageMovieWriter.m b/framework/Source/iOS/GPUImageMovieWriter.m
index 8ef1664a2..5a3df0051 100755
--- a/framework/Source/iOS/GPUImageMovieWriter.m
+++ b/framework/Source/iOS/GPUImageMovieWriter.m
@@ -501,17 +501,6 @@ - (void)createDataFBO;
     
     if ([GPUImageContext supportsFastTextureUpload])
     {
-#if defined(__IPHONE_6_0)
-        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache);
-#else
-        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache);
-#endif
-
-        if (err) 
-        {
-            NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
-        }
-
         // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
         
 
@@ -526,7 +515,7 @@ - (void)createDataFBO;
         CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
         CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);
         
-        CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
+        CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], renderTarget,
                                                       NULL, // texture attributes
                                                       GL_TEXTURE_2D,
                                                       GL_RGBA, // opengl format
@@ -576,11 +565,6 @@ - (void)destroyDataFBO;
         
         if ([GPUImageContext supportsFastTextureUpload])
         {
-            if (coreVideoTextureCache)
-            {
-                CFRelease(coreVideoTextureCache);
-            }
-            
             if (renderTexture)
             {
                 CFRelease(renderTexture);
@@ -634,9 +618,9 @@ - (void)renderAtInternalSize;
 	glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
     
     glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
+    [firstInputFramebuffer unlock];
     
     glFinish();
-    [firstInputFramebuffer unlock];
 }
 
 #pragma mark -
@@ -709,7 +693,7 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
     void(^write)() = ^() {
         while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! videoEncodingIsFinished ) {
             NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
-            //NSLog(@"video waiting...");
+//            NSLog(@"video waiting...");
             [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
         }
         if (!assetWriterVideoInput.readyForMoreMediaData)
@@ -735,9 +719,13 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
     };
 
     if( _encodingLiveVideo )
+    {
         dispatch_async(movieWritingQueue, write);
+    }
     else
+    {
         write();
+    }
 }
 
 - (NSInteger)nextAvailableTextureIndex;
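
The writer-side change drops GPUImageMovieWriter's private texture cache (the ivar removed from the header above) in favor of the context-wide cache that GPUImageContext exposes, so cache creation and teardown live in one place; -renderAtInternalSize also unlocks the input framebuffer before the glFinish that flushes the render. A sketch of error-checked texture creation against the shared cache, restoring the assert the removed per-writer setup performed (videoSize stands in for the writer's configured dimensions):

    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
        [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache],
        renderTarget, NULL, GL_TEXTURE_2D, GL_RGBA,
        (int)videoSize.width, (int)videoSize.height,
        GL_BGRA, GL_UNSIGNED_BYTE, 0, &renderTexture);
    NSAssert(err == kCVReturnSuccess, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);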