
Commit 0e711a8

Fixed photo capture on iPhone 4. Fixed data acquisition from raw data outputs.

BradLarson committed Apr 1, 2014
1 parent 58708f8 · commit 0e711a8
Showing 7 changed files with 21 additions and 37 deletions.
@@ -44,7 +44,6 @@ - (void)viewDidLoad

[videoCamera addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];
// filterView.fillMode = kGPUImageFillModeStretch;
// filterView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;

@@ -54,10 +53,12 @@ - (void)viewDidLoad
unlink([pathToMovie UTF8String]); // If a file already exists, AVAssetWriter won't let you record new frames, so delete the old movie
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
movieWriter.encodingLiveVideo = YES;
// movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(640.0, 480.0)];
// movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(720.0, 1280.0)];
// movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(1080.0, 1920.0)];
[filter addTarget:movieWriter];
[filter addTarget:filterView];

[videoCamera startCameraCapture];

@@ -66,7 +67,7 @@ - (void)viewDidLoad
dispatch_after(startTime, dispatch_get_main_queue(), ^(void){
NSLog(@"Start recording");

videoCamera.audioEncodingTarget = movieWriter;
// videoCamera.audioEncodingTarget = movieWriter;
[movieWriter startRecording];

// NSError *error = nil;
@@ -77,7 +78,7 @@ - (void)viewDidLoad
// [videoCamera.inputCamera setTorchMode:AVCaptureTorchModeOn];
// [videoCamera.inputCamera unlockForConfiguration];

double delayInSeconds = 30.0;
double delayInSeconds = 10.0;
dispatch_time_t stopTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(stopTime, dispatch_get_main_queue(), ^(void){

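The hunks above rework the example's recording setup: the movie writer is now attached to the filter before the on-screen view, live-video encoding is enabled on the writer, the audio encoding target is left unset, and the recording window drops from 30 to 10 seconds. A minimal sketch of the resulting pipeline, assuming the videoCamera, filter, filterView, and movieWriter ivars used by this example (the movie path is also an assumption):

NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"]; // path assumed
unlink([pathToMovie UTF8String]); // AVAssetWriter won't record over an existing file
NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];

movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
movieWriter.encodingLiveVideo = YES;   // new: the writer is fed frames in real time

[videoCamera addTarget:filter];
[filter addTarget:movieWriter];        // attach the writer first...
[filter addTarget:filterView];         // ...then the preview view
[videoCamera startCameraCapture];

[movieWriter startRecording];          // audioEncodingTarget is deliberately not set here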
7 changes: 5 additions & 2 deletions framework/Source/GPUImageFramebuffer.m
@@ -371,8 +371,8 @@ - (CGImageRef)newCGImageFromFramebufferContents;
- (void)restoreRenderTarget;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
CFRelease(renderTarget);
CVPixelBufferUnlockBaseAddress(renderTarget, 0);
CFRelease(renderTarget);
#else
#endif
}
@@ -399,7 +399,10 @@ - (NSUInteger)bytesPerRow;
- (GLubyte *)byteBuffer;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
return (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
CVPixelBufferLockBaseAddress(renderTarget, 0);
GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);
CVPixelBufferUnlockBaseAddress(renderTarget, 0);
return bufferBytes;
#else
return NULL; // TODO: do more with this on the non-texture-cache side
#endif
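The two hunks above fix the pixel-buffer handling in GPUImageFramebuffer: the render target is now unlocked before it is released, and byteBuffer locks the buffer around the base-address read instead of reading it unlocked. For clarity, the two methods as they read after this commit (iOS texture-cache path; this only restates the diff above in one place):

- (void)restoreRenderTarget;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    // Unlock before releasing; the old order released a still-locked buffer.
    CVPixelBufferUnlockBaseAddress(renderTarget, 0);
    CFRelease(renderTarget);
#else
#endif
}

- (GLubyte *)byteBuffer;
{
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
    // Lock so the base address is valid at the moment it is read out.
    CVPixelBufferLockBaseAddress(renderTarget, 0);
    GLubyte *bufferBytes = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
    CVPixelBufferUnlockBaseAddress(renderTarget, 0);
    return bufferBytes;
#else
    return NULL; // TODO: do more with this on the non-texture-cache side
#endif
}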
1 change: 1 addition & 0 deletions framework/Source/GPUImageRawDataOutput.m
@@ -260,6 +260,7 @@ - (GLubyte *)rawBytesForImage;
{
glFinish();
_rawBytesForImage = [outputFramebuffer byteBuffer];
NSLog(@"Output framebuffer: %@", outputFramebuffer);
}
else
{
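The added NSLog traces which framebuffer backs the raw-byte fetch; the byteBuffer fix in GPUImageFramebuffer.m above is what actually makes rawBytesForImage return valid data again. For reference, a typical consumer of this path, loosely following the project's documented raw-data usage (the image size, the filter variable, and the per-frame processing here are assumptions):

GPUImageRawDataOutput *rawDataOutput = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(480.0, 640.0) resultsInBGRAFormat:YES];
[filter addTarget:rawDataOutput];

__weak GPUImageRawDataOutput *weakOutput = rawDataOutput;
[rawDataOutput setNewFrameAvailableBlock:^{
    GLubyte *outputBytes = [weakOutput rawBytesForImage];    // the call this commit fixes
    NSUInteger bytesPerRow = [weakOutput bytesPerRowInOutput];
    // Walk the buffer row by row; bytesPerRow can exceed width * 4 because of padding.
}];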
13 changes: 2 additions & 11 deletions framework/Source/GPUImageStillCamera.m
@@ -37,6 +37,7 @@ void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize fina
CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};

CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer);
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
CFRelease(videoInfo);
CVPixelBufferRelease(pixel_buffer);
}
@@ -77,7 +78,7 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD

// Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device
// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
{
BOOL supportsFullYUVRange = NO;
NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes;
@@ -105,16 +106,6 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}

// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
// {
// // TODO: Check for full range output and use that if available
// [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// }
// else
// {
// [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
// }

[self.captureSession addOutput:photoOutput];

[self.captureSession commitConfiguration];
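Two fixes land in the still camera: GPUImageCreateResizedSampleBuffer now unlocks the camera frame's base address once the resized sample buffer has been built, and still capture only stays on the YUV path when the GPU supports red textures, so an iPhone 4 drops back to BGRA (the photo-capture fix from the commit message). An illustrative helper, not part of GPUImage, that captures the same format decision:

#import <CoreVideo/CoreVideo.h>
#import "GPUImageContext.h"

// Illustrative only: pick the still-capture pixel format the way this commit does.
// deviceSupportsRedTextures is NO on the iPhone 4's GPU, so that device gets BGRA.
static OSType GPUStillCaptureFormat(BOOL captureAsYUV, BOOL supportsFullYUVRange)
{
    if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
    {
        return supportsFullYUVRange ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
                                    : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
    }
    return kCVPixelFormatType_32BGRA;
}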
3 changes: 2 additions & 1 deletion framework/Source/GPUImageVideoCamera.m
@@ -719,14 +719,15 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;

[GPUImageContext useImageProcessingContext];

if ([GPUImageContext supportsFastTextureUpload])
if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;

// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
CVPixelBufferLockBaseAddress(cameraFrame, 0);

if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
{
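In the video camera, the fast-upload branch is now taken only when captureAsYUV is set, and the camera frame is locked before its planes are read. A condensed sketch of the branch after this change; texture creation is abbreviated and the surrounding method body is assumed from context:

CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);

if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)   // was: fast upload alone
{
    if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // YUV planar input, convert to RGB on the GPU
    {
        CVPixelBufferLockBaseAddress(cameraFrame, 0); // new: lock before touching the planes
        // ... create luminance and chrominance textures from planes 0 and 1,
        //     run the YUV-to-RGB conversion, then unlock the buffer ...
    }
}
else
{
    // BGRA upload path, now also taken when captureAsYUV is NO.
}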
1 change: 0 additions & 1 deletion framework/Source/iOS/GPUImageMovieWriter.h
@@ -24,7 +24,6 @@ extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;
AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
dispatch_queue_t movieWritingQueue;

CVOpenGLESTextureCacheRef coreVideoTextureCache;
CVPixelBufferRef renderTarget;
CVOpenGLESTextureRef renderTexture;

26 changes: 7 additions & 19 deletions framework/Source/iOS/GPUImageMovieWriter.m
@@ -501,17 +501,6 @@ - (void)createDataFBO;

if ([GPUImageContext supportsFastTextureUpload])
{
#if defined(__IPHONE_6_0)
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache);
#else
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageContext sharedImageProcessingContext] context], NULL, &coreVideoTextureCache);
#endif

if (err)
{
NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
}

// Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/


@@ -526,7 +515,7 @@ - (void)createDataFBO;
CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);

CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], renderTarget,
NULL, // texture attributes
GL_TEXTURE_2D,
GL_RGBA, // opengl format
@@ -576,11 +565,6 @@ - (void)destroyDataFBO;

if ([GPUImageContext supportsFastTextureUpload])
{
if (coreVideoTextureCache)
{
CFRelease(coreVideoTextureCache);
}

if (renderTexture)
{
CFRelease(renderTexture);
@@ -634,9 +618,9 @@ - (void)renderAtInternalSize;
glVertexAttribPointer(colorSwizzlingTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);

glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
[firstInputFramebuffer unlock];

glFinish();
[firstInputFramebuffer unlock];
}

#pragma mark -
@@ -709,7 +693,7 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
void(^write)() = ^() {
while( ! assetWriterVideoInput.readyForMoreMediaData && ! _encodingLiveVideo && ! videoEncodingIsFinished ) {
NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
//NSLog(@"video waiting...");
// NSLog(@"video waiting...");
[[NSRunLoop currentRunLoop] runUntilDate:maxDate];
}
if (!assetWriterVideoInput.readyForMoreMediaData)
@@ -735,9 +719,13 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
};

if( _encodingLiveVideo )
{
dispatch_async(movieWritingQueue, write);
}
else
{
write();
}
}

- (NSInteger)nextAvailableTextureIndex;
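The movie writer no longer creates and releases its own CVOpenGLESTextureCache (the ivar was dropped from the header above); it binds its render-target pixel buffer through the shared cache owned by GPUImageContext, keeps the input framebuffer locked until glFinish returns, and adds explicit braces around the live-video write dispatch. A condensed sketch of the texture setup in createDataFBO after this change; error handling and the FBO attachment are omitted, and videoSize plus the adaptor ivar are assumed from context:

CVPixelBufferPoolCreatePixelBuffer(NULL, [assetWriterPixelBufferInput pixelBufferPool], &renderTarget);

// Tag the buffer so the encoded movie carries the right colorimetry.
CVBufferSetAttachment(renderTarget, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate);
CVBufferSetAttachment(renderTarget, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate);

// Use the context's shared texture cache instead of a per-writer one.
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                             [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache],
                                             renderTarget,
                                             NULL,          // texture attributes
                                             GL_TEXTURE_2D,
                                             GL_RGBA,       // opengl format
                                             (int)videoSize.width,
                                             (int)videoSize.height,
                                             GL_BGRA,       // native iOS format
                                             GL_UNSIGNED_BYTE,
                                             0,
                                             &renderTexture);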
