Skip to content

Commit

Permalink
Experimenting with changing GCD queues on video encoding to improve audio recording performance.
Browse files Browse the repository at this point in the history
  • Loading branch information
BradLarson committed Jun 9, 2013
1 parent a29f15a commit 6c785e4
Show file tree
Hide file tree
Showing 3 changed files with 62 additions and 61 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ - (void)viewDidLoad
// [videoCamera.inputCamera setTorchMode:AVCaptureTorchModeOn];
// [videoCamera.inputCamera unlockForConfiguration];

double delayInSeconds = 10.0;
double delayInSeconds = 30.0;
dispatch_time_t stopTime = dispatch_time(DISPATCH_TIME_NOW, delayInSeconds * NSEC_PER_SEC);
dispatch_after(stopTime, dispatch_get_main_queue(), ^(void){

Expand Down
32 changes: 6 additions & 26 deletions framework/Source/GPUImageVideoCamera.m
Original file line number Diff line number Diff line change
Expand Up @@ -122,8 +122,9 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
return nil;
}

cameraProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.cameraProcessingQueue", NULL);
audioProcessingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.audioProcessingQueue", NULL);
cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);

frameRenderingSemaphore = dispatch_semaphore_create(1);

_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
Expand Down Expand Up @@ -257,7 +258,6 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
}

[videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
// [videoOutput setSampleBufferDelegate:self queue:[GPUImageContext sharedContextQueue]];
if ([_captureSession canAddOutput:videoOutput])
{
[_captureSession addOutput:videoOutput];
Expand Down Expand Up @@ -298,17 +298,7 @@ - (void)dealloc
}

// ARC forbids explicit 'release' message sends; since iOS 6 this also applies to dispatch_release() calls, so they must be stripped out in that case.
#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
if (cameraProcessingQueue != NULL)
{
dispatch_release(cameraProcessingQueue);
}

if (audioProcessingQueue != NULL)
{
dispatch_release(audioProcessingQueue);
}

#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
if (frameRenderingSemaphore != NULL)
{
dispatch_release(frameRenderingSemaphore);
Expand Down Expand Up @@ -817,25 +807,15 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
{
if (captureOutput == audioOutput)
{
// if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
// {
// return;
// }

CFRetain(sampleBuffer);
runAsynchronouslyOnVideoProcessingQueue(^{
[self processAudioSampleBuffer:sampleBuffer];
CFRelease(sampleBuffer);
// dispatch_semaphore_signal(frameRenderingSemaphore);
});
[self processAudioSampleBuffer:sampleBuffer];
}
else
{
if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
{
return;
}

CFRetain(sampleBuffer);
runAsynchronouslyOnVideoProcessingQueue(^{
//Feature Detection Hook.
Expand Down
89 changes: 55 additions & 34 deletions framework/Source/iOS/GPUImageMovieWriter.m
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,8 @@ - (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSStr

self.enabled = YES;

movieWritingQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.movieWritingQueue", NULL);

videoSize = newSize;
movieURL = newMovieURL;
fileType = newFileType;
Expand Down Expand Up @@ -138,6 +140,13 @@ - (void)dealloc;
{
free(frameData);
}

#if ( (__IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0) || (!defined(__IPHONE_6_0)) )
if (movieWritingQueue != NULL)
{
dispatch_release(movieWritingQueue);
}
#endif
}

#pragma mark -
Expand Down Expand Up @@ -262,7 +271,9 @@ - (void)cancelRecording;

- (void)finishRecording;
{
[self finishRecordingWithCompletionHandler:nil];
runSynchronouslyOnVideoProcessingQueue(^{
[self finishRecordingWithCompletionHandler:nil];
});
}

- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
Expand Down Expand Up @@ -312,12 +323,14 @@ - (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;

if (CMTIME_IS_INVALID(startTime))
{
if (audioInputReadyCallback == NULL)
{
[assetWriter startWriting];
}
[assetWriter startSessionAtSourceTime:currentSampleTime];
startTime = currentSampleTime;
dispatch_sync(movieWritingQueue, ^{
if ((audioInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
{
[assetWriter startWriting];
}
[assetWriter startSessionAtSourceTime:currentSampleTime];
startTime = currentSampleTime;
});
}

if (!assetWriterAudioInput.readyForMoreMediaData)
Expand All @@ -327,7 +340,11 @@ - (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
}

// NSLog(@"Recorded audio sample time: %lld, %d, %lld", currentSampleTime.value, currentSampleTime.timescale, currentSampleTime.epoch);
[assetWriterAudioInput appendSampleBuffer:audioBuffer];
CFRetain(audioBuffer);
dispatch_async(movieWritingQueue, ^{
[assetWriterAudioInput appendSampleBuffer:audioBuffer];
CFRelease(audioBuffer);
});
}
}

Expand Down Expand Up @@ -509,13 +526,15 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;

if (CMTIME_IS_INVALID(startTime))
{
if (videoInputReadyCallback == NULL)
{
[assetWriter startWriting];
}

[assetWriter startSessionAtSourceTime:frameTime];
startTime = frameTime;
dispatch_sync(movieWritingQueue, ^{
if ((videoInputReadyCallback == NULL) && (assetWriter.status != AVAssetWriterStatusWriting))
{
[assetWriter startWriting];
}

[assetWriter startSessionAtSourceTime:frameTime];
startTime = frameTime;
});
}

if (!assetWriterVideoInput.readyForMoreMediaData)
Expand All @@ -527,12 +546,12 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
// Render the frame with swizzled colors, so that they can be uploaded quickly as BGRA frames
[GPUImageContext useImageProcessingContext];
[self renderAtInternalSize];

CVPixelBufferRef pixel_buffer = NULL;

if ([GPUImageContext supportsFastTextureUpload])
{
pixel_buffer = renderTarget;
pixel_buffer = renderTarget;
CVPixelBufferLockBaseAddress(pixel_buffer, 0);
}
else
Expand All @@ -551,23 +570,25 @@ - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
}
}

// if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:CMTimeSubtract(frameTime, startTime)])
if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
{
NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value);
}
else
{
// NSLog(@"Recorded video sample time: %lld, %d, %lld", frameTime.value, frameTime.timescale, frameTime.epoch);
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

previousFrameTime = frameTime;
dispatch_sync(movieWritingQueue, ^{
if(![assetWriterPixelBufferInput appendPixelBuffer:pixel_buffer withPresentationTime:frameTime])
{
NSLog(@"Problem appending pixel buffer at time: %lld", frameTime.value);
}
else
{
// NSLog(@"Recorded video sample time: %lld, %d, %lld", frameTime.value, frameTime.timescale, frameTime.epoch);
}
CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

previousFrameTime = frameTime;

if (![GPUImageContext supportsFastTextureUpload])
{
CVPixelBufferRelease(pixel_buffer);
}
});

if (![GPUImageContext supportsFastTextureUpload])
{
CVPixelBufferRelease(pixel_buffer);
}
}

- (NSInteger)nextAvailableTextureIndex;
Expand Down

0 comments on commit 6c785e4

Please sign in to comment.