
Commit

Added ability to enable/disable GPUImageOutputs - a fast way to turn them on or off.

Replaced all references to self inside blocks with __unsafe_unretained __typeof__(self) weakSelf = self;. Added a method to GPUImageStillCamera to take a photo and retrieve the CMSampleBufferRef directly. However, I left this method commented out, because using it requires the camera pixel format to be in its native setting, which is incompatible with the rest of GPUImage.
rileytestut committed Jul 5, 2012
1 parent 29e7cd8 commit a0af247
Showing 6 changed files with 65 additions and 32 deletions.
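
The weakSelf substitution described in the commit message is the standard pre-weak-ARC pattern for keeping a block from retaining its owner. A minimal sketch of the pattern in the style of GPUImageMovie's startProcessing follows; it is illustrative only, not code from this commit, and assumes AVFoundation is imported and that self has a url property, as GPUImageMovie does.

```objc
// Sketch of the capture pattern applied throughout this commit (illustrative only).
// __unsafe_unretained keeps the block from retaining self; unlike __weak, the
// reference is not zeroed on deallocation, so self must outlive the block.
__unsafe_unretained __typeof__(self) weakSelf = self;

AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:nil];
[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"]
                          completionHandler:^{
    // Inside the block, refer to weakSelf rather than self.
    NSLog(@"Finished loading tracks for %@", weakSelf.url);
}];
```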
17 changes: 9 additions & 8 deletions framework/Source/GPUImageMovie.m
@@ -70,6 +70,8 @@ - (void)startProcessing;
NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];

__unsafe_unretained __typeof__(self) weakSelf = self;

[inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
NSError *error = nil;
AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
@@ -86,7 +88,7 @@ - (void)startProcessing;
[reader addOutput:readerVideoTrackOutput];

NSArray *audioTracks = [inputAsset tracksWithMediaType:AVMediaTypeAudio];
BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (weakSelf.audioEncodingTarget != nil) );
AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;

if (shouldRecordAudioTrack)
@@ -101,14 +103,12 @@ - (void)startProcessing;

if ([reader startReading] == NO)
{
NSLog(@"Error reading from file at URL: %@", self.url);
NSLog(@"Error reading from file at URL: %@", weakSelf.url);
return;
}

if (synchronizedMovieWriter != nil)
{
__unsafe_unretained GPUImageMovie *weakSelf = self;

[synchronizedMovieWriter setVideoInputReadyCallback:^{
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
}];
@@ -123,17 +123,17 @@ - (void)startProcessing;
{
while (reader.status == AVAssetReaderStatusReading)
{
[self readNextVideoFrameFromOutput:readerVideoTrackOutput];
[weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

if ( (shouldRecordAudioTrack) && (!audioEncodingIsFinished) )
{
[self readNextAudioSampleFromOutput:readerAudioTrackOutput];
[weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
}

}

if (reader.status == AVAssetWriterStatusCompleted) {
[self endProcessing];
[weakSelf endProcessing];
}
}
}];
@@ -146,8 +146,9 @@ - (void)readNextVideoFrameFromOutput:(AVAssetReaderTrackOutput *)readerVideoTrac
CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
if (sampleBufferRef)
{
__unsafe_unretained __typeof__(self) weakSelf = self;
runOnMainQueueWithoutDeadlocking(^{
[self processMovieFrame:sampleBufferRef];
[weakSelf processMovieFrame:sampleBufferRef];
});

CMSampleBufferInvalidate(sampleBufferRef);
7 changes: 7 additions & 0 deletions framework/Source/GPUImageOutput.h
@@ -34,6 +34,7 @@ void report_memory(NSString *tag);
@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
@property(nonatomic) BOOL enabled;

/// @name Managing targets
- (void)setInputTextureForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
@@ -70,6 +71,12 @@ void report_memory(NSString *tag);
*/
- (void)removeAllTargets;

/// @name The state of the GPUImageOutput

/** Returns a BOOL indicating whether the GPUImageOutput is enabled or not. Default is YES.
*/
- (BOOL)isEnabled;

/// @name Manage the output texture

- (void)initializeOutputTexture;
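
A minimal usage sketch for the new flag, assuming a typical camera-to-filter-to-view chain set up inside a view controller; the specific filter and session preset here are illustrative choices, not part of this commit.

```objc
// Illustrative only: pause and resume a filter without rebuilding the chain.
GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc]
    initWithSessionPreset:AVCaptureSessionPreset640x480
           cameraPosition:AVCaptureDevicePositionBack];
GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
GPUImageView *filterView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:filterView];

[videoCamera addTarget:sepiaFilter];
[sepiaFilter addTarget:filterView];
[videoCamera startCameraCapture];

// Later: stop feeding frames to the filter without removing it as a target.
sepiaFilter.enabled = NO;
NSLog(@"Filter enabled: %d", [sepiaFilter isEnabled]);   // logs 0

// Re-enable when needed; the default set in -init is YES.
sepiaFilter.enabled = YES;
```

In this commit the enabled check is applied where GPUImageVideoCamera dispatches frames to its targets, so disabling a filter attached directly to the camera stops it from receiving new frames.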
9 changes: 9 additions & 0 deletions framework/Source/GPUImageOutput.m
@@ -44,6 +44,7 @@ @implementation GPUImageOutput
@synthesize audioEncodingTarget = _audioEncodingTarget;
@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;
@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;
@synthesize enabled = _enabled;

#pragma mark -
#pragma mark Initialization and teardown
@@ -57,6 +58,7 @@ - (id)init;

targets = [[NSMutableArray alloc] init];
targetTextureIndices = [[NSMutableArray alloc] init];
_enabled = YES;

[self initializeOutputTexture];

@@ -69,6 +71,13 @@ - (void)dealloc
[self deleteOutputTexture];
}

#pragma mark -
#pragma mark Status

- (BOOL)isEnabled {
return _enabled;
}

#pragma mark -
#pragma mark Managing targets

1 change: 1 addition & 0 deletions framework/Source/GPUImageStillCamera.h
@@ -5,6 +5,7 @@ void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize fina
@interface GPUImageStillCamera : GPUImageVideoCamera

// Photography controls
//- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
10 changes: 10 additions & 0 deletions framework/Source/GPUImageStillCamera.m
@@ -83,6 +83,16 @@ - (void)removeInputsAndOutputs;
#pragma mark -
#pragma mark Photography controls

/*- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block
{
[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
#error If you want to use this method, you must comment out the line in initWithSessionPreset:cameraPosition: which sets the CVPixelBufferPixelFormatTypeKey. However, if you do this you cannot use any of the below methods to take a photo if you also supply a filter.
block(imageSampleBuffer, error);
}];
return;
}*/

- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
{
[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
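
For reference, if capturePhotoAsSampleBufferWithCompletionHandler: were uncommented (which, per the #error note, also requires removing the CVPixelBufferPixelFormatTypeKey setting in initWithSessionPreset:cameraPosition:), a call might look like the sketch below. This is hypothetical usage, not something this commit enables; stillCamera is assumed to be a configured GPUImageStillCamera.

```objc
// Hypothetical usage sketch: the method ships commented out in this commit
// because it needs the camera's native pixel format, which the rest of
// GPUImage's processing does not accept.
[stillCamera capturePhotoAsSampleBufferWithCompletionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
    if (error != nil)
    {
        NSLog(@"Still capture failed: %@", error);
        return;
    }

    // The buffer is only guaranteed valid inside this handler; CFRetain it
    // if it needs to outlive the block.
    NSLog(@"Captured sample buffer with %ld sample(s)",
          (long)CMSampleBufferGetNumSamples(imageSampleBuffer));
}];
```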
53 changes: 29 additions & 24 deletions framework/Source/GPUImageVideoCamera.m
@@ -288,22 +288,24 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;

for (id<GPUImageInput> currentTarget in targets)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];

[currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget];
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
if ([(GPUImageOutput *)currentTarget respondsToSelector:@selector(enabled)] && [(GPUImageOutput *)currentTarget isEnabled]) {
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

[currentTarget newFrameReadyAtTime:currentTime];
}
else
{
[currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget];
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
if (currentTarget != self.targetToIgnoreForUpdates)
{
[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];

[currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget];
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];

[currentTarget newFrameReadyAtTime:currentTime];
}
else
{
[currentTarget setInputTexture:outputTexture atIndex:textureIndexOfTarget];
[currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
}
}
}

@@ -342,13 +344,15 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;

for (id<GPUImageInput> currentTarget in targets)
{
if (currentTarget != self.targetToIgnoreForUpdates)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
[currentTarget newFrameReadyAtTime:currentTime];
if ([(GPUImageOutput *)currentTarget respondsToSelector:@selector(enabled)] && [(GPUImageOutput *)currentTarget isEnabled]) {
if (currentTarget != self.targetToIgnoreForUpdates)
{
NSInteger indexOfObject = [targets indexOfObject:currentTarget];
NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

[currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
[currentTarget newFrameReadyAtTime:currentTime];
}
}
}

@@ -388,16 +392,17 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CM
@autoreleasepool
{
//these need to be on the main thread for proper timing
__unsafe_unretained __typeof__(self) weakSelf = self;
if (captureOutput == audioOutput)
{
runOnMainQueueWithoutDeadlocking(^{
[self processAudioSampleBuffer:sampleBuffer];
[weakSelf processAudioSampleBuffer:sampleBuffer];
});
}
else
{
runOnMainQueueWithoutDeadlocking(^{
[self processVideoSampleBuffer:sampleBuffer];
[weakSelf processVideoSampleBuffer:sampleBuffer];
});
}
}
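
The repeated check added to both frame-dispatch loops above can be read as a single predicate: a target receives new frames only if it responds to -enabled (i.e. it is a GPUImageOutput subclass) and is currently enabled. A condensed sketch of that predicate as a hypothetical helper follows; the commit itself writes the check inline.

```objc
// Hypothetical helper for readability only; not part of the commit.
static inline BOOL GPUImageTargetIsEnabled(id<GPUImageInput> currentTarget)
{
    return [(GPUImageOutput *)currentTarget respondsToSelector:@selector(enabled)] &&
           [(GPUImageOutput *)currentTarget isEnabled];
}

// Usage inside a frame-dispatch loop (sketch):
// for (id<GPUImageInput> currentTarget in targets)
// {
//     if (!GPUImageTargetIsEnabled(currentTarget))
//     {
//         continue; // skip disabled outputs
//     }
//     // ...existing setInputSize:/setInputTexture:/newFrameReadyAtTime: calls...
// }
```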
