Allow GPUImageMovie to process an already opened asset
hlidotbe committed Jul 18, 2012
1 parent f1c401b commit e23899d
Showing 2 changed files with 98 additions and 55 deletions.
2 changes: 2 additions & 0 deletions framework/Source/GPUImageMovie.h
@@ -7,13 +7,15 @@
*/
@interface GPUImageMovie : GPUImageOutput

@property (readwrite, retain) AVAsset *asset;
@property(readwrite, retain) NSURL *url;

/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
*/
@property(readwrite, nonatomic) BOOL runBenchmark;

/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithURL:(NSURL *)url;

/// @name Movie processing
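The new -initWithAsset: entry point means a GPUImageMovie no longer has to be handed a file URL; an asset that is already open — for example an AVMutableComposition assembled in memory — can be passed in directly. A minimal usage sketch; the sepia filter and variable names are illustrative, not part of this commit:

    AVMutableComposition *composition = [AVMutableComposition composition]; // e.g. built from several clips elsewhere
    GPUImageMovie *movieFile = [[GPUImageMovie alloc] initWithAsset:composition];
    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    [movieFile addTarget:sepiaFilter];
    [movieFile startProcessing]; // skips the URL loading path and processes the asset directly
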
151 changes: 96 additions & 55 deletions framework/Source/GPUImageMovie.m
@@ -9,11 +9,14 @@ @interface GPUImageMovie ()
AVAssetReader *reader;
}

- (void)processAsset;

@end

@implementation GPUImageMovie

@synthesize url = _url;
@synthesize asset = _asset;
@synthesize runBenchmark = _runBenchmark;

#pragma mark -
@@ -25,7 +28,7 @@ - (id)initWithURL:(NSURL *)url;
    {
        return nil;
    }

    if ([GPUImageOpenGLESContext supportsFastTextureUpload])
    {
        [GPUImageOpenGLESContext useImageProcessingContext];
@@ -38,13 +41,42 @@ - (id)initWithURL:(NSURL *)url;
        {
            NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
        }

        // Need to remove the initially created texture
        [self deleteOutputTexture];
    }

    self.url = url;
    self.asset = nil;

    return self;
}

- (id)initWithAsset:(AVAsset *)asset;
{
    if (!(self = [super init]))
    {
        return nil;
    }

    if ([GPUImageOpenGLESContext supportsFastTextureUpload])
    {
        [GPUImageOpenGLESContext useImageProcessingContext];
#if defined(__IPHONE_6_0)
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
#else
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
#endif
        if (err)
        {
            NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
        }

        // Need to remove the initially created texture
        [self deleteOutputTexture];
    }

    self.url = nil;
    self.asset = asset;

    return self;
}
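Both initializers now carry an identical copy of the texture-cache setup. A hypothetical refactoring — not part of this commit — would hoist the shared code into a helper that both init methods call after [super init]:

    - (void)commonInitTextureCache; // hypothetical helper, not in the commit
    {
        if (![GPUImageOpenGLESContext supportsFastTextureUpload])
        {
            return;
        }

        [GPUImageOpenGLESContext useImageProcessingContext];
    #if defined(__IPHONE_6_0)
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
    #else
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context], NULL, &coreVideoTextureCache);
    #endif
        NSAssert(!err, @"Error at CVOpenGLESTextureCacheCreate %d", err);

        // Need to remove the initially created texture
        [self deleteOutputTexture];
    }

Each initializer would then reduce to calling this helper and setting url/asset as appropriate.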

@@ -55,7 +87,6 @@ - (void)dealloc
        CFRelease(coreVideoTextureCache);
    }
}

#pragma mark -
#pragma mark Movie processing

@@ -67,76 +98,86 @@ - (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieW

- (void)startProcessing;
{
    if (self.url == nil)
    {
        [self processAsset];
        return;
    }

    NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];

    [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:^{
        NSError *error = nil;
        AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
        if (tracksStatus != AVKeyValueStatusLoaded)
        {
            return;
        }
        self.asset = inputAsset;
        [self processAsset];
    }];
}

- (void)processAsset
{
    __unsafe_unretained GPUImageMovie *weakSelf = self;
    NSError *error = nil;
    reader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];

    NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
    [outputSettings setObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
    AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
    [reader addOutput:readerVideoTrackOutput];

    NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
    BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (weakSelf.audioEncodingTarget != nil));
    AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;

    if (shouldRecordAudioTrack)
    {
        audioEncodingIsFinished = NO;

        // This might need to be extended to handle movies with more than one audio track
        AVAssetTrack *audioTrack = [audioTracks objectAtIndex:0];
        readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
        [reader addOutput:readerAudioTrackOutput];
    }

    if ([reader startReading] == NO)
    {
        NSLog(@"Error reading from file at URL: %@", weakSelf.url);
        return;
    }

    if (synchronizedMovieWriter != nil)
    {
        [synchronizedMovieWriter setVideoInputReadyCallback:^{
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
        }];

        [synchronizedMovieWriter setAudioInputReadyCallback:^{
            [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
        }];

        [synchronizedMovieWriter enableSynchronizationCallbacks];
    }
    else
    {
        while (reader.status == AVAssetReaderStatusReading)
        {
            [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];

            if ((shouldRecordAudioTrack) && (!audioEncodingIsFinished))
            {
                [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
            }
        }

        if (reader.status == AVAssetReaderStatusCompleted)
        {
            [weakSelf endProcessing];
        }
    }
}
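One behavioral consequence worth noting: with a URL, -startProcessing still loads the "tracks" key asynchronously before calling -processAsset, but with a pre-opened asset it calls -processAsset immediately, and the read loop blocks the calling thread whenever no synchronizedMovieWriter is attached. A caller-side sketch, assuming the asset's tracks are already loaded and reusing the movieFile variable from the earlier example; the queue choice is illustrative:

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [movieFile startProcessing]; // synchronous read loop runs off the main thread
    });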

- (void)readNextVideoFrameFromOutput:(AVAssetReaderTrackOutput *)readerVideoTrackOutput;
@@ -223,7 +264,7 @@ - (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    for (id<GPUImageInput> currentTarget in targets)
    {
        NSInteger indexOfObject = [targets indexOfObject:currentTarget];
        NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
