Skip to content

Commit

Permalink
Improved the speed of movie recording on the Mac by directly pulling …
Browse files Browse the repository at this point in the history
…BGRA frames. Corrected a movie recording bug that occurred when the requested recording size didn't match the incoming camera frame size.
  • Loading branch information
BradLarson committed Jun 2, 2016
1 parent 3a90a50 commit 35f5799
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.11;
MACOSX_DEPLOYMENT_TARGET = 10.9;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
Expand Down Expand Up @@ -315,7 +315,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.11;
MACOSX_DEPLOYMENT_TARGET = 10.9;
MTL_ENABLE_DEBUG_INFO = NO;
SDKROOT = macosx;
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
filter = SmoothToonFilter()

camera --> filter --> renderView
camera.runBenchmark = true
camera.startCapture()
} catch {
fatalError("Couldn't initialize pipeline, error: \(error)")
Expand All @@ -38,7 +39,8 @@ class AppDelegate: NSObject, NSApplicationDelegate {
if okayButton == NSModalResponseOK {
do {
self.isRecording = true
movieOutput = try MovieOutput(URL:movieSavingDialog.URL!, size:Size(width:1280, height:720), liveVideo:true)
// movieOutput = try MovieOutput(URL:movieSavingDialog.URL!, size:Size(width:1280, height:720), liveVideo:true)
movieOutput = try MovieOutput(URL:movieSavingDialog.URL!, size:Size(width:640, height:480), liveVideo:true)
// camera.audioEncodingTarget = movieOutput
filter --> movieOutput!
movieOutput!.startRecording()
Expand Down
13 changes: 6 additions & 7 deletions framework/Source/Mac/MovieOutput.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
var assetWriterAudioInput:AVAssetWriterInput?
let assetWriterPixelBufferInput:AVAssetWriterInputPixelBufferAdaptor
let size:Size
let colorSwizzlingShader:ShaderProgram
private var isRecording = false
private var videoEncodingIsFinished = false
private var audioEncodingIsFinished = false
Expand All @@ -24,8 +23,6 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
private var encodingLiveVideo:Bool

public init(URL:NSURL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws {
self.colorSwizzlingShader = crashOnShaderCompileFailure("MovieOutput"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(1), fragmentShader:ColorSwizzlingFragmentShader)}

self.size = size
assetWriter = try AVAssetWriter(URL:URL, fileType:fileType)
// Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case.
Expand All @@ -50,7 +47,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(int:Int32(kCVPixelFormatType_32BGRA)),
kCVPixelBufferWidthKey as String:NSNumber(float:size.width),
kCVPixelBufferHeightKey as String:NSNumber(float:size.height)]

assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary)
assetWriter.addInput(assetWriterVideoInput)
}
Expand Down Expand Up @@ -134,17 +131,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
}

func renderIntoPixelBuffer(pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) {
let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size)
let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size))
renderFramebuffer.lock()

renderFramebuffer.activateFramebufferForRendering()
clearFramebufferWithColor(Color.Black)
renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)])

renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)])

CVPixelBufferLockBaseAddress(pixelBuffer, 0)
glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
renderFramebuffer.unlock()
}

// MARK: -
// MARK: Audio support

Expand Down
4 changes: 2 additions & 2 deletions framework/Source/iOS/MovieOutput.swift
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {

func renderIntoPixelBuffer(pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) {
if !sharedImageProcessingContext.supportsTextureCaches() {
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size)
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size))
renderFramebuffer.lock()
}

Expand All @@ -172,7 +172,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
if sharedImageProcessingContext.supportsTextureCaches() {
glFinish()
} else {
glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
renderFramebuffer.unlock()
}
}
Expand Down

0 comments on commit 35f5799

Please sign in to comment.