Skip to content

Commit

Permalink
Fixed a bug with Camera.stopRunning(), updated for Xcode 8 beta 2, fixed a series of race conditions.
Browse files Browse the repository at this point in the history
  • Loading branch information
BradLarson committed Jul 7, 2016
1 parent 9c4e296 commit f478f48
Show file tree
Hide file tree
Showing 18 changed files with 83 additions and 54 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ class FilterShowcaseWindowController: NSWindowController {
do {
videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset1280x720)
videoCamera.runBenchmark = true
videoCamera.startCapture()
} catch {
fatalError("Couldn't initialize camera with error: \(error)")
}
Expand All @@ -43,7 +44,7 @@ class FilterShowcaseWindowController: NSWindowController {
currentlySelectedRow = row

// Clean up everything from the previous filter selection first
videoCamera.stopCapture()
// videoCamera.stopCapture()
videoCamera.removeAllTargets()
currentFilterOperation?.filter.removeAllTargets()
currentFilterOperation?.secondInput?.removeAllTargets()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
}

func applicationDidFinishLaunching(_ aNotification: Notification) {
let bundleURL = Bundle.main().resourceURL!
let bundleURL = Bundle.main.resourceURL!
let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)!

do {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,9 @@ class ViewController: UIViewController {

let pngImage = UIImagePNGRepresentation(filteredImage)!
do {
let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
let fileURL = URL(string:"test.png", relativeTo:documentsDir)!
try pngImage.write(to:fileURL, options:.dataWritingAtomic)
try pngImage.write(to:fileURL, options:.atomic)
} catch {
print("Couldn't write to file with error: \(error)")
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ class ViewController: UIViewController {
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()

let bundleURL = Bundle.main().resourceURL!
let bundleURL = Bundle.main.resourceURL!
let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)!

do {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ class ViewController: UIViewController {
@IBAction func capture(_ sender: AnyObject) {
print("Capture")
do {
let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
saturationFilter.saveNextFrameToURL(URL(string:"TestImage.png", relativeTo:documentsDir)!, format:.png)
} catch {
print("Couldn't save image: \(error)")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,26 +31,29 @@ class ViewController: UIViewController {
if (!isRecording) {
do {
self.isRecording = true
let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
let fileURL = URL(string:"test.mp4", relativeTo:documentsDir)!
do {
try FileManager.default().removeItem(at:fileURL)
try FileManager.default.removeItem(at:fileURL)
} catch {
}

movieOutput = try MovieOutput(URL:fileURL, size:Size(width:480, height:640), liveVideo:true)
camera.audioEncodingTarget = movieOutput
filter --> movieOutput!
movieOutput!.startRecording()
(sender as! UIButton).titleLabel?.text = "Stop"
DispatchQueue.main.async {
// Label not updating on the main thread, for some reason, so dispatching slightly after this
(sender as! UIButton).titleLabel!.text = "Stop"
}
} catch {
fatalError("Couldn't initialize movie, error: \(error)")
}
} else {
movieOutput?.finishRecording{
self.isRecording = false
DispatchQueue.main.async {
(sender as! UIButton).titleLabel?.text = "Record"
(sender as! UIButton).titleLabel!.text = "Record"
}
self.camera.audioEncodingTarget = nil
self.movieOutput = nil
Expand Down
2 changes: 2 additions & 0 deletions framework/GPUImage-Mac.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,7 @@
BCBEC0C61CCD2E6200B70ED7 /* Histogram.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0C51CCD2E6200B70ED7 /* Histogram.swift */; };
BCBEC0E01CCD492D00B70ED7 /* HistogramEqualization.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0DF1CCD492D00B70ED7 /* HistogramEqualization.swift */; };
BCBEC0FA1CCD993900B70ED7 /* MovieInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0F91CCD993900B70ED7 /* MovieInput.swift */; };
BCCAED291D2ED9C700DE28D1 /* Timestamp.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB825BA1CC9C96B00339790 /* Timestamp.swift */; };
BCD1B14A1C66AE00001F2BDC /* SerialDispatch.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD1B1491C66AE00001F2BDC /* SerialDispatch.swift */; };
BCD1B14C1C66B225001F2BDC /* Pipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD1B14B1C66B225001F2BDC /* Pipeline.swift */; };
BCE111A51CBC94FD005293A4 /* AverageLuminanceExtractor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE111A41CBC94FD005293A4 /* AverageLuminanceExtractor.swift */; };
Expand Down Expand Up @@ -1192,6 +1193,7 @@
BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */,
BC7FD1941CB1D3E000037949 /* Size.swift in Sources */,
BC0923A51C92669900A2ADFA /* FramebufferCache.swift in Sources */,
BCCAED291D2ED9C700DE28D1 /* Timestamp.swift in Sources */,
BC6E7CC81C39AD9E006DF678 /* ShaderProgram.swift in Sources */,
BC7FD0FF1CB071DC00037949 /* Color.swift in Sources */,
BC4C85EF1C9F043400FD95D8 /* ConvertedShaders_GL.swift in Sources */,
Expand Down
10 changes: 6 additions & 4 deletions framework/Source/BasicOperation.swift
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,11 @@ public class BasicOperation: ImageProcessingOperation {
}

public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) {
guard let renderFramebuffer = renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return }

renderFramebuffer.lock()
target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex)
sharedImageProcessingContext.runOperationAsynchronously{
guard let renderFramebuffer = self.renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return }

renderFramebuffer.lock()
target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex)
}
}
}
2 changes: 1 addition & 1 deletion framework/Source/Mac/Camera.swift
Original file line number Diff line number Diff line change
Expand Up @@ -195,7 +195,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
}

public func stopCapture() {
if (!captureSession.isRunning) {
if (captureSession.isRunning) {
captureSession.stopRunning()
}
}
Expand Down
2 changes: 1 addition & 1 deletion framework/Source/Mac/OpenGLContext.swift
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import OpenGL.GL3
import OpenGL.GL
import Cocoa

// TODO: Figure out way to allow for multiple contexts for different GPUs
Expand Down
8 changes: 5 additions & 3 deletions framework/Source/Operations/Halftone.swift
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
public class Halftone: BasicOperation {
public var fractionalWidthOfAPixel:Float = 0.01 {
didSet {
let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048)
uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth)
sharedImageProcessingContext.runOperationAsynchronously{
let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048)
self.uniformSettings["fractionalWidthOfPixel"] = max(self.fractionalWidthOfAPixel, imageWidth)
}
}
}

Expand All @@ -11,4 +13,4 @@ public class Halftone: BasicOperation {

({fractionalWidthOfAPixel = 0.01})()
}
}
}
32 changes: 20 additions & 12 deletions framework/Source/Pipeline.swift
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
// MARK: -
// MARK: Basic types
import Foundation

public protocol ImageSource {
var targets:TargetContainer { get }
Expand Down Expand Up @@ -89,37 +90,44 @@ class WeakImageConsumer {
public class TargetContainer:Sequence {
var targets = [WeakImageConsumer]()
var count:Int { get {return targets.count}}

let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: [])

public init() {
}

public func append(_ target:ImageConsumer, indexAtTarget:UInt) {
// TODO: Don't allow the addition of a target more than once
targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget))
dispatchQueue.async{
self.targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget))
}
}

public func makeIterator() -> AnyIterator<(ImageConsumer, UInt)> {
var index = 0

return AnyIterator { () -> (ImageConsumer, UInt)? in
if (index >= self.targets.count) {
return nil
}

while (self.targets[index].value == nil) {
self.targets.remove(at:index)
return self.dispatchQueue.sync{
if (index >= self.targets.count) {
return nil
}

while (self.targets[index].value == nil) {
self.targets.remove(at:index)
if (index >= self.targets.count) {
return nil
}
}

index += 1
return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget)
}

index += 1
return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget)
}
}

public func removeAll() {
targets.removeAll()
dispatchQueue.async{
self.targets.removeAll()
}
}
}

Expand Down
2 changes: 1 addition & 1 deletion framework/Source/SerialDispatch.swift
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ func runOnMainQueue(_ mainThreadOperation:() -> ()) {
}
}

@warn_unused_result func runOnMainQueue<T>(_ mainThreadOperation:() -> T) -> T {
func runOnMainQueue<T>(_ mainThreadOperation:() -> T) -> T {
var returnedValue: T!
runOnMainQueue {
returnedValue = mainThreadOperation()
Expand Down
6 changes: 3 additions & 3 deletions framework/Source/iOS/Camera.swift
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
let bufferHeight = CVPixelBufferGetHeight(cameraFrame)
let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

CVPixelBufferLockBaseAddress(cameraFrame, 0)
CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
sharedImageProcessingContext.runOperationAsynchronously{
let cameraFramebuffer:Framebuffer

Expand Down Expand Up @@ -236,7 +236,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture)
glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame))
}
CVPixelBufferUnlockBaseAddress(cameraFrame, 0)
CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))

cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime))
self.updateTargetsWithFramebuffer(cameraFramebuffer)
Expand Down Expand Up @@ -275,7 +275,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
}

public func stopCapture() {
if (!captureSession.isRunning) {
if (captureSession.isRunning) {
captureSession.stopRunning()
}
}
Expand Down
4 changes: 2 additions & 2 deletions framework/Source/iOS/MovieInput.swift
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ public class MovieInput: ImageSource {
func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) {
let bufferHeight = CVPixelBufferGetHeight(movieFrame)
let bufferWidth = CVPixelBufferGetWidth(movieFrame)
CVPixelBufferLockBaseAddress(movieFrame, 0)
CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))

let conversionMatrix = colorConversionMatrix601FullRangeDefault
// TODO: Get this color query working
Expand Down Expand Up @@ -172,7 +172,7 @@ public class MovieInput: ImageSource {
let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false)

convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix)
CVPixelBufferUnlockBaseAddress(movieFrame, 0)
CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))

movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime))
self.updateTargetsWithFramebuffer(movieFramebuffer)
Expand Down
10 changes: 5 additions & 5 deletions framework/Source/iOS/MovieOutput.swift
Original file line number Diff line number Diff line change
Expand Up @@ -74,9 +74,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
* Tagging the resulting video file as BT.601, is the best option right now.
* Creating a proper BT.709 video is not possible at the moment.
*/
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate)
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate)
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate)
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate)
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate)
CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate)

let bufferSize = GLSize(self.size)
var cachedTextureRef:CVOpenGLESTexture? = nil
Expand Down Expand Up @@ -152,7 +152,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
debugPrint("Problem appending pixel buffer at time: \(frameTime)")
}

CVPixelBufferUnlockBaseAddress(pixelBuffer!, 0)
CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
if !sharedImageProcessingContext.supportsTextureCaches() {
pixelBuffer = nil
}
Expand All @@ -166,7 +166,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {

renderFramebuffer.activateFramebufferForRendering()
clearFramebufferWithColor(Color.Black)
CVPixelBufferLockBaseAddress(pixelBuffer, 0)
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0)))
renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)])

if sharedImageProcessingContext.supportsTextureCaches() {
Expand Down
2 changes: 1 addition & 1 deletion framework/Source/iOS/PictureOutput.swift
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ public class PictureOutput: ImageConsumer {
self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured
encodedImageAvailableCallback = {imageData in
do {
try imageData.write(to: self.url, options:.dataWritingAtomic)
try imageData.write(to: self.url, options:.atomic)
} catch {
// TODO: Handle this better
print("WARNING: Couldn't save image with error:\(error)")
Expand Down
33 changes: 22 additions & 11 deletions framework/Tests/Pipeline_Tests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,27 @@ class FakeOperation: ImageProcessingOperation {
self.name = name
}

func newFramebufferAvailable(framebuffer:Framebuffer, fromProducer:ImageSource) {
func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) {
}

func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) {
}
}

class FakeRenderView: ImageConsumer {
let sources = SourceContainer()
let maximumInputs:UInt = 1

func newFramebufferAvailable(framebuffer:Framebuffer, fromProducer:ImageSource) {
func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) {
}
}

class FakeCamera: ImageSource {
let targets = TargetContainer()

func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) {
}

func newCameraFrame() {
// Framebuffer has size, orientation encoded in it

Expand All @@ -46,15 +52,15 @@ class Pipeline_Tests: XCTestCase {

// All operations have been added and should have a strong reference
var operation1:FakeOperation? = FakeOperation(name:"Operation 1")
targetContainer.append(operation1!)
targetContainer.append(operation1!, indexAtTarget:0)
var operation2:FakeOperation? = FakeOperation(name:"Operation 2")
targetContainer.append(operation2!)
let operation3:FakeOperation? = FakeOperation(name:"Operation 3")
targetContainer.append(operation3!)
targetContainer.append(operation2!, indexAtTarget:0)
var operation3:FakeOperation? = FakeOperation(name:"Operation 3")
targetContainer.append(operation3!, indexAtTarget:0)
var operation4:FakeOperation? = FakeOperation(name:"Operation 4")
targetContainer.append(operation4!)
targetContainer.append(operation4!, indexAtTarget:0)

for (index, target) in targetContainer.enumerate() {
for (index, (target, _)) in targetContainer.enumerated() {
let operation = target as! FakeOperation
switch index {
case 0: XCTAssert(operation.name == "Operation 1")
Expand All @@ -69,7 +75,7 @@ class Pipeline_Tests: XCTestCase {
operation2 = nil
operation4 = nil

for (index, target) in targetContainer.enumerate() {
for (index, (target, _)) in targetContainer.enumerated() {
let operation = target as! FakeOperation
switch index {
case 0: XCTAssert(operation.name == "Operation 1")
Expand All @@ -80,13 +86,18 @@ class Pipeline_Tests: XCTestCase {

operation1 = nil

for (index, target) in targetContainer.enumerate() {
for (index, (target, _)) in targetContainer.enumerated() {
let operation = target as! FakeOperation
switch index {
case 0: XCTAssert(operation.name == "Operation 3")
default: XCTFail("Should not have hit an index this high")
default: XCTFail("Should not have hit an index this high")
}
}

operation3 = nil
for (_, (_, _)) in targetContainer.enumerated() {
XCTFail("Should not be any targets left in the container")
}
}

func testSourceContainer() {
Expand Down

0 comments on commit f478f48

Please sign in to comment.