From c55ee85622b855140cc7abc5ab244025cacbfc25 Mon Sep 17 00:00:00 2001
From: Brad Larson
Date: Tue, 21 Jun 2016 13:58:12 -0500
Subject: [PATCH 01/14] Performed initial conversion to Swift 3.

---
 .../FilterShowcase.xcodeproj/project.pbxproj  |   6 +-
 .../FilterShowcase/AppDelegate.swift          |   2 +-
 .../FilterShowcase/FilterOperationTypes.swift |  20 +-
 .../FilterShowcase/FilterOperations.swift     | 432 +++++++++---------
 .../FilterShowcaseWindowController.swift      |  28 +-
 .../project.pbxproj                           |   6 +-
 .../SimpleImageFilter/AppDelegate.swift       |   2 +-
 .../project.pbxproj                           |   6 +-
 .../SimpleMovieFilter/AppDelegate.swift       |   6 +-
 .../project.pbxproj                           |   6 +-
 .../SimpleVideoFilter/AppDelegate.swift       |  10 +-
 .../project.pbxproj                           |   6 +-
 .../SimpleVideoRecorder/AppDelegate.swift     |  12 +-
 .../FilterShowcase.xcodeproj/project.pbxproj  |  11 +-
 .../FilterShowcaseSwift/AppDelegate.swift     |   2 +-
 .../FilterDisplayViewController.swift         |  28 +-
 .../FilterListViewController.swift            |  14 +-
 .../project.pbxproj                           |   3 +
 .../SimpleImageFilter/AppDelegate.swift       |   2 +-
 .../SimpleImageFilter/ViewController.swift    |   6 +-
 .../project.pbxproj                           |   4 +
 .../SimpleMovieFilter/ViewController.swift    |   4 +-
 .../project.pbxproj                           |   3 +
 .../SimpleVideoFilter/AppDelegate.swift       |  12 +-
 .../SimpleVideoFilter/ViewController.swift    |  38 +-
 .../project.pbxproj                           |   3 +
 .../SimpleVideoRecorder/ViewController.swift  |  12 +-
 .../GPUImage-Mac.xcodeproj/project.pbxproj    |   7 +-
 .../xcshareddata/xcschemes/GPUImage.xcscheme  |   2 +-
 .../GPUImage-iOS.xcodeproj/project.pbxproj    |   7 +-
 .../xcshareddata/xcschemes/GPUImage.xcscheme  |   2 +-
 framework/Source/BasicOperation.swift         |  30 +-
 framework/Source/CameraConversion.swift       |   4 +-
 framework/Source/FillMode.swift               |  35 +-
 framework/Source/Framebuffer.swift            |  66 +--
 framework/Source/FramebufferCache.swift       |   4 +-
 framework/Source/ImageGenerator.swift         |   6 +-
 framework/Source/ImageOrientation.swift       |  56 +--
 framework/Source/Mac/Camera.swift             |  44 +-
 framework/Source/Mac/MovieInput.swift         |  46 +-
 framework/Source/Mac/MovieOutput.swift        |  62 +--
 framework/Source/Mac/OpenGLContext.swift      |  15 +-
 framework/Source/Mac/PictureInput.swift       |  50 +-
 framework/Source/Mac/PictureOutput.swift      |  48 +-
 framework/Source/Mac/RenderView.swift         |  10 +-
 framework/Source/Matrix.swift                 |   4 +-
 framework/Source/OpenGLContext_Shared.swift   |  18 +-
 framework/Source/OpenGLRendering.swift        |  26 +-
 framework/Source/OperationGroup.swift         |   8 +-
 .../Operations/AverageColorExtractor.swift    |   6 +-
 .../AverageLuminanceExtractor.swift           |   4 +-
 framework/Source/Operations/BoxBlur.swift     |   4 +-
 .../Source/Operations/CircleGenerator.swift   |   8 +-
 framework/Source/Operations/Crop.swift        |   8 +-
 .../Operations/CrosshairGenerator.swift       |   6 +-
 .../Source/Operations/GaussianBlur.swift      |  16 +-
 .../Operations/HarrisCornerDetector.swift     |  10 +-
 framework/Source/Operations/Histogram.swift   |  28 +-
 .../Operations/HistogramEqualization.swift    |  22 +-
 framework/Source/Operations/ImageBuffer.swift |   4 +-
 .../Source/Operations/LanczosResampling.swift |  14 +-
 .../Source/Operations/LineGenerator.swift     |  12 +-
 framework/Source/Operations/MotionBlur.swift  |   4 +-
 framework/Source/Operations/Sharpen.swift     |   6 +-
 .../SingleComponentGaussianBlur.swift         |   2 +-
 .../Operations/SolidColorGenerator.swift      |   4 +-
 .../Operations/TransformOperation.swift       |  10 +-
 framework/Source/Pipeline.swift               |  40 +-
 framework/Source/RawDataInput.swift           |  22 +-
 framework/Source/RawDataOutput.swift          |  10 +-
 framework/Source/SerialDispatch.swift         |  39 +-
 framework/Source/ShaderProgram.swift          |  58 +--
 framework/Source/ShaderUniformSettings.swift  |   2 +-
 framework/Source/TextureInput.swift           |   6 +-
 framework/Source/TextureOutput.swift          |   6 +-
 .../Source/TextureSamplingOperation.swift     |   6 +-
 framework/Source/Timestamp.swift              |   2 +-
 framework/Source/TwoStageOperation.swift      |  22 +-
 framework/Source/iOS/Camera.swift             |  64 +--
 framework/Source/iOS/MovieInput.swift         |  42 +-
 framework/Source/iOS/MovieOutput.swift        |  62 +--
 framework/Source/iOS/OpenGLContext.swift      |  25 +-
 framework/Source/iOS/PictureInput.swift       |  48 +-
 framework/Source/iOS/PictureOutput.swift      |  48 +-
 framework/Source/iOS/RenderView.swift         |  16 +-
 85 files changed, 988 insertions(+), 932 deletions(-)

diff --git a/examples/Mac/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/Mac/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj
index 07cbe2f3..7f252baf 100755
--- a/examples/Mac/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj
+++ b/examples/Mac/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj
@@ -189,11 +189,12 @@
 			isa = PBXProject;
 			attributes = {
 				LastSwiftUpdateCheck = 0720;
-				LastUpgradeCheck = 0720;
+				LastUpgradeCheck = 0800;
 				ORGANIZATIONNAME = "Sunset Lake Software LLC";
 				TargetAttributes = {
 					BC1E133A1C9F82B4008F844F = {
 						CreatedOnToolsVersion = 7.2.1;
+						LastSwiftMigration = 0800;
 					};
 				};
 			};
@@ -381,6 +382,7 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.FilterShowcase;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Debug;
 		};
@@ -393,6 +395,8 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.FilterShowcase;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Release;
 		};
diff --git a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift
index 61885431..19f72e6a 100755
--- a/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift
+++ b/examples/Mac/FilterShowcase/FilterShowcase/AppDelegate.swift
@@ -7,7 +7,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
 
     var windowController:FilterShowcaseWindowController?
 
-    func applicationDidFinishLaunching(aNotification: NSNotification) {
+    func applicationDidFinishLaunching(_ aNotification: Notification) {
         self.windowController = FilterShowcaseWindowController(windowNibName:"FilterShowcaseWindowController")
         self.windowController?.showWindow(self)
     }
diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift
index 460e8b3c..760bcbe8 100755
--- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift
+++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift
@@ -2,16 +2,16 @@ import Foundation
 import GPUImage
 
 enum FilterSliderSetting {
-    case Disabled
-    case Enabled(minimumValue:Float, maximumValue:Float, initialValue:Float)
+    case disabled
+    case enabled(minimumValue:Float, maximumValue:Float, initialValue:Float)
 }
 
 typealias FilterSetupFunction = (camera:Camera, filter:ImageProcessingOperation, outputView:RenderView) -> ImageSource?
 
 enum FilterOperationType {
-    case SingleInput
-    case Blend
-    case Custom(filterSetupFunction:FilterSetupFunction)
+    case singleInput
+    case blend
+    case custom(filterSetupFunction:FilterSetupFunction)
 }
 
 protocol FilterOperationInterface {
@@ -22,8 +22,8 @@ protocol FilterOperationInterface {
     var sliderConfiguration: FilterSliderSetting { get }
     var filterOperationType: FilterOperationType { get }
 
-    func configureCustomFilter(secondInput:ImageSource?)
-    func updateBasedOnSliderValue(sliderValue:Float)
+    func configureCustomFilter(_ secondInput:ImageSource?)
+    func updateBasedOnSliderValue(_ sliderValue:Float)
 }
 
 class FilterOperation<FilterClass: ImageProcessingOperation>: FilterOperationInterface {
@@ -50,11 +50,11 @@ class FilterOperation<FilterClass: ImageProcessingOperation>: FilterOperationInt
         return internalFilter
     }
 
-    func configureCustomFilter(secondInput:ImageSource?) {
+    func configureCustomFilter(_ secondInput:ImageSource?) {
         self.secondInput = secondInput
     }
 
-    func updateBasedOnSliderValue(sliderValue:Float) {
+    func updateBasedOnSliderValue(_ sliderValue:Float) {
         sliderUpdateCallback?(filter:internalFilter, sliderValue:sliderValue)
     }
-}
\ No newline at end of file
+}
diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift
index 94d212de..dc60509f 100755
--- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift
+++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift
@@ -6,37 +6,37 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{SaturationAdjustment()},
         listName:"Saturation",
         titleName:"Saturation",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.saturation = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ContrastAdjustment()},
         listName:"Contrast",
         titleName:"Contrast",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.contrast = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
        filter:{BrightnessAdjustment()},
         listName:"Brightness",
         titleName:"Brightness",
-        sliderConfiguration:.Enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.brightness = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{LevelsAdjustment()},
         listName:"Levels",
         titleName:"Levels",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.minimum = Color(red:Float(sliderValue), green:Float(sliderValue), blue:Float(sliderValue))
             filter.middle = Color(red:1.0, green:1.0, blue:1.0)
@@ -44,57 +44,57 @@ let filterOperations: Array<FilterOperationInterface> = [
             filter.minOutput = Color(red:0.0, green:0.0, blue:0.0)
             filter.maxOutput = Color(red:1.0, green:1.0, blue:1.0)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ExposureAdjustment()},
         listName:"Exposure",
         titleName:"Exposure",
-        sliderConfiguration:.Enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:-4.0, maximumValue:4.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.exposure = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{RGBAdjustment()},
         listName:"RGB",
         titleName:"RGB",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.green = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{HueAdjustment()},
         listName:"Hue",
         titleName:"Hue",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:360.0, initialValue:90.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.hue = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{WhiteBalance()},
         listName:"White balance",
         titleName:"White Balance",
-        sliderConfiguration:.Enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0),
+        sliderConfiguration:.enabled(minimumValue:2500.0, maximumValue:7500.0, initialValue:5000.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.temperature = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{MonochromeFilter()},
         listName:"Monochrome",
         titleName:"Monochrome",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.intensity = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! MonochromeFilter
             camera --> castFilter --> outputView
             castFilter.color = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0)
@@ -105,45 +105,45 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{FalseColor()},
         listName:"False color",
         titleName:"False Color",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Sharpen()},
         listName:"Sharpen",
         titleName:"Sharpen",
-        sliderConfiguration:.Enabled(minimumValue:-1.0, maximumValue:4.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:4.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.sharpness = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{UnsharpMask()},
         listName:"Unsharp mask",
         titleName:"Unsharp Mask",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.intensity = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{TransformOperation()},
         listName:"Transform (2-D)",
         titleName:"Transform (2-D)",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75),
         sliderUpdateCallback:{(filter, sliderValue) in
-            filter.transform = Matrix4x4(CGAffineTransformMakeRotation(CGFloat(sliderValue)))
+            filter.transform = Matrix4x4(CGAffineTransform(rotationAngle:CGFloat(sliderValue)))
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{TransformOperation()},
         listName:"Transform (3-D)",
         titleName:"Transform (3-D)",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:6.28, initialValue:0.75),
         sliderUpdateCallback:{(filter, sliderValue) in
             var perspectiveTransform = CATransform3DIdentity
             perspectiveTransform.m34 = 0.4
@@ -152,25 +152,25 @@ let filterOperations: Array<FilterOperationInterface> = [
             perspectiveTransform = CATransform3DRotate(perspectiveTransform, CGFloat(sliderValue), 0.0, 1.0, 0.0)
             filter.transform = Matrix4x4(perspectiveTransform)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Crop()},
         listName:"Crop",
         titleName:"Crop",
-        sliderConfiguration:.Enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0),
+        sliderConfiguration:.enabled(minimumValue:240.0, maximumValue:480.0, initialValue:240.0),
         sliderUpdateCallback:{(filter, sliderValue) in
             filter.cropSizeInPixels = Size(width:480.0, height:sliderValue)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Luminance()},
         listName:"Masking",
         titleName:"Mask Example",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! Luminance
             let maskImage = PictureInput(imageName:"Mask.png")
             castFilter.drawUnmodifiedImageOutsideOfMask = false
@@ -184,122 +184,122 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{GammaAdjustment()},
         listName:"Gamma",
         titleName:"Gamma",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:3.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.gamma = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     // TODO : Tone curve
     FilterOperation(
         filter:{HighlightsAndShadows()},
         listName:"Highlights and shadows",
         titleName:"Highlights and Shadows",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.highlights = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Haze()},
         listName:"Haze / UV",
         titleName:"Haze / UV",
-        sliderConfiguration:.Enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2),
+        sliderConfiguration:.enabled(minimumValue:-0.2, maximumValue:0.2, initialValue:0.2),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.distance = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SepiaToneFilter()},
         listName:"Sepia tone",
         titleName:"Sepia Tone",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.intensity = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{AmatorkaFilter()},
         listName:"Amatorka (Lookup)",
         titleName:"Amatorka (Lookup)",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{MissEtikateFilter()},
         listName:"Miss Etikate (Lookup)",
         titleName:"Miss Etikate (Lookup)",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SoftElegance()},
         listName:"Soft elegance (Lookup)",
         titleName:"Soft Elegance (Lookup)",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ColorInversion()},
         listName:"Color invert",
         titleName:"Color Invert",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Solarize()},
         listName:"Solarize",
         titleName:"Solarize",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Vibrance()},
         listName:"Vibrance",
         titleName:"Vibrance",
-        sliderConfiguration:.Enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:-1.2, maximumValue:1.2, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.vibrance = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
    ),
     FilterOperation(
         filter:{HighlightAndShadowTint()},
         listName:"Highlight and shadow tint",
         titleName:"Highlight / Shadow Tint",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.shadowTintIntensity = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation (
         filter:{Luminance()},
         listName:"Luminance",
         titleName:"Luminance",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
-        filter:{Histogram(type:.RGB)},
+        filter:{Histogram(type:.rgb)},
         listName:"Histogram",
         titleName:"Histogram",
-        sliderConfiguration:.Enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0),
+        sliderConfiguration:.enabled(minimumValue:4.0, maximumValue:32.0, initialValue:16.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.downsamplingFactor = UInt(round(sliderValue))
         },
-        filterOperationType:.Custom(filterSetupFunction: {(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction: {(camera, filter, outputView) in
             let castFilter = filter as! Histogram
             let histogramGraph = HistogramDisplay()
             histogramGraph.overriddenOutputSize = Size(width:256.0, height:330.0)
@@ -312,20 +312,20 @@ let filterOperations: Array<FilterOperationInterface> = [
         })
     ),
     FilterOperation (
-        filter:{HistogramEqualization(type:.RGB)},
+        filter:{HistogramEqualization(type:.rgb)},
         listName:"Histogram equalization",
         titleName:"Histogram Equalization",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{AverageColorExtractor()},
         listName:"Average color",
         titleName:"Average Color",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! AverageColorExtractor
             let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels)
 
@@ -341,9 +341,9 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{AverageLuminanceExtractor()},
         listName:"Average luminosity",
         titleName:"Average Luminosity",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! AverageLuminanceExtractor
             let colorGenerator = SolidColorGenerator(size:outputView.sizeInPixels)
 
@@ -360,59 +360,59 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{LuminanceThreshold()},
         listName:"Luminance threshold",
         titleName:"Luminance Threshold",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{AdaptiveThreshold()},
         listName:"Adaptive threshold",
         titleName:"Adaptive Threshold",
-        sliderConfiguration:.Enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurRadiusInPixels = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{AverageLuminanceThreshold()},
         listName:"Average luminance threshold",
         titleName:"Avg. Lum. Threshold",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.thresholdMultiplier = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Pixellate()},
         listName:"Pixellate",
         titleName:"Pixellate",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.fractionalWidthOfAPixel = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{PolarPixellate()},
         listName:"Polar pixellate",
         titleName:"Polar Pixellate",
-        sliderConfiguration:.Enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05),
+        sliderConfiguration:.enabled(minimumValue:-0.1, maximumValue:0.1, initialValue:0.05),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.pixelSize = Size(width:sliderValue, height:sliderValue)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Pixellate()},
         listName:"Masked Pixellate",
         titleName:"Masked Pixellate",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
            let castFilter = filter as! Pixellate
            castFilter.fractionalWidthOfAPixel = 0.05
            // TODO: Find a way to not hardcode these values
@@ -431,81 +431,81 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{PolkaDot()},
         listName:"Polka dot",
         titleName:"Polka Dot",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.3, initialValue:0.05),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.fractionalWidthOfAPixel = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Halftone()},
         listName:"Halftone",
         titleName:"Halftone",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:0.05, initialValue:0.01),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.fractionalWidthOfAPixel = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Crosshatch()},
         listName:"Crosshatch",
         titleName:"Crosshatch",
-        sliderConfiguration:.Enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03),
+        sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.06, initialValue:0.03),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.crossHatchSpacing = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SobelEdgeDetection()},
         listName:"Sobel edge detection",
         titleName:"Sobel Edge Detection",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.edgeStrength = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{PrewittEdgeDetection()},
         listName:"Prewitt edge detection",
         titleName:"Prewitt Edge Detection",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.edgeStrength = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{CannyEdgeDetection()},
         listName:"Canny edge detection",
         titleName:"Canny Edge Detection",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:4.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurRadiusInPixels = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ThresholdSobelEdgeDetection()},
         listName:"Threshold edge detection",
         titleName:"Threshold Edge Detection",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{HarrisCornerDetector()},
         listName:"Harris corner detector",
         titleName:"Harris Corner Detector",
-        sliderConfiguration:.Enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
+        sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! HarrisCornerDetector
             // TODO: Get this more dynamically sized
 #if os(iOS)
@@ -532,11 +532,11 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{NobleCornerDetector()},
         listName:"Noble corner detector",
         titleName:"Noble Corner Detector",
-        sliderConfiguration:.Enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
+        sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! NobleCornerDetector
             // TODO: Get this more dynamically sized
 #if os(iOS)
@@ -563,11 +563,11 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{ShiTomasiFeatureDetector()},
         listName:"Shi-Tomasi feature detector",
         titleName:"Shi-Tomasi Feature Detector",
-        sliderConfiguration:.Enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
+        sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! ShiTomasiFeatureDetector
             // TODO: Get this more dynamically sized
 #if os(iOS)
@@ -595,29 +595,29 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{ColourFASTFeatureDetection()},
         listName:"ColourFAST feature detection",
         titleName:"ColourFAST Features",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{LowPassFilter()},
         listName:"Low pass",
         titleName:"Low Pass",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.strength = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{HighPassFilter()},
         listName:"High pass",
         titleName:"High Pass",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.strength = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     // TODO: Motion detector
@@ -625,76 +625,76 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{SketchFilter()},
         listName:"Sketch",
         titleName:"Sketch",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.edgeStrength = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ThresholdSketchFilter()},
         listName:"Threshold Sketch",
         titleName:"Threshold Sketch",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.25),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.threshold = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ToonFilter()},
         listName:"Toon",
         titleName:"Toon",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SmoothToonFilter()},
         listName:"Smooth toon",
         titleName:"Smooth Toon",
-        sliderConfiguration:.Enabled(minimumValue:1.0, maximumValue:6.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:6.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurRadiusInPixels = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{TiltShift()},
         listName:"Tilt shift",
         titleName:"Tilt Shift",
-        sliderConfiguration:.Enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.2, maximumValue:0.8, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.topFocusLevel = sliderValue - 0.1
             filter.bottomFocusLevel = sliderValue + 0.1
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{CGAColorspaceFilter()},
         listName:"CGA colorspace",
         titleName:"CGA Colorspace",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Posterize()},
         listName:"Posterize",
         titleName:"Posterize",
-        sliderConfiguration:.Enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0),
+        sliderConfiguration:.enabled(minimumValue:1.0, maximumValue:20.0, initialValue:10.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.colorLevels = round(sliderValue)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Convolution3x3()},
         listName:"3x3 convolution",
         titleName:"3x3 convolution",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! Convolution3x3
             castFilter.convolutionKernel = Matrix3x3(rowMajorValues:[
@@ -711,29 +711,29 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{EmbossFilter()},
         listName:"Emboss",
         titleName:"Emboss",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:5.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.intensity = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Laplacian()},
         listName:"Laplacian",
         titleName:"Laplacian",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ChromaKeying()},
         listName:"Chroma key",
         titleName:"Chroma Key",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.00, initialValue:0.40),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.thresholdSensitivity = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! ChromaKeying
             let blendFilter = AlphaBlend()
@@ -751,136 +751,136 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{KuwaharaFilter()},
         listName:"Kuwahara",
         titleName:"Kuwahara",
-        sliderConfiguration:.Enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0),
+        sliderConfiguration:.enabled(minimumValue:3.0, maximumValue:9.0, initialValue:3.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.radius = Int(round(sliderValue))
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{KuwaharaRadius3Filter()},
         listName:"Kuwahara (radius 3)",
         titleName:"Kuwahara (Radius 3)",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Vignette()},
         listName:"Vignette",
         titleName:"Vignette",
-        sliderConfiguration:.Enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75),
+        sliderConfiguration:.enabled(minimumValue:0.5, maximumValue:0.9, initialValue:0.75),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.end = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{GaussianBlur()},
         listName:"Gaussian blur",
         titleName:"Gaussian Blur",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurRadiusInPixels = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{BoxBlur()},
         listName:"Box blur",
         titleName:"Box Blur",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:40.0, initialValue:2.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurRadiusInPixels = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{MedianFilter()},
         listName:"Median",
         titleName:"Median",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{BilateralBlur()},
         listName:"Bilateral blur",
         titleName:"Bilateral Blur",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:10.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.distanceNormalizationFactor = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{MotionBlur()},
         listName:"Motion blur",
         titleName:"Motion Blur",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:180.0, initialValue:0.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurAngle = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ZoomBlur()},
         listName:"Zoom blur",
         titleName:"Zoom Blur",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.5, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.blurSize = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation( // TODO: Make this only partially applied to the view
         filter:{iOSBlur()},
         listName:"iOS 7 blur",
         titleName:"iOS 7 Blur",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SwirlDistortion()},
         listName:"Swirl",
         titleName:"Swirl",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:2.0, initialValue:1.0),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.angle = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{BulgeDistortion()},
         listName:"Bulge",
         titleName:"Bulge",
-        sliderConfiguration:.Enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:-1.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
//            filter.scale = sliderValue
             filter.center = Position(0.5, sliderValue)
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{PinchDistortion()},
         listName:"Pinch",
         titleName:"Pinch",
-        sliderConfiguration:.Enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:-2.0, maximumValue:2.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.scale = sliderValue
         },
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{SphereRefraction()},
         listName:"Sphere refraction",
         titleName:"Sphere Refraction",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15),
         sliderUpdateCallback:{(filter, sliderValue) in
             filter.radius = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! SphereRefraction
 
             // Provide a blurred image for a cool-looking background
@@ -900,11 +900,11 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{GlassSphereRefraction()},
         listName:"Glass sphere",
         titleName:"Glass Sphere",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.15),
         sliderUpdateCallback:{(filter, sliderValue) in
             filter.radius = sliderValue
         },
-        filterOperationType:.Custom(filterSetupFunction:{(camera, filter, outputView) in
+        filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in
             let castFilter = filter as! GlassSphereRefraction
 
             // Provide a blurred image for a cool-looking background
@@ -924,41 +924,41 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{StretchDistortion()},
         listName:"Stretch",
         titleName:"Stretch",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Dilation()},
         listName:"Dilation",
         titleName:"Dilation",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{Erosion()},
         listName:"Erosion",
         titleName:"Erosion",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{OpeningFilter()},
         listName:"Opening",
         titleName:"Opening",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ClosingFilter()},
         listName:"Closing",
         titleName:"Closing",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     // TODO: Perlin noise
     // TODO: JFAVoronoi
@@ -967,198 +967,198 @@ let filterOperations: Array<FilterOperationInterface> = [
         filter:{LocalBinaryPattern()},
         listName:"Local binary pattern",
         titleName:"Local Binary Pattern",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{ColorLocalBinaryPattern()},
         listName:"Local binary pattern (color)",
         titleName:"Local Binary Pattern (Color)",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.SingleInput
+        filterOperationType:.singleInput
     ),
     FilterOperation(
         filter:{DissolveBlend()},
         listName:"Dissolve blend",
         titleName:"Dissolve Blend",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.5),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.mix = sliderValue
         },
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{ChromaKeyBlend()},
         listName:"Chroma key blend (green)",
         titleName:"Chroma Key (Green)",
-        sliderConfiguration:.Enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4),
+        sliderConfiguration:.enabled(minimumValue:0.0, maximumValue:1.0, initialValue:0.4),
         sliderUpdateCallback: {(filter, sliderValue) in
             filter.thresholdSensitivity = sliderValue
         },
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{AddBlend()},
         listName:"Add blend",
         titleName:"Add Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{DivideBlend()},
         listName:"Divide blend",
         titleName:"Divide Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{MultiplyBlend()},
         listName:"Multiply blend",
         titleName:"Multiply Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{OverlayBlend()},
         listName:"Overlay blend",
         titleName:"Overlay Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{LightenBlend()},
         listName:"Lighten blend",
         titleName:"Lighten Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{DarkenBlend()},
         listName:"Darken blend",
         titleName:"Darken Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{ColorBurnBlend()},
         listName:"Color burn blend",
         titleName:"Color Burn Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{ColorDodgeBlend()},
         listName:"Color dodge blend",
         titleName:"Color Dodge Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{LinearBurnBlend()},
         listName:"Linear burn blend",
         titleName:"Linear Burn Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback: nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{ScreenBlend()},
         listName:"Screen blend",
         titleName:"Screen Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{DifferenceBlend()},
         listName:"Difference blend",
         titleName:"Difference Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{SubtractBlend()},
         listName:"Subtract blend",
         titleName:"Subtract Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{ExclusionBlend()},
         listName:"Exclusion blend",
         titleName:"Exclusion Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{HardLightBlend()},
         listName:"Hard light blend",
         titleName:"Hard Light Blend",
-        sliderConfiguration:.Disabled,
+        sliderConfiguration:.disabled,
         sliderUpdateCallback:nil,
-        filterOperationType:.Blend
+        filterOperationType:.blend
     ),
     FilterOperation(
         filter:{SoftLightBlend()},
listName:"Soft light blend", titleName:"Soft Light Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), FilterOperation( filter:{ColorBlend()}, listName:"Color blend", titleName:"Color Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), FilterOperation( filter:{HueBlend()}, listName:"Hue blend", titleName:"Hue Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), FilterOperation( filter:{SaturationBlend()}, listName:"Saturation blend", titleName:"Saturation Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), FilterOperation( filter:{LuminosityBlend()}, listName:"Luminosity blend", titleName:"Luminosity Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), FilterOperation( filter:{NormalBlend()}, listName:"Normal blend", titleName:"Normal Blend", - sliderConfiguration:.Disabled, + sliderConfiguration:.disabled, sliderUpdateCallback:nil, - filterOperationType:.Blend + filterOperationType:.blend ), // TODO: Poisson blend -] \ No newline at end of file +] diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index 6aaa66af..817ccff9 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -13,8 +13,8 @@ class FilterShowcaseWindowController: NSWindowController { dynamic var currentSliderValue:Float = 0.5 { willSet(newSliderValue) { switch (currentFilterOperation!.sliderConfiguration) { - case .Enabled: currentFilterOperation!.updateBasedOnSliderValue(newSliderValue) - case .Disabled: break + case .enabled: currentFilterOperation!.updateBasedOnSliderValue(newSliderValue) + case .disabled: break } } } @@ -38,7 +38,7 @@ class FilterShowcaseWindowController: NSWindowController { self.changeSelectedRow(0) } - func changeSelectedRow(row:Int) { + func changeSelectedRow(_ row:Int) { guard (currentlySelectedRow != row) else { return } currentlySelectedRow = row @@ -50,27 +50,27 @@ class FilterShowcaseWindowController: NSWindowController { currentFilterOperation = filterOperations[row] switch currentFilterOperation!.filterOperationType { - case .SingleInput: + case .singleInput: videoCamera.addTarget((currentFilterOperation!.filter)) currentFilterOperation!.filter.addTarget(filterView!) - case .Blend: + case .blend: blendImage.removeAllTargets() videoCamera.addTarget((currentFilterOperation!.filter)) self.blendImage.addTarget((currentFilterOperation!.filter)) currentFilterOperation!.filter.addTarget(filterView!) 
                 self.blendImage.processImage()
-            case let .Custom(filterSetupFunction:setupFunction):
+            case let .custom(filterSetupFunction:setupFunction):
                 currentFilterOperation!.configureCustomFilter(setupFunction(camera:videoCamera!, filter:currentFilterOperation!.filter, outputView:filterView!))
         }
 
         switch currentFilterOperation!.sliderConfiguration {
-            case .Disabled:
-                filterSlider.enabled = false
+            case .disabled:
+                filterSlider.isEnabled = false
//            case let .Enabled(minimumValue, initialValue, maximumValue, filterSliderCallback):
-            case let .Enabled(minimumValue, maximumValue, initialValue):
+            case let .enabled(minimumValue, maximumValue, initialValue):
                 filterSlider.minValue = Double(minimumValue)
                 filterSlider.maxValue = Double(maximumValue)
-                filterSlider.enabled = true
+                filterSlider.isEnabled = true
                 currentSliderValue = initialValue
         }
 
@@ -80,19 +80,19 @@ class FilterShowcaseWindowController: NSWindowController {
 
     // MARK: -
     // MARK: Table view delegate and datasource methods
 
-    func numberOfRowsInTableView(aTableView:NSTableView!) -> Int {
+    func numberOfRowsInTableView(_ aTableView:NSTableView!) -> Int {
         return filterOperations.count
     }
 
-    func tableView(aTableView:NSTableView!, objectValueForTableColumn aTableColumn:NSTableColumn!, row rowIndex:Int) -> AnyObject! {
+    func tableView(_ aTableView:NSTableView!, objectValueForTableColumn aTableColumn:NSTableColumn!, row rowIndex:Int) -> AnyObject! {
         let filterInList:FilterOperationInterface = filterOperations[rowIndex]
         return filterInList.listName
     }
 
-    func tableViewSelectionDidChange(aNotification: NSNotification!) {
+    func tableViewSelectionDidChange(_ aNotification: Notification!) {
         if let currentTableView = aNotification.object as? NSTableView {
             let rowIndex = currentTableView.selectedRow
             self.changeSelectedRow(rowIndex)
         }
     }
-}
\ No newline at end of file
+}
diff --git a/examples/Mac/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj b/examples/Mac/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj
index d4bd50b0..a04d8941 100755
--- a/examples/Mac/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj
+++ b/examples/Mac/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj
@@ -151,11 +151,12 @@
 			isa = PBXProject;
 			attributes = {
 				LastSwiftUpdateCheck = 0720;
-				LastUpgradeCheck = 0720;
+				LastUpgradeCheck = 0800;
 				ORGANIZATIONNAME = "Sunset Lake Software LLC";
 				TargetAttributes = {
 					BC1E13071C9F5DB9008F844F = {
 						CreatedOnToolsVersion = 7.2.1;
+						LastSwiftMigration = 0800;
 					};
 				};
 			};
@@ -333,6 +334,7 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleImageFilter;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Debug;
 		};
@@ -345,6 +347,8 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleImageFilter;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Release;
 		};
diff --git a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift
index abbff8b3..3586c02f 100755
--- a/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift
+++ b/examples/Mac/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift
@@ -17,7 +17,7 @@ class AppDelegate: NSObject, NSApplicationDelegate {
         }
     }
 
-    func applicationDidFinishLaunching(aNotification: NSNotification) {
+    func applicationDidFinishLaunching(_ aNotification: Notification) {
         let inputImage = NSImage(named:"Lambeau.jpg")!
         image = PictureInput(image:inputImage)
diff --git a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj
index 8d890a6a..88720c1f 100644
--- a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj
+++ b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj
@@ -151,11 +151,12 @@
 			isa = PBXProject;
 			attributes = {
 				LastSwiftUpdateCheck = 0730;
-				LastUpgradeCheck = 0730;
+				LastUpgradeCheck = 0800;
 				ORGANIZATIONNAME = "Sunset Lake Software LLC";
 				TargetAttributes = {
 					BCC49F571CD5AF1D00B63EEB = {
 						CreatedOnToolsVersion = 7.3;
+						LastSwiftMigration = 0800;
 					};
 				};
 			};
@@ -335,6 +336,7 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieFilter;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Debug;
 		};
@@ -347,6 +349,8 @@
 				LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks";
 				PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieFilter;
 				PRODUCT_NAME = "$(TARGET_NAME)";
+				SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
+				SWIFT_VERSION = 3.0;
 			};
 			name = Release;
 		};
diff --git a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift
index 0e7c103d..f49b40b4 100644
--- a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift
+++ b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift
@@ -16,9 +16,9 @@ class AppDelegate: NSObject, NSApplicationDelegate {
         }
     }
 
-    func applicationDidFinishLaunching(aNotification: NSNotification) {
-        let bundleURL = NSBundle.mainBundle().resourceURL!
-        let movieURL = NSURL(string:"sample_iPod.m4v", relativeToURL:bundleURL)!
+    func applicationDidFinishLaunching(_ aNotification: Notification) {
+        let bundleURL = Bundle.main().resourceURL!
+        let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)!
do { movie = try MovieInput(url:movieURL, playAtActualSpeed:true) diff --git a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj index 2bc1f094..2a53d689 100755 --- a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj +++ b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj @@ -148,11 +148,12 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0720; - LastUpgradeCheck = 0720; + LastUpgradeCheck = 0800; ORGANIZATIONNAME = "Sunset Lake Software LLC"; TargetAttributes = { BC91EDAD1C91DC4600C704A8 = { CreatedOnToolsVersion = 7.2.1; + LastSwiftMigration = 0800; }; }; }; @@ -329,6 +330,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -341,6 +343,8 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index fc2de9fb..875309df 100755 --- a/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/Mac/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -16,17 +16,17 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } - @IBAction func capture(sender: AnyObject) { + @IBAction func capture(_ sender: AnyObject) { let imageSavingDialog = NSSavePanel() imageSavingDialog.allowedFileTypes = ["png"] let okayButton = imageSavingDialog.runModal() if okayButton == NSModalResponseOK { - filter.saveNextFrameToURL(imageSavingDialog.URL!, format:.PNG) + filter.saveNextFrameToURL(imageSavingDialog.url!, format:.png) } } - func applicationDidFinishLaunching(aNotification: NSNotification) { + func applicationDidFinishLaunching(_ aNotification: Notification) { do { camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) filter = Pixellate() @@ -38,7 +38,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } - func applicationWillTerminate(aNotification: NSNotification) { + func applicationWillTerminate(_ aNotification: Notification) { camera.stopCapture() } -} \ No newline at end of file +} diff --git a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj index fa9b3365..e6c83797 100644 --- a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj +++ b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj @@ -147,11 +147,12 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0730; - LastUpgradeCheck = 0730; + LastUpgradeCheck = 0800; ORGANIZATIONNAME = "Sunset Lake Software LLC"; TargetAttributes = { BCA0C2FF1CCBF2400034F922 = { CreatedOnToolsVersion = 7.3; + LastSwiftMigration = 0800; }; }; }; @@ -330,6 +331,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -342,6 +344,8 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) 
@executable_path/../Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift index 81a10c09..cbf2f6c7 100644 --- a/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift +++ b/examples/Mac/SimpleVideoRecorder/SimpleVideoRecorder/AppDelegate.swift @@ -13,7 +13,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { var movieOutput:MovieOutput? var isRecording = false - func applicationDidFinishLaunching(aNotification: NSNotification) { + func applicationDidFinishLaunching(_ aNotification: Notification) { do { camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480) filter = SmoothToonFilter() @@ -26,11 +26,11 @@ class AppDelegate: NSObject, NSApplicationDelegate { } } - func applicationWillTerminate(aNotification: NSNotification) { + func applicationWillTerminate(_ aNotification: Notification) { camera.stopCapture() } - @IBAction func record(sender: AnyObject) { + @IBAction func record(_ sender: AnyObject) { if (!isRecording) { let movieSavingDialog = NSSavePanel() movieSavingDialog.allowedFileTypes = ["mp4"] @@ -39,8 +39,8 @@ class AppDelegate: NSObject, NSApplicationDelegate { if okayButton == NSModalResponseOK { do { self.isRecording = true -// movieOutput = try MovieOutput(URL:movieSavingDialog.URL!, size:Size(width:1280, height:720), liveVideo:true) - movieOutput = try MovieOutput(URL:movieSavingDialog.URL!, size:Size(width:640, height:480), liveVideo:true) +// movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:1280, height:720), liveVideo:true) + movieOutput = try MovieOutput(URL:movieSavingDialog.url!, size:Size(width:640, height:480), liveVideo:true) // camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording() @@ -52,7 +52,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } else { movieOutput?.finishRecording{ self.isRecording = false - dispatch_async(dispatch_get_main_queue()) { + DispatchQueue.main.async { (sender as! 
NSButton).title = "Record" } // self.camera.audioEncodingTarget = nil diff --git a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj index a3853b00..c2e9d88b 100644 --- a/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj +++ b/examples/iOS/FilterShowcase/FilterShowcase.xcodeproj/project.pbxproj @@ -190,11 +190,13 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0700; - LastUpgradeCheck = 0700; + LastUpgradeCheck = 0800; ORGANIZATIONNAME = "Sunset Lake Software"; TargetAttributes = { BC0037B6195CA11B00B9D651 = { CreatedOnToolsVersion = 6.0; + LastSwiftMigration = 0800; + ProvisioningStyle = Automatic; }; }; }; @@ -325,6 +327,7 @@ ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", @@ -369,6 +372,7 @@ ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; @@ -387,10 +391,12 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = FilterShowcase; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -398,10 +404,13 @@ isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; INFOPLIST_FILE = FilterShowcaseSwift/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.${PRODUCT_NAME:rfc1034identifier}"; PRODUCT_NAME = FilterShowcase; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift index 71526e6f..d353864b 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/AppDelegate.swift @@ -5,7 +5,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject : AnyObject]?) -> Bool { + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject : AnyObject]?) 
-> Bool { return true } } diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift index 0dfa2a62..420763bf 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterDisplayViewController.swift @@ -15,7 +15,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega required init(coder aDecoder: NSCoder) { do { - videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset640x480, location:.BackFacing) + videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset640x480, location:.backFacing) videoCamera!.runBenchmark = true } catch { videoCamera = nil @@ -29,9 +29,9 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega func configureView() { guard let videoCamera = videoCamera else { - let errorAlertController = UIAlertController(title: NSLocalizedString("Error", comment: "Error"), message: "Couldn't initialize camera", preferredStyle: .Alert) - errorAlertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "OK"), style: .Default, handler: nil)) - self.presentViewController(errorAlertController, animated: true, completion: nil) + let errorAlertController = UIAlertController(title: NSLocalizedString("Error", comment: "Error"), message: "Couldn't initialize camera", preferredStyle: .alert) + errorAlertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "OK"), style: .default, handler: nil)) + self.present(errorAlertController, animated: true, completion: nil) return } if let currentFilterConfiguration = self.filterOperation { @@ -40,16 +40,16 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega // Configure the filter chain, ending with the view if let view = self.filterView { switch currentFilterConfiguration.filterOperationType { - case .SingleInput: + case .singleInput: videoCamera.addTarget(currentFilterConfiguration.filter) currentFilterConfiguration.filter.addTarget(view) - case .Blend: + case .blend: videoCamera.addTarget(currentFilterConfiguration.filter) self.blendImage = PictureInput(imageName:blendImageName) self.blendImage?.addTarget(currentFilterConfiguration.filter) self.blendImage?.processImage() currentFilterConfiguration.filter.addTarget(view) - case let .Custom(filterSetupFunction:setupFunction): + case let .custom(filterSetupFunction:setupFunction): currentFilterConfiguration.configureCustomFilter(setupFunction(camera:videoCamera, filter:currentFilterConfiguration.filter, outputView:view)) } @@ -59,14 +59,14 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega // Hide or display the slider, based on whether the filter needs it if let slider = self.filterSlider { switch currentFilterConfiguration.sliderConfiguration { - case .Disabled: - slider.hidden = true + case .disabled: + slider.isHidden = true // case let .Enabled(minimumValue, initialValue, maximumValue, filterSliderCallback): - case let .Enabled(minimumValue, maximumValue, initialValue): + case let .enabled(minimumValue, maximumValue, initialValue): slider.minimumValue = minimumValue slider.maximumValue = maximumValue slider.value = initialValue - slider.hidden = false + slider.isHidden = false self.updateSliderValue() } } @@ -77,8 +77,8 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega @IBAction func 
updateSliderValue() { if let currentFilterConfiguration = self.filterOperation { switch (currentFilterConfiguration.sliderConfiguration) { - case .Enabled(_, _, _): currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) - case .Disabled: break + case .enabled(_, _, _): currentFilterConfiguration.updateBasedOnSliderValue(Float(self.filterSlider!.value)) + case .disabled: break } } } @@ -88,7 +88,7 @@ class FilterDisplayViewController: UIViewController, UISplitViewControllerDelega self.configureView() } - override func viewWillDisappear(animated: Bool) { + override func viewWillDisappear(_ animated: Bool) { if let videoCamera = videoCamera { videoCamera.stopCapture() videoCamera.removeAllTargets() diff --git a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift index b3adc1fc..a41155bd 100644 --- a/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift +++ b/examples/iOS/FilterShowcase/FilterShowcaseSwift/FilterListViewController.swift @@ -7,10 +7,10 @@ class FilterListViewController: UITableViewController { // #pragma mark - Segues - override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) { + override func prepare(for segue: UIStoryboardSegue, sender: AnyObject?) { if segue.identifier == "showDetail" { if let indexPath = self.tableView.indexPathForSelectedRow { - let filterInList = filterOperations[indexPath.row] + let filterInList = filterOperations[(indexPath as NSIndexPath).row] (segue.destinationViewController as! FilterDisplayViewController).filterOperation = filterInList } } @@ -18,18 +18,18 @@ class FilterListViewController: UITableViewController { // #pragma mark - Table View - override func numberOfSectionsInTableView(tableView: UITableView) -> Int { + override func numberOfSections(in tableView: UITableView) -> Int { return 1 } - override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int { + override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return filterOperations.count } - override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell { - let cell = tableView.dequeueReusableCellWithIdentifier("Cell", forIndexPath: indexPath) + override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { + let cell = tableView.dequeueReusableCell(withIdentifier: "Cell", for: indexPath) - let filterInList:FilterOperationInterface = filterOperations[indexPath.row] + let filterInList:FilterOperationInterface = filterOperations[(indexPath as NSIndexPath).row] cell.textLabel?.text = filterInList.listName return cell } diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj index d9f65b08..b495de0d 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter.xcodeproj/project.pbxproj @@ -169,6 +169,7 @@ TargetAttributes = { BCD985AB1CA43FD5001FF01F = { CreatedOnToolsVersion = 7.3; + LastSwiftMigration = 0800; }; }; }; @@ -360,6 +361,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleImageFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -371,6 +373,7 @@ 
LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleImageFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift index 4c9c4e77..7d21e236 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/AppDelegate.swift @@ -5,7 +5,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } } diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift index 398b28b7..c209e695 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -18,9 +18,9 @@ class ViewController: UIViewController { let pngImage = UIImagePNGRepresentation(filteredImage)! do { - let documentsDir = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain:.UserDomainMask, appropriateForURL:nil, create:true) - let fileURL = NSURL(string:"test.png", relativeToURL:documentsDir)! - try pngImage.writeToURL(fileURL, options:.DataWritingAtomic) + let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + let fileURL = URL(string:"test.png", relativeTo:documentsDir)! 
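The ViewController hunk in progress here shows the migrated file-writing pattern: NSFileManager.defaultManager().URLForDirectory(...) becomes FileManager.default().urlForDirectory(...) in this beta (the final Swift 3 made default a property and renamed the method url(for:in:appropriateFor:create:)), and NSData's writeToURL(_:options:) becomes write(to:options:) on Data, with .DataWritingAtomic lowercased to .dataWritingAtomic (shortened again to .atomic by GM). A sketch of the same pattern as a standalone helper; the name saveToDocuments is hypothetical, not from the patch:

    import Foundation

    // Hypothetical helper restating the pattern above, with this patch's beta spellings.
    func saveToDocuments(_ data: Data, named name: String) throws -> URL {
        let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true)
        let fileURL = URL(string: name, relativeTo: documentsDir)!
        try data.write(to: fileURL, options: .dataWritingAtomic)
        return fileURL
    }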
+ try pngImage.write(to:fileURL, options:.dataWritingAtomic) } catch { print("Couldn't write to file with error: \(error)") } diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj index 3663f8ab..eec85082 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter.xcodeproj/project.pbxproj @@ -159,6 +159,7 @@ TargetAttributes = { BCC49F8E1CD6E1D800B63EEB = { CreatedOnToolsVersion = 7.3; + LastSwiftMigration = 0800; }; }; }; @@ -349,6 +350,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -360,6 +362,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleMovieFilter; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Release; }; @@ -382,6 +385,7 @@ BCC49FA31CD6E1D800B63EEB /* Release */, ); defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift index d209ba9c..d985aeb6 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -11,8 +11,8 @@ class ViewController: UIViewController { override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() - let bundleURL = NSBundle.mainBundle().resourceURL! - let movieURL = NSURL(string:"sample_iPod.m4v", relativeToURL:bundleURL)! + let bundleURL = Bundle.main().resourceURL! + let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! do { movie = try MovieInput(url:movieURL, playAtActualSpeed:true) diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj index 7c227a45..fdff60e3 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter.xcodeproj/project.pbxproj @@ -159,6 +159,7 @@ BC9FAABC1CA1D3FC009F1261 = { CreatedOnToolsVersion = 7.3; DevelopmentTeam = J2U2U9GBML; + LastSwiftMigration = 0800; }; }; }; @@ -353,6 +354,7 @@ PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoFilter; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE = ""; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -367,6 +369,7 @@ PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoFilter; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE = ""; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift index 960472cd..e2bb4729 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/AppDelegate.swift @@ -6,28 +6,28 @@ class AppDelegate: UIResponder, UIApplicationDelegate { var window: UIWindow? - func application(application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) 
-> Bool { + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [NSObject: AnyObject]?) -> Bool { return true } - func applicationWillResignActive(application: UIApplication) { + func applicationWillResignActive(_ application: UIApplication) { // Pause camera } - func applicationDidEnterBackground(application: UIApplication) { + func applicationDidEnterBackground(_ application: UIApplication) { // Pause camera } - func applicationWillEnterForeground(application: UIApplication) { + func applicationWillEnterForeground(_ application: UIApplication) { } - func applicationDidBecomeActive(application: UIApplication) { + func applicationDidBecomeActive(_ application: UIApplication) { // Resume camera } - func applicationWillTerminate(application: UIApplication) { + func applicationWillTerminate(_ application: UIApplication) { // Pause camera if not already } diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift index c688a7d3..32ea9cee 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift @@ -28,7 +28,7 @@ class ViewController: UIViewController { camera.delegate = self camera --> saturationFilter --> blendFilter --> renderView lineGenerator --> blendFilter - shouldDetectFaces = faceDetectSwitch.on + shouldDetectFaces = faceDetectSwitch.isOn camera.startCapture() } catch { fatalError("Could not initialize rendering pipeline: \(error)") @@ -39,15 +39,15 @@ class ViewController: UIViewController { super.viewDidLayoutSubviews() } - @IBAction func didSwitch(sender: UISwitch) { - shouldDetectFaces = sender.on + @IBAction func didSwitch(_ sender: UISwitch) { + shouldDetectFaces = sender.isOn } - @IBAction func capture(sender: AnyObject) { + @IBAction func capture(_ sender: AnyObject) { print("Capture") do { - let documentsDir = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain:.UserDomainMask, appropriateForURL:nil, create:true) - saturationFilter.saveNextFrameToURL(NSURL(string:"TestImage.png", relativeToURL:documentsDir)!, format:.PNG) + let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + saturationFilter.saveNextFrameToURL(URL(string:"TestImage.png", relativeTo:documentsDir)!, format:.png) } catch { print("Couldn't save image: \(error)") } @@ -55,16 +55,16 @@ class ViewController: UIViewController { } extension ViewController: CameraDelegate { - func didCaptureBuffer(sampleBuffer: CMSampleBuffer) { + func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) { guard shouldDetectFaces else { lineGenerator.renderLines([]) // clear return } if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) { let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, CMAttachmentMode(kCMAttachmentMode_ShouldPropagate))! - let img = CIImage(CVPixelBuffer: pixelBuffer, options: attachments as? [String: AnyObject]) + let img = CIImage(cvPixelBuffer: pixelBuffer, options: attachments as? [String: AnyObject]) var lines = [Line]() - for feature in faceDetector.featuresInImage(img, options: [CIDetectorImageOrientation: 6]) { + for feature in (faceDetector?.features(in: img, options: [CIDetectorImageOrientation: 6]))! 
{ if feature is CIFaceFeature { lines = lines + faceLines(feature.bounds) } @@ -73,13 +73,13 @@ extension ViewController: CameraDelegate { } } - func faceLines(bounds: CGRect) -> [Line] { + func faceLines(_ bounds: CGRect) -> [Line] { // convert from CoreImage to GL coords - let flip = CGAffineTransformMakeScale(1, -1) - let rotate = CGAffineTransformRotate(flip, CGFloat(-M_PI_2)) - let translate = CGAffineTransformTranslate(rotate, -1, -1) - let xform = CGAffineTransformScale(translate, CGFloat(2/fbSize.width), CGFloat(2/fbSize.height)) - let glRect = CGRectApplyAffineTransform(bounds, xform) + let flip = CGAffineTransform(scaleX: 1, y: -1) + let rotate = flip.rotate(CGFloat(-M_PI_2)) + let translate = rotate.translateBy(x: -1, y: -1) + let xform = translate.scaleBy(x: CGFloat(2/fbSize.width), y: CGFloat(2/fbSize.height)) + let glRect = bounds.apply(transform: xform) let x = Float(glRect.origin.x) let y = Float(glRect.origin.y) @@ -91,9 +91,9 @@ extension ViewController: CameraDelegate { let bl = Position(x, y + height) let br = Position(x + width, y + height) - return [.Segment(p1:tl, p2:tr), // top - .Segment(p1:tr, p2:br), // right - .Segment(p1:br, p2:bl), // bottom - .Segment(p1:bl, p2:tl)] // left + return [.segment(p1:tl, p2:tr), // top + .segment(p1:tr, p2:br), // right + .segment(p1:br, p2:bl), // bottom + .segment(p1:bl, p2:tl)] // left } } diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj index 63de881c..2c5dfa2b 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder.xcodeproj/project.pbxproj @@ -159,6 +159,7 @@ TargetAttributes = { BCA0C32F1CCBFFAE0034F922 = { CreatedOnToolsVersion = 7.3; + LastSwiftMigration = 0800; }; }; }; @@ -349,6 +350,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -360,6 +362,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.SimpleVideoRecorder; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 11fec2a3..35a542e9 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -27,14 +27,14 @@ class ViewController: UIViewController { super.viewDidLayoutSubviews() } - @IBAction func capture(sender: AnyObject) { + @IBAction func capture(_ sender: AnyObject) { if (!isRecording) { do { self.isRecording = true - let documentsDir = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain:.UserDomainMask, appropriateForURL:nil, create:true) - let fileURL = NSURL(string:"test.mp4", relativeToURL:documentsDir)! + let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + let fileURL = URL(string:"test.mp4", relativeTo:documentsDir)! 
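The faceLines(_:) hunk above is the one place in this patch that leans on CoreGraphics transform chaining, and it uses interim beta names: rotate(_:), translateBy(x:y:), scaleBy(x:y:), and CGRect's apply(transform:) shipped in final Swift 3 as rotated(by:), translatedBy(x:y:), scaledBy(x:y:), and applying(_:). A sketch of the same CoreImage-to-GL rect conversion in the GM spellings; the helper name and parameters are illustrative, not from the patch:

    import CoreGraphics

    // Convert a CoreImage-space rect into GL clip-space coordinates (GM Swift 3 spellings).
    func glRect(from bounds: CGRect, framebufferSize size: CGSize) -> CGRect {
        let xform = CGAffineTransform(scaleX: 1, y: -1)      // flip vertically
            .rotated(by: -CGFloat.pi / 2)                    // undo the 90-degree camera rotation
            .translatedBy(x: -1, y: -1)                      // move the origin to the GL corner
            .scaledBy(x: 2 / size.width, y: 2 / size.height) // normalize into [-1, 1]
        return bounds.applying(xform)
    }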
do { - try NSFileManager.defaultManager().removeItemAtURL(fileURL) + try FileManager.default().removeItem(at:fileURL) } catch { } @@ -49,7 +49,7 @@ class ViewController: UIViewController { } else { movieOutput?.finishRecording{ self.isRecording = false - dispatch_async(dispatch_get_main_queue()) { + DispatchQueue.main.async { (sender as! UIButton).titleLabel?.text = "Record" } self.camera.audioEncodingTarget = nil @@ -57,4 +57,4 @@ class ViewController: UIViewController { } } } -} \ No newline at end of file +} diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index 27e97080..9c14d86a 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -976,11 +976,12 @@ isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0720; - LastUpgradeCheck = 0720; + LastUpgradeCheck = 0800; ORGANIZATIONNAME = "Sunset Lake Software LLC"; TargetAttributes = { BC6E7CAA1C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; + LastSwiftMigration = 0800; }; BC6E7CB41C39A9D8006DF678 = { CreatedOnToolsVersion = 7.2; @@ -1322,6 +1323,7 @@ PRODUCT_NAME = GPUImage; SKIP_INSTALL = YES; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -1341,6 +1343,8 @@ PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.GPUImage-Mac"; PRODUCT_NAME = GPUImage; SKIP_INSTALL = YES; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_VERSION = 3.0; }; name = Release; }; @@ -1363,6 +1367,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/../Frameworks @loader_path/../Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.GPUImage-MacTests"; PRODUCT_NAME = GPUImage; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; }; name = Release; }; diff --git a/framework/GPUImage-Mac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme b/framework/GPUImage-Mac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme index bffbae83..ba5d00dd 100644 --- a/framework/GPUImage-Mac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme +++ b/framework/GPUImage-Mac.xcodeproj/xcshareddata/xcschemes/GPUImage.xcscheme @@ -1,6 +1,6 @@ String { +public func defaultVertexShaderForInputs(_ inputCount:UInt) -> String { switch inputCount { case 1: return OneInputVertexShader case 2: return TwoInputVertexShader @@ -58,14 +58,14 @@ public class BasicOperation: ImageProcessingOperation { usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - public init(vertexShader:String? = nil, fragmentShader:String, numberOfInputs:UInt = 1, operationName:String = __FILE__) { + public init(vertexShader:String? = nil, fragmentShader:String, numberOfInputs:UInt = 1, operationName:String = #file) { let compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShader ?? defaultVertexShaderForInputs(numberOfInputs), fragmentShader:fragmentShader)} self.maximumInputs = numberOfInputs self.shader = compiledShader usesAspectRatio = shader.uniformIndex("aspectRatio") != nil } - public init(vertexShaderFile:NSURL? = nil, fragmentShaderFile:NSURL, numberOfInputs:UInt = 1, operationName:String = __FILE__) throws { + public init(vertexShaderFile:URL? 
= nil, fragmentShaderFile:URL, numberOfInputs:UInt = 1, operationName:String = #file) throws { let compiledShader:ShaderProgram if let vertexShaderFile = vertexShaderFile { compiledShader = crashOnShaderCompileFailure(operationName){try sharedImageProcessingContext.programForVertexShader(vertexShaderFile, fragmentShader:fragmentShaderFile)} @@ -84,7 +84,7 @@ public class BasicOperation: ImageProcessingOperation { // MARK: - // MARK: Rendering - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if let previousFramebuffer = inputFramebuffers[fromSourceIndex] { previousFramebuffer.unlock() } @@ -104,7 +104,7 @@ public class BasicOperation: ImageProcessingOperation { } func renderFrame() { - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!), stencil:mask != nil) let textureProperties = initialTextureProperties() configureFramebufferSpecificUniforms(inputFramebuffers[0]!) @@ -123,7 +123,7 @@ public class BasicOperation: ImageProcessingOperation { } } - func internalRenderFunction(inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:textureProperties) releaseIncomingFramebuffers() } @@ -131,7 +131,7 @@ public class BasicOperation: ImageProcessingOperation { func releaseIncomingFramebuffers() { var remainingFramebuffers = [UInt:Framebuffer]() // If all inputs are still images, have this output behave as one - renderFramebuffer.timingStyle = .StillImage + renderFramebuffer.timingStyle = .stillImage var latestTimestamp:Timestamp? for (key, framebuffer) in inputFramebuffers { @@ -140,7 +140,7 @@ public class BasicOperation: ImageProcessingOperation { if let timestamp = framebuffer.timingStyle.timestamp { if !(timestamp < (latestTimestamp ?? timestamp)) { latestTimestamp = timestamp - renderFramebuffer.timingStyle = .VideoFrame(timestamp:timestamp) + renderFramebuffer.timingStyle = .videoFrame(timestamp:timestamp) } framebuffer.unlock() @@ -151,11 +151,11 @@ public class BasicOperation: ImageProcessingOperation { inputFramebuffers = remainingFramebuffers } - func sizeOfInitialStageBasedOnFramebuffer(inputFramebuffer:Framebuffer) -> GLSize { + func sizeOfInitialStageBasedOnFramebuffer(_ inputFramebuffer:Framebuffer) -> GLSize { if let outputSize = overriddenOutputSize { return GLSize(outputSize) } else { - return inputFramebuffer.sizeForTargetOrientation(.Portrait) + return inputFramebuffer.sizeForTargetOrientation(.portrait) } } @@ -168,24 +168,24 @@ public class BasicOperation: ImageProcessingOperation { } } else { for framebufferIndex in 0.. 
[GLfloat] { + func transformVertices(_ vertices:[GLfloat], fromInputSize:GLSize, toFitSize:GLSize) -> [GLfloat] { guard (vertices.count == 8) else { fatalError("Attempted to transform a non-quad to account for fill mode.") } let aspectRatio = GLfloat(fromInputSize.height) / GLfloat(fromInputSize.width) @@ -28,8 +28,8 @@ public enum FillMode { let yRatio:GLfloat let xRatio:GLfloat switch self { - case Stretch: return vertices - case PreserveAspectRatio: + case stretch: return vertices + case preserveAspectRatio: if (aspectRatio > targetAspectRatio) { yRatio = 1.0 // xRatio = (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) * (GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) @@ -38,7 +38,7 @@ public enum FillMode { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } - case PreserveAspectRatioAndFill: + case preserveAspectRatioAndFill: if (aspectRatio > targetAspectRatio) { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) @@ -57,9 +57,22 @@ public enum FillMode { let yConversionDivisor:GLfloat = GLfloat(toFitSize.height) / 2.0 // The Double casting here is required by Linux - return [GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor, - GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor, - GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor, - GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor] + + let value1:GLfloat = GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor + let value2:GLfloat = GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor + let value3:GLfloat = GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor + let value4:GLfloat = GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor + let value5:GLfloat = GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor + let value6:GLfloat = GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor + let value7:GLfloat = GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor + let value8:GLfloat = GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor + + return [value1, value2, value3, value4, value5, value6, value7, value8] + + // This expression chokes the compiler in Xcode 8.0, Swift 3 +// return [GLfloat(round(Double(vertices[0] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[1] * yConversionRatio))) / yConversionDivisor, +// GLfloat(round(Double(vertices[2] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[3] * yConversionRatio))) / yConversionDivisor, +// GLfloat(round(Double(vertices[4] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[5] * yConversionRatio))) / yConversionDivisor, +// GLfloat(round(Double(vertices[6] * xConversionRatio))) / xConversionDivisor, GLfloat(round(Double(vertices[7] * yConversionRatio))) / yConversionDivisor] } } diff --git a/framework/Source/Framebuffer.swift 
b/framework/Source/Framebuffer.swift index 4bbc7a7a..eb9f7cd4 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -17,33 +17,33 @@ import Glibc import Foundation // TODO: Add a good lookup table to this to allow for detailed error messages -struct FramebufferCreationError:ErrorType { +struct FramebufferCreationError:ErrorProtocol { let errorCode:GLenum } public enum FramebufferTimingStyle { - case StillImage - case VideoFrame(timestamp:Timestamp) + case stillImage + case videoFrame(timestamp:Timestamp) func isTransient() -> Bool { switch self { - case .StillImage: return false - case .VideoFrame: return true + case .stillImage: return false + case .videoFrame: return true } } var timestamp:Timestamp? { get { switch self { - case .StillImage: return nil - case let .VideoFrame(timestamp): return timestamp + case .stillImage: return nil + case let .videoFrame(timestamp): return timestamp } } } } public class Framebuffer { - public var timingStyle:FramebufferTimingStyle = .StillImage + public var timingStyle:FramebufferTimingStyle = .stillImage public var orientation:ImageOrientation let texture:GLuint @@ -108,7 +108,7 @@ public class Framebuffer { } } - func sizeForTargetOrientation(targetOrientation:ImageOrientation) -> GLSize { + func sizeForTargetOrientation(_ targetOrientation:ImageOrientation) -> GLSize { if self.orientation.rotationNeededForOrientation(targetOrientation).flipsDimensions() { return GLSize(width:size.height, height:size.width) } else { @@ -116,7 +116,7 @@ public class Framebuffer { } } - func aspectRatioForRotation(rotation:Rotation) -> Float { + func aspectRatioForRotation(_ rotation:Rotation) -> Float { if rotation.flipsDimensions() { return Float(size.width) / Float(size.height) } else { @@ -124,7 +124,7 @@ public class Framebuffer { } } - func texelSizeForRotation(rotation:Rotation) -> Size { + func texelSizeForRotation(_ rotation:Rotation) -> Size { if rotation.flipsDimensions() { return Size(width:1.0 / Float(size.height), height:1.0 / Float(size.width)) } else { @@ -132,7 +132,7 @@ public class Framebuffer { } } - func initialStageTexelSizeForRotation(rotation:Rotation) -> Size { + func initialStageTexelSizeForRotation(_ rotation:Rotation) -> Size { if rotation.flipsDimensions() { return Size(width:1.0 / Float(size.height), height:0.0) } else { @@ -140,11 +140,11 @@ public class Framebuffer { } } - func texturePropertiesForOutputRotation(rotation:Rotation) -> InputTextureProperties { + func texturePropertiesForOutputRotation(_ rotation:Rotation) -> InputTextureProperties { return InputTextureProperties(textureCoordinates:rotation.textureCoordinates(), texture:texture) } - func texturePropertiesForTargetOrientation(targetOrientation:ImageOrientation) -> InputTextureProperties { + func texturePropertiesForTargetOrientation(_ targetOrientation:ImageOrientation) -> InputTextureProperties { return texturePropertiesForOutputRotation(self.orientation.rotationNeededForOrientation(targetOrientation)) } @@ -179,7 +179,7 @@ public class Framebuffer { } } -func hashForFramebufferWithProperties(orientation orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Int64 { +func hashForFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = 
GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Int64 { var result:Int64 = 1 let prime:Int64 = 31 let yesPrime:Int64 = 1231 @@ -202,32 +202,32 @@ func hashForFramebufferWithProperties(orientation orientation:ImageOrientation, extension Rotation { func textureCoordinates() -> [GLfloat] { switch self { - case NoRotation: return [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0] - case RotateCounterclockwise: return [0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0] - case RotateClockwise: return [1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0] - case Rotate180: return [1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0] - case FlipHorizontally: return [1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] - case FlipVertically: return [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0] - case RotateClockwiseAndFlipVertically: return [0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0] - case RotateClockwiseAndFlipHorizontally: return [1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0] + case noRotation: return [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0] + case rotateCounterclockwise: return [0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0] + case rotateClockwise: return [1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0] + case rotate180: return [1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0] + case flipHorizontally: return [1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] + case flipVertically: return [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0] + case rotateClockwiseAndFlipVertically: return [0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0] + case rotateClockwiseAndFlipHorizontally: return [1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0] } } - func croppedTextureCoordinates(offsetFromOrigin offsetFromOrigin:Position, cropSize:Size) -> [GLfloat] { + func croppedTextureCoordinates(offsetFromOrigin:Position, cropSize:Size) -> [GLfloat] { let minX = GLfloat(offsetFromOrigin.x) let minY = GLfloat(offsetFromOrigin.y) let maxX = GLfloat(offsetFromOrigin.x) + GLfloat(cropSize.width) let maxY = GLfloat(offsetFromOrigin.y) + GLfloat(cropSize.height) switch self { - case NoRotation: return [minX, minY, maxX, minY, minX, maxY, maxX, maxY] - case RotateCounterclockwise: return [minX, maxY, minX, minY, maxX, maxY, maxX, minY] - case RotateClockwise: return [maxX, minY, maxX, maxY, minX, minY, minX, maxY] - case Rotate180: return [maxX, maxY, minX, maxY, maxX, minY, minX, minY] - case FlipHorizontally: return [maxX, minY, minX, minY, maxX, maxY, minX, maxY] - case FlipVertically: return [minX, maxY, maxX, maxY, minX, minY, maxX, minY] - case RotateClockwiseAndFlipVertically: return [minX, minY, minX, maxY, maxX, minY, maxX, maxY] - case RotateClockwiseAndFlipHorizontally: return [maxX, maxY, maxX, minY, minX, maxY, minX, minY] + case noRotation: return [minX, minY, maxX, minY, minX, maxY, maxX, maxY] + case rotateCounterclockwise: return [minX, maxY, minX, minY, maxX, maxY, maxX, minY] + case rotateClockwise: return [maxX, minY, maxX, maxY, minX, minY, minX, maxY] + case rotate180: return [maxX, maxY, minX, maxY, maxX, minY, minX, minY] + case flipHorizontally: return [maxX, minY, minX, minY, maxX, maxY, minX, maxY] + case flipVertically: return [minX, maxY, maxX, maxY, minX, minY, maxX, minY] + case rotateClockwiseAndFlipVertically: return [minX, minY, minX, maxY, maxX, minY, maxX, maxY] + case rotateClockwiseAndFlipHorizontally: return [maxX, maxY, maxX, minY, minX, maxY, minX, minY] } } } @@ -240,4 +240,4 @@ public extension Size { func glHeight() -> GLint { return 
GLint(round(Double(self.height))) } -} \ No newline at end of file +} diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 1f813c8d..3e20f31a 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -22,7 +22,7 @@ public class FramebufferCache { self.context = context } - public func requestFramebufferWithProperties(orientation orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { + public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer if (framebufferCache[hash]?.count > 0) { @@ -46,7 +46,7 @@ public class FramebufferCache { framebufferCache.removeAll() } - func returnFramebufferToCache(framebuffer:Framebuffer) { + func returnFramebufferToCache(_ framebuffer:Framebuffer) { // print("Returning to cache: \(framebuffer)") context.runOperationSynchronously{ if (self.framebufferCache[framebuffer.hash] != nil) { diff --git a/framework/Source/ImageGenerator.swift b/framework/Source/ImageGenerator.swift index ad22b024..51f4de36 100644 --- a/framework/Source/ImageGenerator.swift +++ b/framework/Source/ImageGenerator.swift @@ -7,13 +7,13 @@ public class ImageGenerator: ImageSource { public init(size:Size) { self.size = size do { - imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.Portrait, size:GLSize(size)) + imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:GLSize(size)) } catch { fatalError("Could not construct framebuffer of size: \(size), error:\(error)") } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) } @@ -21,4 +21,4 @@ public class ImageGenerator: ImageSource { func notifyTargets() { updateTargetsWithFramebuffer(imageFramebuffer) } -} \ No newline at end of file +} diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index eb007d68..09ae3b44 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -1,42 +1,42 @@ public enum ImageOrientation { - case Portrait - case PortraitUpsideDown - case LandscapeLeft - case LandscapeRight + case portrait + case portraitUpsideDown + case landscapeLeft + case landscapeRight - func rotationNeededForOrientation(targetOrientation:ImageOrientation) -> Rotation { + func rotationNeededForOrientation(_ targetOrientation:ImageOrientation) -> Rotation { switch (self, targetOrientation) { - case (.Portrait, .Portrait), (.PortraitUpsideDown, 
.PortraitUpsideDown), (.LandscapeLeft, .LandscapeLeft), (LandscapeRight, LandscapeRight): return .NoRotation - case (.Portrait, .PortraitUpsideDown): return .Rotate180 - case (.PortraitUpsideDown, .Portrait): return .Rotate180 - case (.Portrait, .LandscapeLeft): return .RotateCounterclockwise - case (.LandscapeLeft, .Portrait): return .RotateClockwise - case (.Portrait, .LandscapeRight): return .RotateClockwise - case (.LandscapeRight, .Portrait): return .RotateCounterclockwise - case (.LandscapeLeft, .LandscapeRight): return .Rotate180 - case (.LandscapeRight, .LandscapeLeft): return .Rotate180 - case (.PortraitUpsideDown, .LandscapeLeft): return .RotateClockwise - case (.LandscapeLeft, .PortraitUpsideDown): return .RotateCounterclockwise - case (.PortraitUpsideDown, .LandscapeRight): return .RotateCounterclockwise - case (.LandscapeRight, .PortraitUpsideDown): return .RotateClockwise + case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (landscapeRight, landscapeRight): return .noRotation + case (.portrait, .portraitUpsideDown): return .rotate180 + case (.portraitUpsideDown, .portrait): return .rotate180 + case (.portrait, .landscapeLeft): return .rotateCounterclockwise + case (.landscapeLeft, .portrait): return .rotateClockwise + case (.portrait, .landscapeRight): return .rotateClockwise + case (.landscapeRight, .portrait): return .rotateCounterclockwise + case (.landscapeLeft, .landscapeRight): return .rotate180 + case (.landscapeRight, .landscapeLeft): return .rotate180 + case (.portraitUpsideDown, .landscapeLeft): return .rotateClockwise + case (.landscapeLeft, .portraitUpsideDown): return .rotateCounterclockwise + case (.portraitUpsideDown, .landscapeRight): return .rotateCounterclockwise + case (.landscapeRight, .portraitUpsideDown): return .rotateClockwise } } } public enum Rotation { - case NoRotation - case RotateCounterclockwise - case RotateClockwise - case Rotate180 - case FlipHorizontally - case FlipVertically - case RotateClockwiseAndFlipVertically - case RotateClockwiseAndFlipHorizontally + case noRotation + case rotateCounterclockwise + case rotateClockwise + case rotate180 + case flipHorizontally + case flipVertically + case rotateClockwiseAndFlipVertically + case rotateClockwiseAndFlipHorizontally func flipsDimensions() -> Bool { switch self { - case .NoRotation, .Rotate180, .FlipHorizontally, .FlipVertically: return false - case .RotateCounterclockwise, .RotateClockwise, .RotateClockwiseAndFlipVertically, .RotateClockwiseAndFlipHorizontally: return true + case .noRotation, .rotate180, .flipHorizontally, .flipVertically: return false + case .rotateCounterclockwise, .rotateClockwise, .rotateClockwiseAndFlipVertically, .rotateClockwiseAndFlipHorizontally: return true } } } diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 509a19b4..f4f490c1 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -34,17 +34,17 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var supportsFullYUVRange:Bool = false let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? 
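The Camera.swift hunk that follows replaces the C libdispatch calls with the new DispatchQueue and DispatchSemaphore types. Note that the queue mapping is not one-to-one: the Swift 2 code asked for DISPATCH_QUEUE_PRIORITY_HIGH and _LOW, while the migrated code requests the default and background global attributes, a small quality-of-service change. A sketch of the mapping, using the beta spellings from this patch (DispatchQueue.global(attributes:) and DispatchTimeoutResult.Success were finalized as global(qos:) and .success):

    import Dispatch

    // dispatch_semaphore_create(1)
    let frameRenderingSemaphore = DispatchSemaphore(value: 1)

    // dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), approximately
    let cameraProcessingQueue = DispatchQueue.global(attributes: .priorityDefault)

    // dispatch_semaphore_wait(semaphore, DISPATCH_TIME_NOW) == 0, as used in captureOutput below
    if frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == .Success {
        defer { frameRenderingSemaphore.signal() }  // dispatch_semaphore_signal(semaphore)
        // process the frame; otherwise it is dropped
    }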
- let frameRenderingSemaphore = dispatch_semaphore_create(1) - let cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0) - let audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0) + let frameRenderingSemaphore = DispatchSemaphore(value:1) + let cameraProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.priorityDefault) + let audioProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.priorityBackground) var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, orientation:ImageOrientation = .Portrait, captureAsYUV:Bool = true) throws { - self.inputCamera = cameraDevice ?? AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) + public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, orientation:ImageOrientation = .portrait, captureAsYUV:Bool = true) throws { + self.inputCamera = cameraDevice ?? AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) self.orientation = orientation self.captureAsYUV = captureAsYUV @@ -71,22 +71,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureAsYUV { supportsFullYUVRange = false let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes - for currentPixelFormat in supportedPixelFormats { - if ((currentPixelFormat as! NSNumber).intValue == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + for currentPixelFormat in supportedPixelFormats! { + if ((currentPixelFormat as! NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { supportsFullYUVRange = true } } if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] } if (captureSession.canAddOutput(videoOutput)) { @@ -108,13 +108,13 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - public func captureOutput(captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, fromConnection connection:AVCaptureConnection!) { + public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) 
{ guard (captureOutput != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) return } - guard (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) == 0) else { return } + guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.Success) else { return } let startTime = CFAbsoluteTimeGetCurrent() let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! @@ -122,7 +122,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let bufferHeight = CVPixelBufferGetHeight(cameraFrame) let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - CVPixelBufferLockBaseAddress(cameraFrame, 0) + CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) sharedImageProcessingContext.runOperationAsynchronously{ let cameraFramebuffer:Framebuffer @@ -139,7 +139,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1)) - cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) let conversionMatrix:Matrix3x3 if (self.supportsFullYUVRange) { @@ -154,9 +154,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) } - CVPixelBufferUnlockBaseAddress(cameraFrame, 0) + CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - cameraFramebuffer.timingStyle = .VideoFrame(timestamp:Timestamp(currentTime)) + cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) if self.runBenchmark { @@ -179,7 +179,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.framesSinceLastCheck += 1 } - dispatch_semaphore_signal(self.frameRenderingSemaphore) + self.frameRenderingSemaphore.signal() } } @@ -187,18 +187,18 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer self.numberOfFramesCaptured = 0 self.totalFrameTimeDuringCapture = 0 - if (!captureSession.running) { + if (!captureSession.isRunning) { captureSession.startRunning() } } public func stopCapture() { - if (!captureSession.running) { + if (!captureSession.isRunning) { captureSession.stopRunning() } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { // Not needed for camera inputs } @@ -212,7 +212,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer defer { captureSession.commitConfiguration() } - microphone = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio) + 
microphone = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) audioInput = try AVCaptureDeviceInput(device:microphone) if captureSession.canAddInput(audioInput) { captureSession.addInput(audioInput) @@ -236,7 +236,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer captureSession.commitConfiguration() } - func processAudioSampleBuffer(sampleBuffer:CMSampleBuffer) { + func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) { self.audioEncodingTarget?.processAudioBuffer(sampleBuffer) } } diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index ece95357..9125692f 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -26,16 +26,16 @@ public class MovieInput: ImageSource { assetReader = try AVAssetReader(asset:self.asset) - let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracksWithMediaType(AVMediaTypeVideo)[0], outputSettings:outputSettings) + let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.addOutput(readerVideoTrackOutput) + assetReader.add(readerVideoTrackOutput) // TODO: Audio here } - public convenience init(url:NSURL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(bool:true)] - let inputAsset = AVURLAsset(URL:url, options:inputOptions) + public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] + let inputAsset = AVURLAsset(url:url, options:inputOptions) try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) } @@ -43,9 +43,9 @@ public class MovieInput: ImageSource { // MARK: Playback control public func start() { - asset.loadValuesAsynchronouslyForKeys(["tracks"], completionHandler: { - dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { - guard (self.asset.statusOfValueForKey("tracks", error:nil) == .Loaded) else { return } + asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { + DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.priorityDefault).async(execute: { + guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { debugPrint("Couldn't start reading") @@ -60,11 +60,11 @@ public class MovieInput: ImageSource { } } - while (self.assetReader.status == .Reading) { + while (self.assetReader.status == .reading) { self.readNextVideoFrameFromOutput(readerVideoTrackOutput!) 
} - if (self.assetReader.status == .Completed) { + if (self.assetReader.status == .completed) { self.assetReader.cancelReading() if (self.loop) { @@ -89,8 +89,8 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions - func readNextVideoFrameFromOutput(videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .Reading) && !videoEncodingIsFinished) { + func readNextVideoFrameFromOutput(_ videoTrackOutput:AVAssetReaderOutput) { + if ((assetReader.status == .reading) && !videoEncodingIsFinished) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { // Do this outside of the video processing queue to not slow that down while waiting @@ -130,7 +130,7 @@ public class MovieInput: ImageSource { } - func processMovieFrame(frame:CMSampleBuffer) { + func processMovieFrame(_ frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! @@ -138,10 +138,10 @@ public class MovieInput: ImageSource { self.processMovieFrame(movieFrame, withSampleTime:currentSampleTime) } - func processMovieFrame(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) - CVPixelBufferLockBaseAddress(movieFrame, 0) + CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) let conversionMatrix = colorConversionMatrix601FullRangeDefault // TODO: Get this color query working @@ -157,24 +157,24 @@ public class MovieInput: ImageSource { let startTime = CFAbsoluteTimeGetCurrent() - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + let movieFramebuffer = 
sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) - CVPixelBufferUnlockBaseAddress(movieFrame, 0) + CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) - movieFramebuffer.timingStyle = .VideoFrame(timestamp:Timestamp(withSampleTime)) + movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) if self.runBenchmark { @@ -186,7 +186,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } -} \ No newline at end of file +} diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index e99c2c0b..93881e3a 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -2,7 +2,7 @@ import AVFoundation public protocol AudioEncodingTarget { func activateAudioTrack() - func processAudioBuffer(sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) } public class MovieOutput: ImageConsumer, AudioEncodingTarget { @@ -22,9 +22,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { private var previousAudioTime = kCMTimeNegativeInfinity private var encodingLiveVideo:Bool - public init(URL:NSURL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { self.size = size - assetWriter = try AVAssetWriter(URL:URL, fileType:fileType) + assetWriter = try AVAssetWriter(url:URL, fileType:fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) @@ -35,8 +35,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings = [String:AnyObject]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(float:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(float:size.height) + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) @@ -44,12 +44,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. 
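+        // Usage sketch for the converted API (fileURL and camera here are stand-ins for values the app provides; the --> operator comes from Pipeline.swift):
+        //     let movieOutput = try MovieOutput(URL:fileURL, size:Size(width:1280.0, height:720.0), liveVideo:true)
+        //     camera.audioEncodingTarget = movieOutput
+        //     camera --> movieOutput
+        //     movieOutput.startRecording()
+        //     movieOutput.finishRecording({ print("Recording complete") })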
- let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(int:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(float:size.width), - kCVPixelBufferHeightKey as String:NSNumber(float:size.height)] + let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), + kCVPixelBufferWidthKey as String:NSNumber(value:size.width), + kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) - assetWriter.addInput(assetWriterVideoInput) + assetWriter.add(assetWriterVideoInput) } public func startRecording() { @@ -59,36 +59,36 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - public func finishRecording(completionCallback:(() -> Void)? = nil) { + public func finishRecording(_ completionCallback:(() -> Void)? = nil) { sharedImageProcessingContext.runOperationSynchronously{ self.isRecording = false - if (self.assetWriter.status == .Completed || self.assetWriter.status == .Cancelled || self.assetWriter.status == .Unknown) { + if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { sharedImageProcessingContext.runOperationAsynchronously{ completionCallback?() } return } - if ((self.assetWriter.status == .Writing) && (!self.videoEncodingIsFinished)) { + if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { self.videoEncodingIsFinished = true self.assetWriterVideoInput.markAsFinished() } - if ((self.assetWriter.status == .Writing) && (!self.audioEncodingIsFinished)) { + if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { self.audioEncodingIsFinished = true self.assetWriterAudioInput?.markAsFinished() } // Why can't I use ?? here for the callback? 
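+        // (Probably because the right-hand operand of ?? is an @autoclosure parameter, so a bare closure
+        // literal won't type-check there without an explicit cast such as `completionCallback ?? ({} as () -> Void)`.)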
if let callback = completionCallback { - self.assetWriter.finishWritingWithCompletionHandler(callback) + self.assetWriter.finishWriting(completionHandler: callback) } else { - self.assetWriter.finishWritingWithCompletionHandler{} + self.assetWriter.finishWriting{} } } } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { defer { framebuffer.unlock() } @@ -100,16 +100,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard (frameTime != previousFrameTime) else { return } if (startTime == nil) { - if (assetWriter.status != .Writing) { + if (assetWriter.status != .writing) { assetWriter.startWriting() } - assetWriter.startSessionAtSourceTime(frameTime) + assetWriter.startSession(atSourceTime: frameTime) startTime = frameTime } // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.readyForMoreMediaData || (!encodingLiveVideo)) else { + guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { debugPrint("Had to drop a frame at time \(frameTime)") return } @@ -123,23 +123,23 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { renderIntoPixelBuffer(pixelBuffer, framebuffer:framebuffer) - if (!assetWriterPixelBufferInput.appendPixelBuffer(pixelBuffer, withPresentationTime:frameTime)) { + if (!assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime:frameTime)) { print("Problem appending pixel buffer at time: \(frameTime)") } - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0) + CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) } - func renderIntoPixelBuffer(pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { + func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) - CVPixelBufferLockBaseAddress(pixelBuffer, 0) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer)) renderFramebuffer.unlock() } @@ -150,29 +150,29 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func activateAudioTrack() { // TODO: Add ability to set custom output settings assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) - assetWriter.addInput(assetWriterAudioInput!) + assetWriter.add(assetWriterAudioInput!) 
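+        // A live source can't be paced by the writer, so the audio input is told to expect media data
+        // in real time; for offline encoding this stays false and the writer pulls samples at its own rate.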
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(sampleBuffer:CMSampleBuffer) { + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } sharedImageProcessingContext.runOperationSynchronously{ let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) if (self.startTime == nil) { - if (self.assetWriter.status != .Writing) { + if (self.assetWriter.status != .writing) { self.assetWriter.startWriting() } - self.assetWriter.startSessionAtSourceTime(currentSampleTime) + self.assetWriter.startSession(atSourceTime: currentSampleTime) self.startTime = currentSampleTime } - guard (assetWriterAudioInput.readyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { return } - if (!assetWriterAudioInput.appendSampleBuffer(sampleBuffer)) { + if (!assetWriterAudioInput.append(sampleBuffer)) { print("Trouble appending audio sample buffer") } } @@ -193,4 +193,4 @@ public extension Timestamp { return CMTimeMakeWithEpoch(value, timescale, epoch) } } -} \ No newline at end of file +} diff --git a/framework/Source/Mac/OpenGLContext.swift b/framework/Source/Mac/OpenGLContext.swift index 3e483014..58df9db2 100755 --- a/framework/Source/Mac/OpenGLContext.swift +++ b/framework/Source/Mac/OpenGLContext.swift @@ -15,17 +15,14 @@ public class OpenGLContext: SerialDispatch { return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} }() - public let serialDispatchQueue:dispatch_queue_t = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.processingQueue", nil) - var dispatchKey:Int = 1 - public let dispatchQueueKey:UnsafePointer + public let serialDispatchQueue:DispatchQueue = DispatchQueue(label: "com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let dispatchQueueKey = DispatchSpecificKey() // MARK: - // MARK: Initialization and teardown init() { - let context = UnsafeMutablePointer(Unmanaged.passUnretained(self.serialDispatchQueue).toOpaque()) - dispatchQueueKey = UnsafePointer(bitPattern:dispatchKey) - dispatch_queue_set_specific(serialDispatchQueue, dispatchQueueKey, context, nil) + serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) let pixelFormatAttributes:[NSOpenGLPixelFormatAttribute] = [ NSOpenGLPixelFormatAttribute(NSOpenGLPFADoubleBuffer), @@ -37,7 +34,7 @@ public class OpenGLContext: SerialDispatch { fatalError("No appropriate pixel format found when creating OpenGL context.") } // TODO: Take into account the sharegroup - guard let generatedContext = NSOpenGLContext(format:pixelFormat, shareContext:nil) else { + guard let generatedContext = NSOpenGLContext(format:pixelFormat, share:nil) else { fatalError("Unable to create an OpenGL context. The GPUImage framework requires OpenGL support to work.") } @@ -80,7 +77,7 @@ public class OpenGLContext: SerialDispatch { lazy var extensionString:String = { return self.runOperationSynchronously{ self.makeCurrentContext() - return String.fromCString(UnsafePointer(glGetString(GLenum(GL_EXTENSIONS))))! 
+ return String(cString:UnsafePointer(glGetString(GLenum(GL_EXTENSIONS)))) } }() -} \ No newline at end of file +} diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index 3e067168..c449fc2a 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -6,10 +6,10 @@ public class PictureInput: ImageSource { var imageFramebuffer:Framebuffer! var hasProcessedImage:Bool = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { + public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { // TODO: Dispatch this whole thing asynchronously to move image loading off main thread - let widthOfImage = GLint(CGImageGetWidth(image)) - let heightOfImage = GLint(CGImageGetHeight(image)) + let widthOfImage = GLint(image.width) + let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } @@ -37,32 +37,32 @@ public class PictureInput: ImageSource { } var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFDataRef! + var dataFromImageDataProvider:CFData! var format = GL_BGRA if (!shouldRedrawUsingCoreGraphics) { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to * tell GL about the memory layout with GLES. */ - if ((CGImageGetBytesPerRow(image) != CGImageGetWidth(image) * 4) || (CGImageGetBitsPerPixel(image) != 32) || (CGImageGetBitsPerComponent(image) != 8)) + if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ - let bitmapInfo = CGImageGetBitmapInfo(image) - if (bitmapInfo.contains(.FloatComponents)) { + let bitmapInfo = image.bitmapInfo + if (bitmapInfo.contains(.floatComponents)) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.AlphaInfoMask.rawValue) - if (bitmapInfo.contains(.ByteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if (bitmapInfo.contains(.byteOrder32Little)) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.PremultipliedFirst) && (alphaInfo != CGImageAlphaInfo.First) && (alphaInfo != CGImageAlphaInfo.NoneSkipFirst)) { + if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(.ByteOrderDefault)) || (bitmapInfo.contains(.ByteOrder32Big))) { + } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.PremultipliedLast) && (alphaInfo != CGImageAlphaInfo.Last) && (alphaInfo != CGImageAlphaInfo.NoneSkipLast)) { + if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA 
pixel format */ @@ -77,23 +77,23 @@ public class PictureInput: ImageSource { if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.alloc(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer(allocatingCapacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGBitmapContextCreate(imageData, Int(widthToUseForTexture), Int(heightToUseForTexture), 8, Int(widthToUseForTexture) * 4, genericRGBColorspace, CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue) + let imageContext = CGContext(data:imageData, width:Int(widthToUseForTexture), height:Int(heightToUseForTexture), bitsPerComponent:8, bytesPerRow:Int(widthToUseForTexture) * 4, space:genericRGBColorspace, bitmapInfo:CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, CGFloat(widthToUseForTexture), CGFloat(heightToUseForTexture)), image) + imageContext?.draw(in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture)), image:image) } else { // Access the raw image bytes directly - dataFromImageDataProvider = CGDataProviderCopyData(CGImageGetDataProvider(image)) + dataFromImageDataProvider = image.dataProvider?.data imageData = UnsafeMutablePointer(CFDataGetBytePtr(dataFromImageDataProvider)) } sharedImageProcessingContext.makeCurrentContext() do { imageFramebuffer = try Framebuffer(context:sharedImageProcessingContext, orientation:orientation, size:GLSize(width:widthToUseForTexture, height:heightToUseForTexture), textureOnly:true) - imageFramebuffer.timingStyle = .StillImage + imageFramebuffer.timingStyle = .stillImage } catch { fatalError("ERROR: Unable to initialize framebuffer of size (\(widthToUseForTexture), \(heightToUseForTexture)) with error: \(error)") } @@ -112,20 +112,20 @@ public class PictureInput: ImageSource { glBindTexture(GLenum(GL_TEXTURE_2D), 0) if (shouldRedrawUsingCoreGraphics) { - imageData.dealloc(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData.deallocateCapacity(Int(widthToUseForTexture * heightToUseForTexture) * 4) } } - public convenience init(image:NSImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { - self.init(image:image.CGImageForProposedRect(nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image:NSImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { + self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { guard let image = NSImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.CGImageForProposedRect(nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + self.init(image:image.cgImage(forProposedRect:nil, context:nil, 
hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - public func processImage(synchronously synchronously:Bool = false) { + public func processImage(synchronously:Bool = false) { if synchronously { sharedImageProcessingContext.runOperationSynchronously{ sharedImageProcessingContext.makeCurrentContext() @@ -141,10 +141,10 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) } } -} \ No newline at end of file +} diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 0183818c..46a7cf20 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -2,19 +2,19 @@ import OpenGL.GL3 import Cocoa public enum PictureFileFormat { - case PNG - case JPEG + case png + case jpeg } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:(NSData -> ())? - public var encodedImageFormat:PictureFileFormat = .PNG - public var imageAvailableCallback:(NSImage -> ())? + public var encodedImageAvailableCallback:((Data) -> ())? + public var encodedImageFormat:PictureFileFormat = .png + public var imageAvailableCallback:((NSImage) -> ())? public var onlyCaptureNextFrame:Bool = true public let sources = SourceContainer() public let maximumInputs:UInt = 1 - var url:NSURL! + var url:URL! public init() { } @@ -23,13 +23,13 @@ public class PictureOutput: ImageConsumer { debugPrint("Deallocating picture") } - public func saveNextFrameToURL(url:NSURL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { onlyCaptureNextFrame = true encodedImageFormat = format self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { - try imageData.writeToURL(self.url, options:.DataWritingAtomic) + try imageData.write(to: self.url, options:.dataWritingAtomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") @@ -38,28 +38,28 @@ public class PictureOutput: ImageConsumer { } // TODO: Replace with texture caches and a safer capture routine - func cgImageFromFramebuffer(framebuffer:Framebuffer) -> CGImage { + func cgImageFromFramebuffer(_ framebuffer:Framebuffer) -> CGImage { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Red) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer.alloc(imageByteSize) + let data = UnsafeMutablePointer(allocatingCapacity: imageByteSize) glReadPixels(0, 0, 
framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - let dataProvider = CGDataProviderCreateWithData(nil, data, imageByteSize, dataProviderReleaseCallback) + guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not create CGDataProvider")} let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImageCreate(Int(framebuffer.size.width), Int(framebuffer.size.height), 8, 32, 4 * Int(framebuffer.size.width), defaultRGBColorSpace, .ByteOrderDefault /*| CGImageAlphaInfo.Last*/, dataProvider, nil, false, .RenderingIntentDefault)! + return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = NSImage(CGImage:cgImageFromBytes, size:NSZeroSize) + let image = NSImage(cgImage:cgImageFromBytes, size:NSZeroSize) imageCallback(image) @@ -70,11 +70,11 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let bitmapRepresentation = NSBitmapImageRep(CGImage:cgImageFromBytes) - let imageData:NSData + let bitmapRepresentation = NSBitmapImageRep(cgImage:cgImageFromBytes) + let imageData:Data switch encodedImageFormat { - case .PNG: imageData = bitmapRepresentation.representationUsingType(.NSPNGFileType, properties: ["":""])! - case .JPEG: imageData = bitmapRepresentation.representationUsingType(.NSJPEGFileType, properties: ["":""])! + case .png: imageData = bitmapRepresentation.representation(using: .PNG, properties: ["":""])! + case .jpeg: imageData = bitmapRepresentation.representation(using: .JPEG, properties: ["":""])! } imageCallback(imageData) @@ -87,7 +87,7 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(url:NSURL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { let pictureOutput = PictureOutput() pictureOutput.saveNextFrameToURL(url, format:format) self --> pictureOutput @@ -95,13 +95,13 @@ public extension ImageSource { } public extension NSImage { - public func filterWithOperation(operation:T) -> NSImage { + public func filterWithOperation(_ operation:T) -> NSImage { return filterWithPipeline{input, output in input --> operation --> output } } - public func filterWithPipeline(pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> NSImage { + public func filterWithPipeline(_ pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> NSImage { let picture = PictureInput(image:self) var outputImage:NSImage? let pictureOutput = PictureOutput() @@ -116,6 +116,6 @@ public extension NSImage { } // Why are these flipped in the callback definition? 
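+// (The "flip" is presumably that `data` comes back immutable while `context` is mutable — the reverse of
+// what you'd expect for a buffer we allocated ourselves — hence the cast back to UnsafeMutablePointer before deallocating.)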
-func dataProviderReleaseCallback(context:UnsafeMutablePointer, data:UnsafePointer, size:Int) { - UnsafeMutablePointer(data).dealloc(size) +func dataProviderReleaseCallback(_ context:UnsafeMutablePointer?, data:UnsafePointer, size:Int) { + UnsafeMutablePointer(data).deallocateCapacity(size) } diff --git a/framework/Source/Mac/RenderView.swift b/framework/Source/Mac/RenderView.swift index cbf29328..1248094d 100755 --- a/framework/Source/Mac/RenderView.swift +++ b/framework/Source/Mac/RenderView.swift @@ -2,7 +2,7 @@ import Cocoa public class RenderView:NSOpenGLView, ImageConsumer { public var backgroundColor = Color.Black - public var fillMode = FillMode.PreserveAspectRatio + public var fillMode = FillMode.preserveAspectRatio - public var sizeInPixels:Size { get { return Size(width:Float(self.frame.size.width), height:Float(self.frame.size.width)) } } + public var sizeInPixels:Size { get { return Size(width:Float(self.frame.size.width), height:Float(self.frame.size.height)) } } public let sources = SourceContainer() @@ -15,7 +15,7 @@ public class RenderView:NSOpenGLView, ImageConsumer { // TODO: Need to set viewport to appropriate size, resize viewport on view reshape - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { glBindFramebuffer(GLenum(GL_FRAMEBUFFER), 0) glBindRenderbuffer(GLenum(GL_RENDERBUFFER), 0) @@ -25,10 +25,10 @@ public class RenderView:NSOpenGLView, ImageConsumer { clearFramebufferWithColor(backgroundColor) // TODO: Cache these scaled vertices - let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(.Portrait), toFitSize:viewSize) - renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.Portrait)]) + let scaledVertices = fillMode.transformVertices(verticallyInvertedImageVertices, fromInputSize:framebuffer.sizeForTargetOrientation(.portrait), toFitSize:viewSize) + renderQuadWithShader(self.displayShader, vertices:scaledVertices, inputTextures:[framebuffer.texturePropertiesForTargetOrientation(.portrait)]) sharedImageProcessingContext.presentBufferForDisplay() framebuffer.unlock() } -} \ No newline at end of file +} diff --git a/framework/Source/Matrix.swift b/framework/Source/Matrix.swift index c5618a9d..bf4a50c9 100644 --- a/framework/Source/Matrix.swift +++ b/framework/Source/Matrix.swift @@ -68,7 +68,7 @@ public struct Matrix3x3 { 0.0, 0.0, 0.0]) } -func orthographicMatrix(left:Float, right:Float, bottom:Float, top:Float, near:Float, far:Float, anchorTopLeft:Bool = false) -> Matrix4x4 { +func orthographicMatrix(_ left:Float, right:Float, bottom:Float, top:Float, near:Float, far:Float, anchorTopLeft:Bool = false) -> Matrix4x4 { let r_l = right - left let t_b = top - bottom let f_n = far - near @@ -121,4 +121,4 @@ public extension Matrix4x4 { self.init(CATransform3DMakeAffineTransform(transform)) } } -#endif \ No newline at end of file +#endif diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 6e3dc13f..3f108752 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -18,7 +18,7 @@ import Foundation public let sharedImageProcessingContext = OpenGLContext() extension OpenGLContext { - public func programForVertexShader(vertexShader:String, fragmentShader:String) throws -> ShaderProgram { + public func programForVertexShader(_ vertexShader:String, fragmentShader:String) throws -> ShaderProgram { let
lookupKeyForShaderProgram = "V: \(vertexShader) - F: \(fragmentShader)" if let shaderFromCache = shaderCache[lookupKeyForShaderProgram] { return shaderFromCache @@ -31,15 +31,15 @@ extension OpenGLContext { } } - public func programForVertexShader(vertexShader:String, fragmentShader:NSURL) throws -> ShaderProgram { + public func programForVertexShader(_ vertexShader:String, fragmentShader:URL) throws -> ShaderProgram { return try programForVertexShader(vertexShader, fragmentShader:try shaderFromFile(fragmentShader)) } - public func programForVertexShader(vertexShader:NSURL, fragmentShader:NSURL) throws -> ShaderProgram { + public func programForVertexShader(_ vertexShader:URL, fragmentShader:URL) throws -> ShaderProgram { return try programForVertexShader(try shaderFromFile(vertexShader), fragmentShader:try shaderFromFile(fragmentShader)) } - public func openGLDeviceSettingForOption(option:Int32) -> GLint { + public func openGLDeviceSettingForOption(_ option:Int32) -> GLint { return self.runOperationSynchronously{() -> GLint in self.makeCurrentContext() var openGLValue:GLint = 0 @@ -48,11 +48,11 @@ extension OpenGLContext { } } - public func deviceSupportsExtension(openGLExtension:String) -> Bool { + public func deviceSupportsExtension(_ openGLExtension:String) -> Bool { #if os(Linux) return false #else - return self.extensionString.containsString(openGLExtension) + return self.extensionString.contains(openGLExtension) #endif } @@ -66,7 +66,7 @@ extension OpenGLContext { return deviceSupportsExtension("GL_EXT_shader_framebuffer_fetch") } - public func sizeThatFitsWithinATextureForSize(size:Size) -> Size { + public func sizeThatFitsWithinATextureForSize(_ size:Size) -> Size { let maxTextureSize = Float(self.maximumTextureSizeForThisDevice) if ( (size.width < maxTextureSize) && (size.height < maxTextureSize) ) { return size @@ -83,8 +83,8 @@ extension OpenGLContext { } } -@_semantics("sil.optimize.never") public func debugPrint(stringToPrint:String, file: StaticString = __FILE__, line: UInt = __LINE__, function: StaticString = __FUNCTION__) { +@_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG print("\(stringToPrint) --> \((String(file) as NSString).lastPathComponent): \(function): \(line)") #endif -} \ No newline at end of file +} diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index d4742350..2313286d 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -48,7 +48,7 @@ public let standardImageVertices:[GLfloat] = [-1.0, -1.0, 1.0, -1.0, -1.0, 1.0, public let verticallyInvertedImageVertices:[GLfloat] = [-1.0, 1.0, 1.0, 1.0, -1.0, -1.0, 1.0, -1.0] // "position" and "inputTextureCoordinate", "inputTextureCoordinate2" attribute naming follows the convention of the old GPUImage -func renderQuadWithShader(shader:ShaderProgram, uniformSettings:ShaderUniformSettings? = nil, vertices:[GLfloat], inputTextures:[InputTextureProperties]) { +func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformSettings? 
= nil, vertices:[GLfloat], inputTextures:[InputTextureProperties]) { sharedImageProcessingContext.makeCurrentContext() shader.use() uniformSettings?.restoreShaderSettings(shader) @@ -56,7 +56,7 @@ func renderQuadWithShader(shader:ShaderProgram, uniformSettings:ShaderUniformSet guard let positionAttribute = shader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, vertices) - for (index, inputTexture) in inputTextures.enumerate() { + for (index, inputTexture) in inputTextures.enumerated() { if let textureCoordinateAttribute = shader.attributeIndex("inputTextureCoordinate".withNonZeroSuffix(index)) { glVertexAttribPointer(textureCoordinateAttribute, 2, GLenum(GL_FLOAT), 0, 0, inputTexture.textureCoordinates) } else if (index == 0) { @@ -71,19 +71,19 @@ func renderQuadWithShader(shader:ShaderProgram, uniformSettings:ShaderUniformSet glDrawArrays(GLenum(GL_TRIANGLE_STRIP), 0, 4) - for (index, _) in inputTextures.enumerate() { + for (index, _) in inputTextures.enumerated() { glActiveTexture(textureUnitForIndex(index)) glBindTexture(GLenum(GL_TEXTURE_2D), 0) } } -public func clearFramebufferWithColor(color:Color) { +public func clearFramebufferWithColor(_ color:Color) { glClearColor(GLfloat(color.red), GLfloat(color.green), GLfloat(color.blue), GLfloat(color.alpha)) glClear(GLenum(GL_COLOR_BUFFER_BIT)) } -func renderStencilMaskFromFramebuffer(framebuffer:Framebuffer) { - let inputTextureProperties = framebuffer.texturePropertiesForOutputRotation(.NoRotation) +func renderStencilMaskFromFramebuffer(_ framebuffer:Framebuffer) { + let inputTextureProperties = framebuffer.texturePropertiesForOutputRotation(.noRotation) glEnable(GLenum(GL_STENCIL_TEST)) glClearStencil(0) glClear (GLenum(GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT)) @@ -114,7 +114,7 @@ func disableStencil() { glDisable(GLenum(GL_STENCIL_TEST)) } -func textureUnitForIndex(index:Int) -> GLenum { +func textureUnitForIndex(_ index:Int) -> GLenum { switch index { case 0: return GLenum(GL_TEXTURE0) case 1: return GLenum(GL_TEXTURE1) @@ -129,7 +129,7 @@ func textureUnitForIndex(index:Int) -> GLenum { } } -func generateTexture(minFilter minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT:Int32) -> GLuint { +func generateTexture(minFilter:Int32, magFilter:Int32, wrapS:Int32, wrapT:Int32) -> GLuint { var texture:GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) @@ -145,7 +145,7 @@ func generateTexture(minFilter minFilter:Int32, magFilter:Int32, wrapS:Int32, wr return texture } -func generateFramebufferForTexture(texture:GLuint, width:GLint, height:GLint, internalFormat:Int32, format:Int32, type:Int32, stencil:Bool) throws -> (GLuint, GLuint?) { +func generateFramebufferForTexture(_ texture:GLuint, width:GLint, height:GLint, internalFormat:Int32, format:Int32, type:Int32, stencil:Bool) throws -> (GLuint, GLuint?) 
{ var framebuffer:GLuint = 0 glActiveTexture(GLenum(GL_TEXTURE1)) @@ -173,7 +173,7 @@ func generateFramebufferForTexture(texture:GLuint, width:GLint, height:GLint, in return (framebuffer, stencilBuffer) } -func attachStencilBuffer(width width:GLint, height:GLint) throws -> GLuint { +func attachStencilBuffer(width:GLint, height:GLint) throws -> GLuint { var stencilBuffer:GLuint = 0 glGenRenderbuffers(1, &stencilBuffer); glBindRenderbuffer(GLenum(GL_RENDERBUFFER), stencilBuffer) @@ -194,7 +194,7 @@ func attachStencilBuffer(width width:GLint, height:GLint) throws -> GLuint { } extension String { - func withNonZeroSuffix(suffix:Int) -> String { + func withNonZeroSuffix(_ suffix:Int) -> String { if suffix == 0 { return self } else { @@ -202,7 +202,7 @@ extension String { } } - func withGLChar(operation:UnsafePointer -> ()) { + func withGLChar(_ operation:(UnsafePointer) -> ()) { #if os(Linux) // cStringUsingEncoding isn't yet defined in the Linux Foundation. // This approach is roughly 35X slower than the cStringUsingEncoding one. @@ -216,7 +216,7 @@ extension String { bufferCString.dealloc(self.characters.count) #else - if let value = self.cStringUsingEncoding(NSUTF8StringEncoding) { + if let value = self.cString(using:String.Encoding.utf8) { operation(UnsafePointer(value)) } else { fatalError("Could not convert this string to UTF8: \(self)") diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index e350272e..e578c4e6 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -9,15 +9,15 @@ public class OperationGroup: ImageProcessingOperation { public init() { } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex:fromSourceIndex) } - public func configureGroup(configurationOperation:(input:ImageRelay, output:ImageRelay) -> ()) { + public func configureGroup(_ configurationOperation:(input:ImageRelay, output:ImageRelay) -> ()) { configurationOperation(input:inputImageRelay, output:outputImageRelay) } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { outputImageRelay.transmitPreviousImageToTarget(target, atIndex:atIndex) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/AverageColorExtractor.swift b/framework/Source/Operations/AverageColorExtractor.swift index ad27935d..cee3d336 100755 --- a/framework/Source/Operations/AverageColorExtractor.swift +++ b/framework/Source/Operations/AverageColorExtractor.swift @@ -27,8 +27,8 @@ public class AverageColorExtractor: BasicOperation { releaseIncomingFramebuffers() } - func extractAverageColorFromFramebuffer(framebuffer:Framebuffer) { - var data = [UInt8](count:Int(framebuffer.size.width * framebuffer.size.height * 4), repeatedValue:0) + func extractAverageColorFromFramebuffer(_ framebuffer:Framebuffer) { + var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() @@ -49,7 +49,7 @@ public class AverageColorExtractor: BasicOperation { } } -func averageColorBySequentialReduction(inputFramebuffer inputFramebuffer:Framebuffer, 
shader:ShaderProgram, extractAverageOperation:(Framebuffer) -> ()) { +func averageColorBySequentialReduction(inputFramebuffer:Framebuffer, shader:ShaderProgram, extractAverageOperation:(Framebuffer) -> ()) { var uniformSettings = ShaderUniformSettings() let inputSize = Size(inputFramebuffer.size) let numberOfReductionsInX = floor(log(Double(inputSize.width)) / log(4.0)) diff --git a/framework/Source/Operations/AverageLuminanceExtractor.swift b/framework/Source/Operations/AverageLuminanceExtractor.swift index 27302652..f7c3686f 100644 --- a/framework/Source/Operations/AverageLuminanceExtractor.swift +++ b/framework/Source/Operations/AverageLuminanceExtractor.swift @@ -31,8 +31,8 @@ public class AverageLuminanceExtractor: BasicOperation { releaseIncomingFramebuffers() } - func extractAverageLuminanceFromFramebuffer(framebuffer:Framebuffer) { - var data = [UInt8](count:Int(framebuffer.size.width * framebuffer.size.height * 4), repeatedValue:0) + func extractAverageLuminanceFromFramebuffer(_ framebuffer:Framebuffer) { + var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer = framebuffer framebuffer.resetRetainCount() diff --git a/framework/Source/Operations/BoxBlur.swift b/framework/Source/Operations/BoxBlur.swift index 3e87ac74..d5dc4478 100755 --- a/framework/Source/Operations/BoxBlur.swift +++ b/framework/Source/Operations/BoxBlur.swift @@ -22,7 +22,7 @@ public class BoxBlur: TwoStageOperation { } } -func vertexShaderForOptimizedBoxBlurOfRadius(radius:UInt) -> String { +func vertexShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { guard (radius > 0) else { return OneInputVertexShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) @@ -38,7 +38,7 @@ func vertexShaderForOptimizedBoxBlurOfRadius(radius:UInt) -> String { return shaderString } -func fragmentShaderForOptimizedBoxBlurOfRadius(radius:UInt) -> String { +func fragmentShaderForOptimizedBoxBlurOfRadius(_ radius:UInt) -> String { guard (radius > 0) else { return PassthroughFragmentShader } let numberOfOptimizedOffsets = min(radius / 2 + (radius % 2), 7) diff --git a/framework/Source/Operations/CircleGenerator.swift b/framework/Source/Operations/CircleGenerator.swift index bf428dc1..5bbe8991 100644 --- a/framework/Source/Operations/CircleGenerator.swift +++ b/framework/Source/Operations/CircleGenerator.swift @@ -21,18 +21,18 @@ public class CircleGenerator: ImageGenerator { super.init(size:size) } - public func renderCircleOfRadius(radius:Float, center:Position, circleColor:Color = Color.White, backgroundColor:Color = Color.Black) { + public func renderCircleOfRadius(_ radius:Float, center:Position, circleColor:Color = Color.White, backgroundColor:Color = Color.Black) { let scaledRadius = radius * 2.0 imageFramebuffer.activateFramebufferForRendering() var uniformSettings = ShaderUniformSettings() uniformSettings["circleColor"] = circleColor uniformSettings["backgroundColor"] = backgroundColor uniformSettings["radius"] = scaledRadius - uniformSettings["aspectRatio"] = imageFramebuffer.aspectRatioForRotation(.NoRotation) + uniformSettings["aspectRatio"] = imageFramebuffer.aspectRatioForRotation(.noRotation) let convertedCenterX = (Float(center.x) * 2.0) - 1.0 let convertedCenterY = (Float(center.y) * 2.0) - 1.0 - let scaledYRadius = scaledRadius / imageFramebuffer.aspectRatioForRotation(.NoRotation) + let scaledYRadius = scaledRadius / 
imageFramebuffer.aspectRatioForRotation(.noRotation) uniformSettings["center"] = Position(convertedCenterX, convertedCenterY) let circleVertices:[GLfloat] = [GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY - scaledYRadius), GLfloat(convertedCenterX - scaledRadius), GLfloat(convertedCenterY + scaledYRadius), GLfloat(convertedCenterX + scaledRadius), GLfloat(convertedCenterY + scaledYRadius)] @@ -48,4 +48,4 @@ public class CircleGenerator: ImageGenerator { notifyTargets() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index 2ee47f82..2bb6e74c 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -11,7 +11,7 @@ public class Crop: BasicOperation { override func renderFrame() { let inputFramebuffer:Framebuffer = inputFramebuffers[0]! - let inputSize = inputFramebuffer.sizeForTargetOrientation(.Portrait) + let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) let finalCropSize:GLSize let normalizedOffsetFromOrigin:Position @@ -29,13 +29,13 @@ public class Crop: BasicOperation { } let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) - renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:finalCropSize, stencil:false) + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:finalCropSize, stencil:false) - let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) + let textureProperties = InputTextureProperties(textureCoordinates:inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).croppedTextureCoordinates(offsetFromOrigin:normalizedOffsetFromOrigin, cropSize:normalizedCropSize), texture:inputFramebuffer.texture) renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[textureProperties]) releaseIncomingFramebuffers() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/CrosshairGenerator.swift b/framework/Source/Operations/CrosshairGenerator.swift index 0d2ea8fb..3b24bfb5 100644 --- a/framework/Source/Operations/CrosshairGenerator.swift +++ b/framework/Source/Operations/CrosshairGenerator.swift @@ -29,9 +29,9 @@ public class CrosshairGenerator: ImageGenerator { } - public func renderCrosshairs(positions:[Position]) { + public func renderCrosshairs(_ positions:[Position]) { imageFramebuffer.activateFramebufferForRendering() - imageFramebuffer.timingStyle = .StillImage + imageFramebuffer.timingStyle = .stillImage #if GL glEnable(GLenum(GL_POINT_SPRITE)) glEnable(GLenum(GL_VERTEX_PROGRAM_POINT_SIZE)) @@ -53,4 +53,4 @@ public class CrosshairGenerator: ImageGenerator { notifyTargets() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index 379e3df4..8d4061f6 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ 
b/framework/Source/Operations/GaussianBlur.swift @@ -27,7 +27,7 @@ public class GaussianBlur: TwoStageOperation { // MARK: - // MARK: Blur sizing calculations -func sigmaAndDownsamplingForBlurRadius(radius:Float, limit:Float, override:Bool = false) -> (sigma:Float, downsamplingFactor:Float?) { +func sigmaAndDownsamplingForBlurRadius(_ radius:Float, limit:Float, override:Bool = false) -> (sigma:Float, downsamplingFactor:Float?) { // For now, only do integral sigmas let startingRadius = Float(round(Double(radius))) guard ((startingRadius > limit) && (!override)) else { return (sigma:startingRadius, downsamplingFactor:nil) } @@ -37,7 +37,7 @@ func sigmaAndDownsamplingForBlurRadius(radius:Float, limit:Float, override:Bool // inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent -func pixelRadiusForBlurSigma(sigma:Double) -> UInt { +func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { // 7.0 is the limit for blur size for hardcoded varying offsets let minimumWeightToFindEdgeOfSamplingArea = 1.0 / 256.0 @@ -54,7 +54,7 @@ func pixelRadiusForBlurSigma(sigma:Double) -> UInt { // MARK: - // MARK: Standard Gaussian blur shaders -func standardGaussianWeightsForRadius(blurRadius:UInt, sigma:Double) -> [Double] { +func standardGaussianWeightsForRadius(_ blurRadius:UInt, sigma:Double) -> [Double] { var gaussianWeights = [Double]() var sumOfWeights = 0.0 for gaussianWeightIndex in 0...blurRadius { @@ -70,7 +70,7 @@ func standardGaussianWeightsForRadius(blurRadius:UInt, sigma:Double) -> [Double] return gaussianWeights.map{$0 / sumOfWeights} } -func vertexShaderForStandardGaussianBlurOfRadius(radius:UInt, sigma:Double) -> String { +func vertexShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { guard (radius > 0) else { return OneInputVertexShader } let numberOfBlurCoordinates = radius * 2 + 1 @@ -90,7 +90,7 @@ func vertexShaderForStandardGaussianBlurOfRadius(radius:UInt, sigma:Double) -> S return shaderString } -func fragmentShaderForStandardGaussianBlurOfRadius(radius:UInt, sigma:Double) -> String { +func fragmentShaderForStandardGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { guard (radius > 0) else { return PassthroughFragmentShader } let gaussianWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) @@ -117,7 +117,7 @@ func fragmentShaderForStandardGaussianBlurOfRadius(radius:UInt, sigma:Double) -> // MARK: - // MARK: Optimized Gaussian blur shaders -func optimizedGaussianOffsetsForRadius(blurRadius:UInt, sigma:Double) -> [Double] { +func optimizedGaussianOffsetsForRadius(_ blurRadius:UInt, sigma:Double) -> [Double] { let standardWeights = standardGaussianWeightsForRadius(blurRadius, sigma:sigma) let numberOfOptimizedOffsets = min(blurRadius / 2 + (blurRadius % 2), 7) @@ -133,7 +133,7 @@ func optimizedGaussianOffsetsForRadius(blurRadius:UInt, sigma:Double) -> [Double return optimizedOffsets } -func vertexShaderForOptimizedGaussianBlurOfRadius(radius:UInt, sigma:Double) -> String { +func vertexShaderForOptimizedGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { guard (radius > 0) else { return OneInputVertexShader } let optimizedOffsets = optimizedGaussianOffsetsForRadius(radius, sigma:sigma) @@ -151,7 +151,7 @@ func vertexShaderForOptimizedGaussianBlurOfRadius(radius:UInt, sigma:Double) -> return shaderString } -func fragmentShaderForOptimizedGaussianBlurOfRadius(radius:UInt, sigma:Double) -> String { +func fragmentShaderForOptimizedGaussianBlurOfRadius(_ 
radius:UInt, sigma:Double) -> String { guard (radius > 0) else { return PassthroughFragmentShader } let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) diff --git a/framework/Source/Operations/HarrisCornerDetector.swift b/framework/Source/Operations/HarrisCornerDetector.swift index 9cd254f4..e64fa0ed 100644 --- a/framework/Source/Operations/HarrisCornerDetector.swift +++ b/framework/Source/Operations/HarrisCornerDetector.swift @@ -28,7 +28,7 @@ public class HarrisCornerDetector: OperationGroup { public var blurRadiusInPixels:Float = 2.0 { didSet { gaussianBlur.blurRadiusInPixels = blurRadiusInPixels } } public var sensitivity:Float = 5.0 { didSet { harrisCornerDetector.uniformSettings["sensitivity"] = sensitivity } } public var threshold:Float = 0.2 { didSet { nonMaximumSuppression.uniformSettings["threshold"] = threshold } } - public var cornersDetectedCallback:([Position] -> ())? + public var cornersDetectedCallback:(([Position]) -> ())? let xyDerivative = TextureSamplingOperation(fragmentShader:XYDerivativeFragmentShader) let gaussianBlur = GaussianBlur() @@ -56,13 +56,13 @@ public class HarrisCornerDetector: OperationGroup { } } -func extractCornersFromImage(framebuffer:Framebuffer) -> [Position] { +func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) // var rawImagePixels = [UInt8](count:imageByteSize, repeatedValue:0) // let startTime = CFAbsoluteTimeGetCurrent() - let rawImagePixels = UnsafeMutablePointer.alloc(imageByteSize) + let rawImagePixels = UnsafeMutablePointer(allocatingCapacity: imageByteSize) // -Onone, [UInt8] array: 30 ms for 720p frame on Retina iMac // -O, [UInt8] array: 4 ms for 720p frame on Retina iMac // -Onone, UnsafeMutablePointer: 7 ms for 720p frame on Retina iMac @@ -88,9 +88,9 @@ func extractCornersFromImage(framebuffer:Framebuffer) -> [Position] { currentByte += 4 } - rawImagePixels.dealloc(imageByteSize) + rawImagePixels.deallocateCapacity(imageByteSize) // print("Harris extraction frame time: \(CFAbsoluteTimeGetCurrent() - startTime)") return corners -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index ea35cf90..578b47fd 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -24,11 +24,11 @@ */ public enum HistogramType { - case Red - case Blue - case Green - case Luminance - case RGB + case red + case blue + case green + case luminance + case rgb } public class Histogram: BasicOperation { @@ -39,11 +39,11 @@ public class Histogram: BasicOperation { public init(type:HistogramType) { switch type { - case .Red: super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .Blue: super.init(vertexShader:HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .Green: super.init(vertexShader:HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .Luminance: super.init(vertexShader:HistogramLuminanceSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) - case .RGB: + case .red: super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1) + case .blue: super.init(vertexShader:HistogramBlueSamplingVertexShader, 
+        case .green: super.init(vertexShader:HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1)
+        case .luminance: super.init(vertexShader:HistogramLuminanceSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1)
+        case .rgb:
             super.init(vertexShader:HistogramRedSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader, numberOfInputs:1)
             shader2 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramGreenSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)}
             shader3 = crashOnShaderCompileFailure("Histogram"){try sharedImageProcessingContext.programForVertexShader(HistogramBlueSamplingVertexShader, fragmentShader:HistogramAccumulationFragmentShader)}
@@ -53,10 +53,10 @@ public class Histogram: BasicOperation {
     override func renderFrame() {
         let inputSize = sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!)
         let inputByteSize = Int(inputSize.width * inputSize.height * 4)
-        let data = UnsafeMutablePointer<UInt8>.alloc(inputByteSize)
+        let data = UnsafeMutablePointer<UInt8>(allocatingCapacity:inputByteSize)
         glReadPixels(0, 0, inputSize.width, inputSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data)
-        renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:256, height:3), stencil:mask != nil)
+        renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:256, height:3), stencil:mask != nil)
         releaseIncomingFramebuffers()
         renderFramebuffer.activateFramebufferForRendering()
@@ -86,6 +86,6 @@ public class Histogram: BasicOperation {
         }
         glDisable(GLenum(GL_BLEND))
-        data.dealloc(inputByteSize)
+        data.deallocateCapacity(inputByteSize)
     }
-}
\ No newline at end of file
+}
diff --git a/framework/Source/Operations/HistogramEqualization.swift b/framework/Source/Operations/HistogramEqualization.swift
index c861d688..49dc45a3 100644
--- a/framework/Source/Operations/HistogramEqualization.swift
+++ b/framework/Source/Operations/HistogramEqualization.swift
@@ -10,11 +10,11 @@ public class HistogramEqualization: OperationGroup {
         self.histogram = Histogram(type:type)
         switch type {
-        case .Red: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRedFragmentShader, numberOfInputs:2)
-        case .Blue: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationBlueFragmentShader, numberOfInputs:2)
-        case .Green: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationGreenFragmentShader, numberOfInputs:2)
-        case .Luminance: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationLuminanceFragmentShader, numberOfInputs:2)
-        case .RGB: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRGBFragmentShader, numberOfInputs:2)
+        case .red: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRedFragmentShader, numberOfInputs:2)
+        case .blue: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationBlueFragmentShader, numberOfInputs:2)
+        case .green: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationGreenFragmentShader, numberOfInputs:2)
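For anyone sanity-checking the math in the hunks below: the dataAvailableCallback builds a cumulative histogram per channel and then the standard equalization lookup, lut[i] = (cdf[i] - cdf[0]) * 255 / cdf[255], with integer division. A four-bin toy check of that mapping (values invented for the check):

let cdf = [4, 10, 250, 256] // pretend cumulative histogram over 256 pixels
let lut = cdf.map { ($0 - cdf[0]) * 255 / cdf[cdf.count - 1] }
print(lut) // [0, 5, 245, 251]

+        case .luminance: self.equalizationFilter = 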
BasicOperation(fragmentShader:HistogramEqualizationLuminanceFragmentShader, numberOfInputs:2) + case .rgb: self.equalizationFilter = BasicOperation(fragmentShader:HistogramEqualizationRGBFragmentShader, numberOfInputs:2) } super.init() @@ -23,9 +23,9 @@ public class HistogramEqualization: OperationGroup { self.configureGroup{input, output in self.rawDataOutput.dataAvailableCallback = {data in - var redHistogramBin = [Int](count:256, repeatedValue:0) - var greenHistogramBin = [Int](count:256, repeatedValue:0) - var blueHistogramBin = [Int](count:256, repeatedValue:0) + var redHistogramBin = [Int](repeating:0, count:256) + var greenHistogramBin = [Int](repeating:0, count:256) + var blueHistogramBin = [Int](repeating:0, count:256) let rowWidth = 256 * 4 redHistogramBin[0] = Int(data[rowWidth]) @@ -38,7 +38,7 @@ public class HistogramEqualization: OperationGroup { blueHistogramBin[dataIndex] = blueHistogramBin[dataIndex - 1] + Int(data[rowWidth + (dataIndex * 4) + 2]) } - var equalizationLookupTable = [UInt8](count:256 * 4, repeatedValue:0) + var equalizationLookupTable = [UInt8](repeating:0, count:256 * 4) for binIndex in 0..<256 { equalizationLookupTable[binIndex * 4] = UInt8((((redHistogramBin[binIndex] - redHistogramBin[0]) * 255) / redHistogramBin[255])) equalizationLookupTable[(binIndex * 4) + 1] = UInt8((((greenHistogramBin[binIndex] - greenHistogramBin[0]) * 255) / greenHistogramBin[255])) @@ -46,7 +46,7 @@ public class HistogramEqualization: OperationGroup { equalizationLookupTable[(binIndex * 4) + 3] = 255 } - self.rawDataInput.uploadBytes(equalizationLookupTable, size:Size(width:256, height:1), pixelFormat:.RGBA) + self.rawDataInput.uploadBytes(equalizationLookupTable, size:Size(width:256, height:1), pixelFormat:.rgba) } input --> self.histogram --> self.rawDataOutput @@ -54,4 +54,4 @@ public class HistogramEqualization: OperationGroup { self.rawDataInput --> self.equalizationFilter } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 3747bb09..2325e583 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -8,7 +8,7 @@ public class ImageBuffer: ImageProcessingOperation { public let sources = SourceContainer() var bufferedFramebuffers = [Framebuffer]() - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { bufferedFramebuffers.append(framebuffer) if (bufferedFramebuffers.count > Int(bufferSize)) { let releasedFramebuffer = bufferedFramebuffers.removeFirst() @@ -23,7 +23,7 @@ public class ImageBuffer: ImageProcessingOperation { } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { // Buffers most likely won't need this } } diff --git a/framework/Source/Operations/LanczosResampling.swift b/framework/Source/Operations/LanczosResampling.swift index 2df99aba..7184cd69 100644 --- a/framework/Source/Operations/LanczosResampling.swift +++ b/framework/Source/Operations/LanczosResampling.swift @@ -3,12 +3,12 @@ public class LanczosResampling: BasicOperation { super.init(vertexShader:LanczosResamplingVertexShader, fragmentShader:LanczosResamplingFragmentShader) } - override func internalRenderFunction(inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - let outputRotation = 
overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait) + override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Shrink the vertical component of the first stage - let inputSize = inputFramebuffer.sizeForTargetOrientation(.Portrait) - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:inputSize.width, height:renderFramebuffer.size.height), stencil:false) + let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:inputSize.width, height:renderFramebuffer.size.height), stencil:false) firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) @@ -20,12 +20,12 @@ public class LanczosResampling: BasicOperation { releaseIncomingFramebuffers() // Shrink the width component of the result - let secondStageTexelSize = firstStageFramebuffer.texelSizeForRotation(.NoRotation) + let secondStageTexelSize = firstStageFramebuffer.texelSizeForRotation(.noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width uniformSettings["texelHeight"] = 0.0 renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/LineGenerator.swift b/framework/Source/Operations/LineGenerator.swift index 2e8ab44c..a00c0ba7 100644 --- a/framework/Source/Operations/LineGenerator.swift +++ b/framework/Source/Operations/LineGenerator.swift @@ -13,18 +13,18 @@ #endif public enum Line { - case Infinite(slope:Float, intercept:Float) - case Segment(p1:Position, p2:Position) + case infinite(slope:Float, intercept:Float) + case segment(p1:Position, p2:Position) func toGLEndpoints() -> [GLfloat] { switch self { - case .Infinite(let slope, let intercept): + case .infinite(let slope, let intercept): if (slope > 9000.0) {// Vertical line return [intercept, -1.0, intercept, 1.0] } else { return [-1.0, GLfloat(slope * -1.0 + intercept), 1.0, GLfloat(slope * 1.0 + intercept)] } - case .Segment(let p1, let p2): + case .segment(let p1, let p2): return [p1.x, p1.y, p2.x, p2.y].map {GLfloat($0)} } } @@ -50,7 +50,7 @@ public class LineGenerator: ImageGenerator { ({lineColor = Color.Red})() } - public func renderLines(lines:[Line]) { + public func renderLines(_ lines:[Line]) { imageFramebuffer.activateFramebufferForRendering() lineShader.use() @@ -74,4 +74,4 @@ public class LineGenerator: ImageGenerator { notifyTargets() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index f0ece7ca..87917a5b 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -12,8 +12,8 @@ public class MotionBlur: BasicOperation { 
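The other mechanical change running through these hunks is the lowercasing of enum cases (.Portrait becomes .portrait, .NoRotation becomes .noRotation, and so on) to match the Swift 3 API Design Guidelines. A stand-in sketch of the before/after shape, using an abridged demo enum rather than GPUImage's real ImageOrientation:

enum DemoOrientation {
    case portrait       // Swift 2 style would have been `case Portrait`
    case landscapeLeft
    case landscapeRight
}

func rotationNeeded(for orientation: DemoOrientation) -> Bool {
    switch orientation {
    case .portrait: return false
    case .landscapeLeft, .landscapeRight: return true
    }
}

print(rotationNeeded(for: .portrait)) // false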
super.init(vertexShader:MotionBlurVertexShader, fragmentShader:MotionBlurFragmentShader, numberOfInputs:1) } - override func configureFramebufferSpecificUniforms(inputFramebuffer:Framebuffer) { - let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait) + override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = inputFramebuffer.texelSizeForRotation(outputRotation) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) diff --git a/framework/Source/Operations/Sharpen.swift b/framework/Source/Operations/Sharpen.swift index e9fd2b8c..166780c8 100644 --- a/framework/Source/Operations/Sharpen.swift +++ b/framework/Source/Operations/Sharpen.swift @@ -8,10 +8,10 @@ public class Sharpen: BasicOperation { ({sharpness = 0.0})() } - override func configureFramebufferSpecificUniforms(inputFramebuffer:Framebuffer) { - let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait) + override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSizeForRotation(outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SingleComponentGaussianBlur.swift b/framework/Source/Operations/SingleComponentGaussianBlur.swift index bc576360..3f1bc0af 100644 --- a/framework/Source/Operations/SingleComponentGaussianBlur.swift +++ b/framework/Source/Operations/SingleComponentGaussianBlur.swift @@ -17,7 +17,7 @@ public class SingleComponentGaussianBlur: TwoStageOperation { } -func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(radius:UInt, sigma:Double) -> String { +func fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(_ radius:UInt, sigma:Double) -> String { guard (radius > 0) else { return PassthroughFragmentShader } let standardWeights = standardGaussianWeightsForRadius(radius, sigma:sigma) diff --git a/framework/Source/Operations/SolidColorGenerator.swift b/framework/Source/Operations/SolidColorGenerator.swift index 1258e2c1..67facd28 100644 --- a/framework/Source/Operations/SolidColorGenerator.swift +++ b/framework/Source/Operations/SolidColorGenerator.swift @@ -1,10 +1,10 @@ public class SolidColorGenerator: ImageGenerator { - public func renderColor(color:Color) { + public func renderColor(_ color:Color) { imageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(color) notifyTargets() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index 263cd62b..01b90360 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -22,13 +22,13 @@ public class TransformOperation: BasicOperation { ({transform = Matrix4x4.Identity})() } - override func internalRenderFunction(inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { 
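Most of the signature churn here and throughout the patch comes from Swift 3 giving the first parameter an external label by default; adding the underscore preserves the existing Swift 2 call sites. A minimal stand-in example (DemoFramebuffer is a placeholder, not GPUImage's Framebuffer):

struct DemoFramebuffer {}

// Without the leading `_`, Swift 3 callers would have to write
// render(inputFramebuffer: fb); with it, the Swift 2 call shape still compiles.
func render(_ inputFramebuffer: DemoFramebuffer) {
    print("rendering \(inputFramebuffer)")
}

render(DemoFramebuffer())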
renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:normalizedImageVertices, inputTextures:textureProperties)
         releaseIncomingFramebuffers()
     }
-    override func configureFramebufferSpecificUniforms(inputFramebuffer:Framebuffer) {
-        let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait)
+    override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) {
+        let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait)
         let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation)
         let orthoMatrix = orthographicMatrix(-1.0, right:1.0, bottom:-1.0 * aspectRatio, top:1.0 * aspectRatio, near:-1.0, far:1.0)
         normalizedImageVertices = normalizedImageVerticesForAspectRatio(aspectRatio)
@@ -37,6 +37,6 @@
     }
 }
-func normalizedImageVerticesForAspectRatio(aspectRatio:Float) -> [GLfloat] {
+func normalizedImageVerticesForAspectRatio(_ aspectRatio:Float) -> [GLfloat] {
     return [-1.0, GLfloat(-aspectRatio), 1.0, GLfloat(-aspectRatio), -1.0, GLfloat(aspectRatio), 1.0, GLfloat(aspectRatio)]
-}
\ No newline at end of file
+}
diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift
index 31397047..9b7e71f4 100755
--- a/framework/Source/Pipeline.swift
+++ b/framework/Source/Pipeline.swift
@@ -3,14 +3,14 @@
 public protocol ImageSource {
     var targets:TargetContainer { get }
-    func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt)
+    func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt)
 }
 public protocol ImageConsumer:AnyObject {
     var maximumInputs:UInt { get }
     var sources:SourceContainer { get }
-    func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt)
+    func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt)
 }
 public protocol ImageProcessingOperation: ImageConsumer, ImageSource {
@@ -18,7 +18,7 @@
 infix operator --> { associativity left precedence 140 }
-public func --><T:ImageConsumer>(source:ImageSource, destination:T) -> T {
+@discardableResult public func --><T:ImageConsumer>(source:ImageSource, destination:T) -> T {
     source.addTarget(destination)
     return destination
 }
@@ -27,7 +27,7 @@
// MARK: Extensions and supporting types
 public extension ImageSource {
-    public func addTarget(target:ImageConsumer, atTargetIndex:UInt? = nil) {
+    public func addTarget(_ target:ImageConsumer, atTargetIndex:UInt? = nil) {
         if let targetIndex = atTargetIndex {
             target.setSource(self, atIndex:targetIndex)
             targets.append(target, indexAtTarget:targetIndex)
@@ -47,7 +47,7 @@ public extension ImageSource {
         targets.removeAll()
     }
-    public func updateTargetsWithFramebuffer(framebuffer:Framebuffer) {
+    public func updateTargetsWithFramebuffer(_ framebuffer:Framebuffer) {
         if targets.count == 0 { // Deal with the case where no targets are attached by immediately returning framebuffer to cache
             framebuffer.lock()
             framebuffer.unlock()
@@ -64,15 +64,15 @@
 }
 public extension ImageConsumer {
-    public func addSource(source:ImageSource) -> UInt? {
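Two Swift 3 items in this Pipeline.swift hunk deserve a closer look. First, --> now carries @discardableResult, so chains whose tail value is ignored stay warning-free. Second, the infix operator --> { associativity left precedence 140 } declaration kept above is the Swift 2.2 spelling; SE-0077 replaces numeric precedence with precedence groups in Swift 3's final grammar. A self-contained sketch of both, using stand-in types rather than GPUImage's protocols:

protocol Consumer: AnyObject {}
final class PassthroughNode: Consumer {}

// Swift 3 operator declaration: a precedence group instead of a number.
infix operator -->: AdditionPrecedence

@discardableResult
func --> <T: Consumer>(source: Consumer, destination: T) -> T {
    // A real pipeline would register `destination` as a target of `source` here.
    return destination
}

let a = PassthroughNode(), b = PassthroughNode()
a --> b // no "unused result" warning, thanks to @discardableResult

+    public func addSource(_ source:ImageSource) -> UInt?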
{ return sources.append(source, maximumInputs:maximumInputs) } - public func setSource(source:ImageSource, atIndex:UInt) { - sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) + public func setSource(_ source:ImageSource, atIndex:UInt) { + _ = sources.insert(source, atIndex:atIndex, maximumInputs:maximumInputs) } - public func removeSourceAtIndex(index:UInt) { + public func removeSourceAtIndex(_ index:UInt) { sources.removeAtIndex(index) } } @@ -86,28 +86,28 @@ class WeakImageConsumer { } } -public class TargetContainer:SequenceType { +public class TargetContainer:Sequence { var targets = [WeakImageConsumer]() var count:Int { get {return targets.count}} public init() { } - public func append(target:ImageConsumer, indexAtTarget:UInt) { + public func append(_ target:ImageConsumer, indexAtTarget:UInt) { // TODO: Don't allow the addition of a target more than once targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget)) } - public func generate() -> AnyGenerator<(ImageConsumer, UInt)> { + public func makeIterator() -> AnyIterator<(ImageConsumer, UInt)> { var index = 0 - return AnyGenerator { () -> (ImageConsumer, UInt)? in + return AnyIterator { () -> (ImageConsumer, UInt)? in if (index >= self.targets.count) { return nil } while (self.targets[index].value == nil) { - self.targets.removeAtIndex(index) + self.targets.remove(at:index) if (index >= self.targets.count) { return nil } @@ -129,7 +129,7 @@ public class SourceContainer { public init() { } - public func append(source:ImageSource, maximumInputs:UInt) -> UInt? { + public func append(_ source:ImageSource, maximumInputs:UInt) -> UInt? { var currentIndex:UInt = 0 while currentIndex < maximumInputs { if (sources[currentIndex] == nil) { @@ -142,13 +142,13 @@ public class SourceContainer { return nil } - public func insert(source:ImageSource, atIndex:UInt, maximumInputs:UInt) -> UInt { + public func insert(_ source:ImageSource, atIndex:UInt, maximumInputs:UInt) -> UInt { guard (atIndex < maximumInputs) else { fatalError("ERROR: Attempted to set a source beyond the maximum number of inputs on this operation") } sources[atIndex] = source return atIndex } - public func removeAtIndex(index:UInt) { + public func removeAtIndex(_ index:UInt) { sources[index] = nil } } @@ -164,11 +164,11 @@ public class ImageRelay: ImageProcessingOperation { init() { } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { sources.sources[0]?.transmitPreviousImageToTarget(self, atIndex:0) } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if let newImageCallback = newImageCallback { newImageCallback(framebuffer) } @@ -177,7 +177,7 @@ public class ImageRelay: ImageProcessingOperation { } } - public func relayFramebufferOnward(framebuffer:Framebuffer) { + public func relayFramebufferOnward(_ framebuffer:Framebuffer) { // Need to override to guarantee a removal of the previously applied lock for _ in targets { framebuffer.lock() diff --git a/framework/Source/RawDataInput.swift b/framework/Source/RawDataInput.swift index 4a216edb..5be5e891 100644 --- a/framework/Source/RawDataInput.swift +++ b/framework/Source/RawDataInput.swift @@ -13,17 +13,17 @@ #endif public enum PixelFormat { - case BGRA - case RGBA - case RGB - case Luminance + case bgra + case rgba + case rgb + case luminance func toGL() -> 
Int32 { switch self { - case .BGRA: return GL_BGRA - case .RGBA: return GL_RGBA - case .RGB: return GL_RGB - case .Luminance: return GL_LUMINANCE + case .bgra: return GL_BGRA + case .rgba: return GL_RGBA + case .rgb: return GL_RGB + case .luminance: return GL_LUMINANCE } } } @@ -36,7 +36,7 @@ public class RawDataInput: ImageSource { } - public func uploadBytes(bytes:[UInt8], size:Size, pixelFormat:PixelFormat, orientation:ImageOrientation = .Portrait) { + public func uploadBytes(_ bytes:[UInt8], size:Size, pixelFormat:PixelFormat, orientation:ImageOrientation = .portrait) { let dataFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:orientation, size:GLSize(size), textureOnly:true, internalFormat:pixelFormat.toGL(), format:pixelFormat.toGL()) glActiveTexture(GLenum(GL_TEXTURE1)) @@ -46,7 +46,7 @@ public class RawDataInput: ImageSource { updateTargetsWithFramebuffer(dataFramebuffer) } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { // TODO: Determine if this is necessary for the raw data uploads } -} \ No newline at end of file +} diff --git a/framework/Source/RawDataOutput.swift b/framework/Source/RawDataOutput.swift index 6985bc7c..a457e0ca 100644 --- a/framework/Source/RawDataOutput.swift +++ b/framework/Source/RawDataOutput.swift @@ -13,7 +13,7 @@ #endif public class RawDataOutput: ImageConsumer { - public var dataAvailableCallback:([UInt8] -> ())? + public var dataAvailableCallback:(([UInt8]) -> ())? public let sources = SourceContainer() public let maximumInputs:UInt = 1 @@ -22,19 +22,19 @@ public class RawDataOutput: ImageConsumer { } // TODO: Replace with texture caches - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Black) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() - var data = [UInt8](count:Int(framebuffer.size.width * framebuffer.size.height * 4), repeatedValue:0) + var data = [UInt8](repeating:0, count:Int(framebuffer.size.width * framebuffer.size.height * 4)) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), &data) renderFramebuffer.unlock() dataAvailableCallback?(data) } -} \ No newline at end of file +} diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index cef600ec..6c293363 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -18,23 +18,23 @@ extension SerialDispatch { #else -func runAsynchronouslyOnMainQueue(mainThreadOperation:() -> ()) { - if (NSThread.isMainThread()) { +func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { + if 
(Thread.isMainThread()) {
         mainThreadOperation()
     } else {
-        dispatch_async(dispatch_get_main_queue(), mainThreadOperation)
+        DispatchQueue.main.async(execute: mainThreadOperation)
     }
 }
-func runOnMainQueue(mainThreadOperation:() -> ()) {
-    if (NSThread.isMainThread()) {
+func runOnMainQueue(_ mainThreadOperation:() -> ()) {
+    if (Thread.isMainThread()) {
         mainThreadOperation()
     } else {
-        dispatch_sync(dispatch_get_main_queue(), mainThreadOperation)
+        DispatchQueue.main.sync(execute: mainThreadOperation)
     }
 }
-@warn_unused_result func runOnMainQueue<T>(mainThreadOperation:() -> T) -> T {
+@warn_unused_result func runOnMainQueue<T>(_ mainThreadOperation:() -> T) -> T {
     var returnedValue: T!
     runOnMainQueue {
         returnedValue = mainThreadOperation()
@@ -46,34 +46,33 @@ func runOnMainQueue(mainThreadOperation:() -> ()) {
// MARK: SerialDispatch extension
 public protocol SerialDispatch {
-    var serialDispatchQueue:dispatch_queue_t { get }
-    var dispatchQueueKey:UnsafePointer<Void> { get }
+    var serialDispatchQueue:DispatchQueue { get }
+    var dispatchQueueKey:DispatchSpecificKey<Int> { get }
     func makeCurrentContext()
 }
 public extension SerialDispatch {
-    public func runOperationAsynchronously(operation:() -> ()) {
-        dispatch_async(self.serialDispatchQueue) {
+    public func runOperationAsynchronously(_ operation:() -> ()) {
+        self.serialDispatchQueue.async {
             self.makeCurrentContext()
             operation()
         }
     }
-    public func runOperationSynchronously(operation:() -> ()) {
+    public func runOperationSynchronously(_ operation:() -> ()) {
         // TODO: Verify this works as intended
-        let context = UnsafeMutablePointer<Void>(Unmanaged.passUnretained(self.serialDispatchQueue).toOpaque())
-        if (dispatch_get_specific(self.dispatchQueueKey) == context) {
+        if (DispatchQueue.getSpecific(key:self.dispatchQueueKey) == 81) {
             operation()
         } else {
-            dispatch_sync(self.serialDispatchQueue) {
+            self.serialDispatchQueue.sync {
                 self.makeCurrentContext()
                 operation()
             }
         }
     }
-    public func runOperationSynchronously(operation:() throws -> ()) throws {
-        var caughtError:ErrorType? = nil
+    public func runOperationSynchronously(_ operation:() throws -> ()) throws {
+        var caughtError:ErrorProtocol? = nil
         runOperationSynchronously {
             do {
                 try operation()
@@ -84,7 +83,7 @@ public extension SerialDispatch {
         if (caughtError != nil) {throw caughtError!}
     }
-    public func runOperationSynchronously<T>(operation:() throws -> T) throws -> T {
+    public func runOperationSynchronously<T>(_ operation:() throws -> T) throws -> T {
         var returnedValue: T!
         try runOperationSynchronously {
             returnedValue = try operation()
@@ -92,7 +91,7 @@
     }
-    public func runOperationSynchronously<T>(operation:() -> T) -> T {
+    public func runOperationSynchronously<T>(_ operation:() -> T) -> T {
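About the DispatchQueue.getSpecific(key:) == 81 comparison earlier in this file: it relies on the GL context's serial queue tagging itself with that sentinel via setSpecific, which the OpenGLContext side of this migration is expected to do. A self-contained sketch of the deadlock-avoidance pattern (the queue label is a placeholder, and 81 is simply the arbitrary sentinel this patch compares against):

import Dispatch

let queueKey = DispatchSpecificKey<Int>()
let serialQueue = DispatchQueue(label: "com.example.glQueue")
serialQueue.setSpecific(key: queueKey, value: 81) // must match the value checked below

func runSynchronously(_ operation: () -> ()) {
    if DispatchQueue.getSpecific(key: queueKey) == 81 {
        operation() // already on the queue; a sync dispatch here would deadlock
    } else {
        serialQueue.sync(execute: operation)
    }
}

runSynchronously { print("ran on the serial queue without deadlocking") }

        var returnedValue: T!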
runOperationSynchronously { returnedValue = operation() @@ -100,4 +99,4 @@ public extension SerialDispatch { return returnedValue } } -#endif \ No newline at end of file +#endif diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 2cefc5c5..67817642 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -15,13 +15,13 @@ import Foundation -struct ShaderCompileError:ErrorType { +struct ShaderCompileError:ErrorProtocol { let compileLog:String } enum ShaderType { - case VertexShader - case FragmentShader + case vertexShader + case fragmentShader } public class ShaderProgram { @@ -42,8 +42,8 @@ public class ShaderProgram { public init(vertexShader:String, fragmentShader:String) throws { program = glCreateProgram() - self.vertexShader = try compileShader(vertexShader, type:.VertexShader) - self.fragmentShader = try compileShader(fragmentShader, type:.FragmentShader) + self.vertexShader = try compileShader(vertexShader, type:.vertexShader) + self.fragmentShader = try compileShader(fragmentShader, type:.fragmentShader) glAttachShader(program, self.vertexShader) glAttachShader(program, self.fragmentShader) @@ -51,11 +51,11 @@ public class ShaderProgram { try link() } - public convenience init(vertexShader:String, fragmentShaderFile:NSURL) throws { + public convenience init(vertexShader:String, fragmentShaderFile:URL) throws { try self.init(vertexShader:vertexShader, fragmentShader:try shaderFromFile(fragmentShaderFile)) } - public convenience init(vertexShaderFile:NSURL, fragmentShaderFile:NSURL) throws { + public convenience init(vertexShaderFile:URL, fragmentShaderFile:URL) throws { try self.init(vertexShader:try shaderFromFile(vertexShaderFile), fragmentShader:try shaderFromFile(fragmentShaderFile)) } @@ -74,7 +74,7 @@ public class ShaderProgram { // MARK: - // MARK: Attributes and uniforms - public func attributeIndex(attribute:String) -> GLuint? { + public func attributeIndex(_ attribute:String) -> GLuint? { if let attributeAddress = attributeAddresses[attribute] { return attributeAddress } else { @@ -93,7 +93,7 @@ public class ShaderProgram { } } - public func uniformIndex(uniform:String) -> GLint? { + public func uniformIndex(_ uniform:String) -> GLint? 
{
     if let uniformAddress = uniformAddresses[uniform] {
         return uniformAddress
     } else {
@@ -114,7 +114,7 @@
// MARK: -
// MARK: Uniform accessors
-    public func setValue(value:GLfloat, forUniform:String) {
+    public func setValue(_ value:GLfloat, forUniform:String) {
         guard let uniformAddress = uniformIndex(forUniform) else {
             debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler")
             return
@@ -125,7 +125,7 @@
         }
     }
-    public func setValue(value:GLint, forUniform:String) {
+    public func setValue(_ value:GLint, forUniform:String) {
         guard let uniformAddress = uniformIndex(forUniform) else {
             debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler")
             return
@@ -136,7 +136,7 @@
         }
     }
-    public func setValue(value:Color, forUniform:String) {
+    public func setValue(_ value:Color, forUniform:String) {
         if colorUniformsUseFourComponents {
             self.setValue(value.toGLArrayWithAlpha(), forUniform:forUniform)
         } else {
@@ -144,7 +144,7 @@
     }
-    public func setValue(value:[GLfloat], forUniform:String) {
+    public func setValue(_ value:[GLfloat], forUniform:String) {
         guard let uniformAddress = uniformIndex(forUniform) else {
             debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler")
             return
@@ -164,7 +164,7 @@
         }
     }
-    public func setMatrix(value:[GLfloat], forUniform:String) {
+    public func setMatrix(_ value:[GLfloat], forUniform:String) {
         guard let uniformAddress = uniformIndex(forUniform) else {
             debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler")
             return
@@ -194,10 +194,10 @@
         var logLength:GLint = 0
         glGetProgramiv(program, GLenum(GL_INFO_LOG_LENGTH), &logLength)
         if (logLength > 0) {
-            var compileLog = [CChar](count:Int(logLength), repeatedValue:0)
+            var compileLog = [CChar](repeating:0, count:Int(logLength))
             glGetProgramInfoLog(program, logLength, &logLength, &compileLog)
-            print("Link log: \(String.fromCString(compileLog))")
+            print("Link log: \(String(cString:compileLog))")
         }
         throw ShaderCompileError(compileLog:"Link error")
@@ -210,15 +210,15 @@
     }
 }
-func compileShader(shaderString:String, type:ShaderType) throws -> GLuint {
+func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint {
     let shaderHandle:GLuint
     switch type {
-    case .VertexShader: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER))
-    case .FragmentShader: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER))
+    case .vertexShader: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER))
+    case .fragmentShader: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER))
     }
     shaderString.withGLChar{glString in
-        var tempString = glString
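The mutable copy below exists because glShaderSource is imported into Swift as taking a pointer to an optional pointer, and the & operator needs a var to point at. The same dance with a stand-in C-style function (CChar standing in for GLchar):

// Stand-in for an API imported as (UnsafeMutablePointer<UnsafePointer<CChar>?>) -> Void.
func fakeShaderSource(_ strings: UnsafeMutablePointer<UnsafePointer<CChar>?>) {
    if let first = strings.pointee {
        print("received source: \(String(cString: first))")
    }
}

"void main() {}".withCString { cString in
    var tempString: UnsafePointer<CChar>? = cString // `&` below requires a mutable binding
    fakeShaderSource(&tempString)
}

+        var tempString:UnsafePointer<GLchar>? = glString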
glShaderSource(shaderHandle, 1, &tempString, nil)
         glCompileShader(shaderHandle)
     }
@@ -229,15 +229,15 @@ func compileShader(shaderString:String, type:ShaderType) throws -> GLuint {
         var logLength:GLint = 0
         glGetShaderiv(shaderHandle, GLenum(GL_INFO_LOG_LENGTH), &logLength)
         if (logLength > 0) {
-            var compileLog = [CChar](count:Int(logLength), repeatedValue:0)
+            var compileLog = [CChar](repeating:0, count:Int(logLength))
             glGetShaderInfoLog(shaderHandle, logLength, &logLength, &compileLog)
-            print("Compile log: \(String.fromCString(compileLog))")
+            print("Compile log: \(String(cString:compileLog))")
             // let compileLogString = String(bytes:compileLog.map{UInt8($0)}, encoding:NSASCIIStringEncoding)
             switch type {
-            case .VertexShader: throw ShaderCompileError(compileLog:"Vertex shader compile error:")
-            case .FragmentShader: throw ShaderCompileError(compileLog:"Fragment shader compile error:")
+            case .vertexShader: throw ShaderCompileError(compileLog:"Vertex shader compile error:")
+            case .fragmentShader: throw ShaderCompileError(compileLog:"Fragment shader compile error:")
             }
         }
     }
@@ -245,7 +245,7 @@
     return shaderHandle
 }
-public func crashOnShaderCompileFailure<T>(shaderName:String, _ operation:() throws -> T) -> T {
+public func crashOnShaderCompileFailure<T>(_ shaderName:String, _ operation:() throws -> T) -> T {
     do {
         return try operation()
     } catch {
@@ -254,11 +254,11 @@
     }
 }
-public func shaderFromFile(file:NSURL) throws -> String {
+public func shaderFromFile(_ file:URL) throws -> String {
     // Note: this is a hack until Foundation's String initializers are fully functional
     // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding)
-    guard (NSFileManager.defaultManager().fileExistsAtPath(file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")}
-    let fragmentShaderString = try NSString(contentsOfFile:file.path!, encoding:NSASCIIStringEncoding)
+    guard (FileManager.default().fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")}
+    let fragmentShaderString = try NSString(contentsOfFile:file.path!, encoding:String.Encoding.ascii.rawValue)
     return String(fragmentShaderString)
-}
\ No newline at end of file
+}
diff --git a/framework/Source/ShaderUniformSettings.swift b/framework/Source/ShaderUniformSettings.swift
index 0c67fa65..c9a56c7d 100644
--- a/framework/Source/ShaderUniformSettings.swift
+++ b/framework/Source/ShaderUniformSettings.swift
@@ -50,7 +50,7 @@ public struct ShaderUniformSettings {
         set(newValue) { uniformValues[index] = newValue }
     }
-    func restoreShaderSettings(shader:ShaderProgram) {
+    func restoreShaderSettings(_ shader:ShaderProgram) {
         for (uniform, value) in uniformValues {
             switch value {
             case let value as Float: shader.setValue(GLfloat(value), forUniform:uniform)
diff --git a/framework/Source/TextureInput.swift b/framework/Source/TextureInput.swift
index f2ef6fdc..6665d0a1 100644
--- a/framework/Source/TextureInput.swift
+++ b/framework/Source/TextureInput.swift
@@ -17,7 +17,7 @@ public class TextureInput: ImageSource {
     let textureFramebuffer:Framebuffer
-    public init(texture:GLuint, size:Size, orientation:ImageOrientation = .Portrait) {
+    public init(texture:GLuint, size:Size, orientation:ImageOrientation = .portrait) {
         do {
             textureFramebuffer = try Framebuffer(context:sharedImageProcessingContext,
orientation:orientation, size:GLSize(size), textureOnly:true, overriddenTexture:texture) } catch { @@ -29,8 +29,8 @@ public class TextureInput: ImageSource { updateTargetsWithFramebuffer(textureFramebuffer) } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { textureFramebuffer.lock() target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex:atIndex) } -} \ No newline at end of file +} diff --git a/framework/Source/TextureOutput.swift b/framework/Source/TextureOutput.swift index 26a2ea0b..072e07c6 100644 --- a/framework/Source/TextureOutput.swift +++ b/framework/Source/TextureOutput.swift @@ -13,14 +13,14 @@ #endif public class TextureOutput: ImageConsumer { - public var newTextureAvailableCallback:(GLuint -> ())? + public var newTextureAvailableCallback:((GLuint) -> ())? public let sources = SourceContainer() public let maximumInputs:UInt = 1 - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { newTextureAvailableCallback?(framebuffer.texture) // TODO: Maybe extend the lifetime of the texture past this if needed framebuffer.unlock() } -} \ No newline at end of file +} diff --git a/framework/Source/TextureSamplingOperation.swift b/framework/Source/TextureSamplingOperation.swift index daf068f7..8534ec6a 100644 --- a/framework/Source/TextureSamplingOperation.swift +++ b/framework/Source/TextureSamplingOperation.swift @@ -5,10 +5,10 @@ public class TextureSamplingOperation: BasicOperation { super.init(vertexShader:vertexShader, fragmentShader:fragmentShader, numberOfInputs:numberOfInputs) } - override func configureFramebufferSpecificUniforms(inputFramebuffer:Framebuffer) { - let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait) + override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { + let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSizeForRotation(outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } -} \ No newline at end of file +} diff --git a/framework/Source/Timestamp.swift b/framework/Source/Timestamp.swift index d861483a..899d5b5d 100644 --- a/framework/Source/Timestamp.swift +++ b/framework/Source/Timestamp.swift @@ -1,7 +1,7 @@ import Foundation // This reimplements CMTime such that it can reach across to Linux -public struct TimestampFlags: OptionSetType { +public struct TimestampFlags: OptionSet { public let rawValue:UInt32 public init(rawValue:UInt32) { self.rawValue = rawValue } diff --git a/framework/Source/TwoStageOperation.swift b/framework/Source/TwoStageOperation.swift index 9dc8851c..834317e2 100644 --- a/framework/Source/TwoStageOperation.swift +++ b/framework/Source/TwoStageOperation.swift @@ -5,8 +5,8 @@ public class TwoStageOperation: BasicOperation { var downsamplingFactor:Float? - override func internalRenderFunction(inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { - let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.Portrait) + override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { + let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) // Downsample let internalStageSize:GLSize @@ -14,14 +14,14 @@ public class TwoStageOperation: BasicOperation { let downsamplingFramebuffer:Framebuffer? if let downsamplingFactor = downsamplingFactor { internalStageSize = GLSize(Size(width:max(5.0, Float(renderFramebuffer.size.width) / downsamplingFactor), height:max(5.0, Float(renderFramebuffer.size.height) / downsamplingFactor))) - downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:internalStageSize, stencil:false) + downsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) downsamplingFramebuffer!.lock() downsamplingFramebuffer!.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertices:standardImageVertices, inputTextures:textureProperties) releaseIncomingFramebuffers() - firstStageTextureProperties = [downsamplingFramebuffer!.texturePropertiesForOutputRotation(.NoRotation)] + firstStageTextureProperties = [downsamplingFramebuffer!.texturePropertiesForOutputRotation(.noRotation)] } else { firstStageTextureProperties = textureProperties internalStageSize = renderFramebuffer.size @@ -29,7 +29,7 @@ public class TwoStageOperation: BasicOperation { } // Render first stage - let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:internalStageSize, stencil:false) + let firstStageFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) firstStageFramebuffer.lock() firstStageFramebuffer.activateFramebufferForRendering() @@ -46,26 +46,26 @@ public class TwoStageOperation: BasicOperation { releaseIncomingFramebuffers() } - let secondStageTexelSize = renderFramebuffer.texelSizeForRotation(.NoRotation) + let secondStageTexelSize = renderFramebuffer.texelSizeForRotation(.noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width * (downsamplingFactor ?? 
1.0) uniformSettings["texelHeight"] = 0.0 // Render second stage and upsample if (downsamplingFactor != nil) { - let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:internalStageSize, stencil:false) + let beforeUpsamplingFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:internalStageSize, stencil:false) beforeUpsamplingFramebuffer.activateFramebufferForRendering() beforeUpsamplingFramebuffer.lock() clearFramebufferWithColor(backgroundColor) - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertices:standardImageVertices, inputTextures:[beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:nil, vertices:standardImageVertices, inputTextures:[beforeUpsamplingFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) beforeUpsamplingFramebuffer.unlock() } else { renderFramebuffer.activateFramebufferForRendering() - renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:[firstStageFramebuffer.texturePropertiesForOutputRotation(.noRotation)]) firstStageFramebuffer.unlock() } } -} \ No newline at end of file +} diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index b771aa41..5c80c32a 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -2,40 +2,40 @@ import Foundation import AVFoundation public protocol CameraDelegate { - func didCaptureBuffer(sampleBuffer: CMSampleBuffer) + func didCaptureBuffer(_ sampleBuffer: CMSampleBuffer) } public enum PhysicalCameraLocation { - case BackFacing - case FrontFacing + case backFacing + case frontFacing // Documentation: "The front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight." func imageOrientation() -> ImageOrientation { switch self { - case .BackFacing: return .LandscapeRight - case .FrontFacing: return .LandscapeLeft + case .backFacing: return .landscapeRight + case .frontFacing: return .landscapeLeft } } func captureDevicePosition() -> AVCaptureDevicePosition { switch self { - case .BackFacing: return .Back - case .FrontFacing: return .Front + case .backFacing: return .back + case .frontFacing: return .front } } func device() -> AVCaptureDevice? { - let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) - for device in devices { + let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) + for device in devices! { if (device.position == self.captureDevicePosition()) { return device as? 
AVCaptureDevice } } - return AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) + return AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeVideo) } } -struct CameraError: ErrorType { +struct CameraError: ErrorProtocol { } let initialBenchmarkFramesToIgnore = 5 @@ -76,9 +76,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var supportsFullYUVRange:Bool = false let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? - let frameRenderingSemaphore = dispatch_semaphore_create(1) - let cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0) - let audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0) + let frameRenderingSemaphore = DispatchSemaphore(value:1) + let cameraProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.qosUserInitiated) + let audioProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.qosUtility) let framesToIgnore = 5 var numberOfFramesCaptured = 0 @@ -86,7 +86,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer var framesSinceLastCheck = 0 var lastCheckTime = CFAbsoluteTimeGetCurrent() - public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .BackFacing, captureAsYUV:Bool = true) throws { + public init(sessionPreset:String, cameraDevice:AVCaptureDevice? = nil, location:PhysicalCameraLocation = .backFacing, captureAsYUV:Bool = true) throws { self.location = location self.captureAsYUV = captureAsYUV @@ -129,22 +129,22 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if captureAsYUV { supportsFullYUVRange = false let supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes - for currentPixelFormat in supportedPixelFormats { - if ((currentPixelFormat as! NSNumber).intValue == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { + for currentPixelFormat in supportedPixelFormats! { + if ((currentPixelFormat as! 
NSNumber).int32Value == Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)) { supportsFullYUVRange = true } } if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(int:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] } if (captureSession.canAddOutput(videoOutput)) { @@ -166,13 +166,13 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - public func captureOutput(captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, fromConnection connection:AVCaptureConnection!) { + public func captureOutput(_ captureOutput:AVCaptureOutput!, didOutputSampleBuffer sampleBuffer:CMSampleBuffer!, from connection:AVCaptureConnection!) { guard (captureOutput != audioOutput) else { self.processAudioSampleBuffer(sampleBuffer) return } - guard (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) == 0) else { return } + guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.Success) else { return } let startTime = CFAbsoluteTimeGetCurrent() @@ -190,7 +190,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let luminanceFramebuffer:Framebuffer let chrominanceFramebuffer:Framebuffer if sharedImageProcessingContext.supportsTextureCaches() { - var luminanceTextureRef:CVOpenGLESTextureRef? = nil + var luminanceTextureRef:CVOpenGLESTexture? = nil let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef) let luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef!) glActiveTexture(GLenum(GL_TEXTURE4)) @@ -199,7 +199,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE) luminanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true, overriddenTexture:luminanceTexture) - var chrominanceTextureRef:CVOpenGLESTextureRef? = nil + var chrominanceTextureRef:CVOpenGLESTexture? 
= nil
             let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceTextureRef)
             let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef!)
             glActiveTexture(GLenum(GL_TEXTURE5))
@@ -222,7 +222,7 @@
                 glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(cameraFrame, 1))
             }
-            cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:luminanceFramebuffer.sizeForTargetOrientation(.Portrait), textureOnly:false)
+            cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:luminanceFramebuffer.sizeForTargetOrientation(.portrait), textureOnly:false)
             let conversionMatrix:Matrix3x3
             if (self.supportsFullYUVRange) {
@@ -238,7 +238,7 @@
             }
             CVPixelBufferUnlockBaseAddress(cameraFrame, 0)
-            cameraFramebuffer.timingStyle = .VideoFrame(timestamp:Timestamp(currentTime))
+            cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime))
             self.updateTargetsWithFramebuffer(cameraFramebuffer)
             if self.runBenchmark {
@@ -261,7 +261,7 @@
                 self.framesSinceLastCheck += 1
             }
-            dispatch_semaphore_signal(self.frameRenderingSemaphore)
+            self.frameRenderingSemaphore.signal()
         }
     }
@@ -269,18 +269,18 @@
         self.numberOfFramesCaptured = 0
         self.totalFrameTimeDuringCapture = 0
-        if (!captureSession.running) {
+        if (!captureSession.isRunning) {
             captureSession.startRunning()
         }
     }
     public func stopCapture() {
-        if (!captureSession.running) {
+        if (captureSession.isRunning) {
             captureSession.stopRunning()
         }
     }
-    public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) {
+    public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) {
         // Not needed for camera inputs
     }
@@ -294,7 +294,7 @@
         defer {
             captureSession.commitConfiguration()
         }
-        microphone = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
+        microphone = AVCaptureDevice.defaultDevice(withMediaType:AVMediaTypeAudio)
         audioInput = try AVCaptureDeviceInput(device:microphone)
         if captureSession.canAddInput(audioInput) {
             captureSession.addInput(audioInput)
@@ -318,7 +318,7 @@
         captureSession.commitConfiguration()
     }
-    func processAudioSampleBuffer(sampleBuffer:CMSampleBuffer) {
+    func processAudioSampleBuffer(_ sampleBuffer:CMSampleBuffer) {
         self.audioEncodingTarget?.processAudioBuffer(sampleBuffer)
     }
 }
diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift
index 1705b6e4..64452173 100644
--- a/framework/Source/iOS/MovieInput.swift
+++ b/framework/Source/iOS/MovieInput.swift
@@ -26,16 +26,16 @@ public class MovieInput: ImageSource {
         assetReader = try AVAssetReader(asset:self.asset)
-
let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(int:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] - let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracksWithMediaType(AVMediaTypeVideo)[0], outputSettings:outputSettings) + let outputSettings:[String:AnyObject] = [(kCVPixelBufferPixelFormatTypeKey as String):NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + let readerVideoTrackOutput = AVAssetReaderTrackOutput(track:self.asset.tracks(withMediaType: AVMediaTypeVideo)[0], outputSettings:outputSettings) readerVideoTrackOutput.alwaysCopiesSampleData = false - assetReader.addOutput(readerVideoTrackOutput) + assetReader.add(readerVideoTrackOutput) // TODO: Audio here } - public convenience init(url:NSURL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { - let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(bool:true)] - let inputAsset = AVURLAsset(URL:url, options:inputOptions) + public convenience init(url:URL, playAtActualSpeed:Bool = false, loop:Bool = false) throws { + let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey:NSNumber(value:true)] + let inputAsset = AVURLAsset(url:url, options:inputOptions) try self.init(asset:inputAsset, playAtActualSpeed:playAtActualSpeed, loop:loop) } @@ -43,9 +43,9 @@ public class MovieInput: ImageSource { // MARK: Playback control public func start() { - asset.loadValuesAsynchronouslyForKeys(["tracks"], completionHandler: { - dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { - guard (self.asset.statusOfValueForKey("tracks", error:nil) == .Loaded) else { return } + asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ + DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.qosDefault).async(execute: { + guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { print("Couldn't start reading") @@ -60,11 +60,11 @@ public class MovieInput: ImageSource { } } - while (self.assetReader.status == .Reading) { + while (self.assetReader.status == .reading) { self.readNextVideoFrameFromOutput(readerVideoTrackOutput!) } - if (self.assetReader.status == .Completed) { + if (self.assetReader.status == .completed) { self.assetReader.cancelReading() if (self.loop) { @@ -89,8 +89,8 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions - func readNextVideoFrameFromOutput(videoTrackOutput:AVAssetReaderOutput) { - if ((assetReader.status == .Reading) && !videoEncodingIsFinished) { + func readNextVideoFrameFromOutput(_ videoTrackOutput:AVAssetReaderOutput) { + if ((assetReader.status == .reading) && !videoEncodingIsFinished) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { // Do this outside of the video processing queue to not slow that down while waiting @@ -130,7 +130,7 @@ public class MovieInput: ImageSource { } - func processMovieFrame(frame:CMSampleBuffer) { + func processMovieFrame(_ frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! 
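Between these hunks, a short usage sketch for anyone validating the migrated API end to end. MovieInput(url:playAtActualSpeed:loop:), start(), GaussianBlur, RawDataOutput, and --> are all as declared elsewhere in this patch; the file path is a placeholder:

import Foundation
import GPUImage

let movieURL = URL(fileURLWithPath: "sample.mp4") // placeholder path

do {
    let movie = try MovieInput(url: movieURL, playAtActualSpeed: true)
    let blur = GaussianBlur()
    blur.blurRadiusInPixels = 2.0
    let output = RawDataOutput()
    output.dataAvailableCallback = { data in
        print("got \(data.count) bytes of filtered frame data")
    }
    movie --> blur --> output
    movie.start()
} catch {
    print("Couldn't read movie: \(error)")
}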
@@ -138,7 +138,7 @@ public class MovieInput: ImageSource { self.processMovieFrame(movieFrame, withSampleTime:currentSampleTime) } - func processMovieFrame(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, 0) @@ -157,24 +157,24 @@ public class MovieInput: ImageSource { let startTime = CFAbsoluteTimeGetCurrent() - let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) luminanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE0)) glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0)) - let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) + let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true) chrominanceFramebuffer.lock() glActiveTexture(GLenum(GL_TEXTURE1)) glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1)) - let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) + let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) CVPixelBufferUnlockBaseAddress(movieFrame, 0) - movieFramebuffer.timingStyle = .VideoFrame(timestamp:Timestamp(withSampleTime)) + movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) if self.runBenchmark { @@ -186,7 +186,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } -} \ No newline at end of file +} diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index a5833c72..0a0df377 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -2,7 +2,7 @@ import 
AVFoundation public protocol AudioEncodingTarget { func activateAudioTrack() - func processAudioBuffer(sampleBuffer:CMSampleBuffer) + func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) } public class MovieOutput: ImageConsumer, AudioEncodingTarget { @@ -26,7 +26,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var pixelBuffer:CVPixelBuffer? = nil var renderFramebuffer:Framebuffer! - public init(URL:NSURL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { + public init(URL:Foundation.URL, size:Size, fileType:String = AVFileTypeQuickTimeMovie, liveVideo:Bool = false, settings:[String:AnyObject]? = nil) throws { if sharedImageProcessingContext.supportsTextureCaches() { self.colorSwizzlingShader = sharedImageProcessingContext.passthroughShader } else { @@ -34,7 +34,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } self.size = size - assetWriter = try AVAssetWriter(URL:URL, fileType:fileType) + assetWriter = try AVAssetWriter(url:URL, fileType:fileType) // Set this to make sure that a functional movie is produced, even if the recording is cut off mid-stream. Only the last second should be lost in that case. assetWriter.movieFragmentInterval = CMTimeMakeWithSeconds(1.0, 1000) @@ -45,8 +45,8 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings = [String:AnyObject]() } - localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(float:size.width) - localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(float:size.height) + localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) + localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) @@ -54,12 +54,12 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { encodingLiveVideo = liveVideo // You need to use BGRA for the video in order to get realtime encoding. I use a color-swizzling shader to line up glReadPixels' normal RGBA output with the movie input's BGRA. - let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(int:Int32(kCVPixelFormatType_32BGRA)), - kCVPixelBufferWidthKey as String:NSNumber(float:size.width), - kCVPixelBufferHeightKey as String:NSNumber(float:size.height)] + let sourcePixelBufferAttributesDictionary:[String:AnyObject] = [kCVPixelBufferPixelFormatTypeKey as String:NSNumber(value:Int32(kCVPixelFormatType_32BGRA)), + kCVPixelBufferWidthKey as String:NSNumber(value:size.width), + kCVPixelBufferHeightKey as String:NSNumber(value:size.height)] assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput:assetWriterVideoInput, sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary) - assetWriter.addInput(assetWriterVideoInput) + assetWriter.add(assetWriterVideoInput) } public func startRecording() { @@ -79,44 +79,44 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate) let bufferSize = GLSize(self.size) - var cachedTextureRef:CVOpenGLESTextureRef? = nil + var cachedTextureRef:CVOpenGLESTexture? 
= nil let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, self.pixelBuffer!, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef) let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!) - self.renderFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:.Portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) + self.renderFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture) } } - public func finishRecording(completionCallback:(() -> Void)? = nil) { + public func finishRecording(_ completionCallback:(() -> Void)? = nil) { sharedImageProcessingContext.runOperationSynchronously{ self.isRecording = false - if (self.assetWriter.status == .Completed || self.assetWriter.status == .Cancelled || self.assetWriter.status == .Unknown) { + if (self.assetWriter.status == .completed || self.assetWriter.status == .cancelled || self.assetWriter.status == .unknown) { sharedImageProcessingContext.runOperationAsynchronously{ completionCallback?() } return } - if ((self.assetWriter.status == .Writing) && (!self.videoEncodingIsFinished)) { + if ((self.assetWriter.status == .writing) && (!self.videoEncodingIsFinished)) { self.videoEncodingIsFinished = true self.assetWriterVideoInput.markAsFinished() } - if ((self.assetWriter.status == .Writing) && (!self.audioEncodingIsFinished)) { + if ((self.assetWriter.status == .writing) && (!self.audioEncodingIsFinished)) { self.audioEncodingIsFinished = true self.assetWriterAudioInput?.markAsFinished() } // Why can't I use ?? here for the callback? 
if let callback = completionCallback { - self.assetWriter.finishWritingWithCompletionHandler(callback) + self.assetWriter.finishWriting(completionHandler: callback) } else { - self.assetWriter.finishWritingWithCompletionHandler{} + self.assetWriter.finishWriting{} } } } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { defer { framebuffer.unlock() } @@ -127,16 +127,16 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { guard (frameTime != previousFrameTime) else { return } if (startTime == nil) { - if (assetWriter.status != .Writing) { + if (assetWriter.status != .writing) { assetWriter.startWriting() } - assetWriter.startSessionAtSourceTime(frameTime) + assetWriter.startSession(atSourceTime: frameTime) startTime = frameTime } // TODO: Run the following on an internal movie recording dispatch queue, context - guard (assetWriterVideoInput.readyForMoreMediaData || (!encodingLiveVideo)) else { + guard (assetWriterVideoInput.isReadyForMoreMediaData || (!encodingLiveVideo)) else { debugPrint("Had to drop a frame at time \(frameTime)") return } @@ -148,7 +148,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { renderIntoPixelBuffer(pixelBuffer!, framebuffer:framebuffer) - if (!assetWriterPixelBufferInput.appendPixelBuffer(pixelBuffer!, withPresentationTime:frameTime)) { + if (!assetWriterPixelBufferInput.append(pixelBuffer!, withPresentationTime:frameTime)) { debugPrint("Problem appending pixel buffer at time: \(frameTime)") } @@ -158,7 +158,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } } - func renderIntoPixelBuffer(pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { + func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) { if !sharedImageProcessingContext.supportsTextureCaches() { renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size)) renderFramebuffer.lock() @@ -167,7 +167,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Black) CVPixelBufferLockBaseAddress(pixelBuffer, 0) - renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) if sharedImageProcessingContext.supportsTextureCaches() { glFinish() @@ -183,29 +183,29 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { public func activateAudioTrack() { // TODO: Add ability to set custom output settings assetWriterAudioInput = AVAssetWriterInput(mediaType:AVMediaTypeAudio, outputSettings:nil) - assetWriter.addInput(assetWriterAudioInput!) + assetWriter.add(assetWriterAudioInput!) 
assetWriterAudioInput?.expectsMediaDataInRealTime = encodingLiveVideo } - public func processAudioBuffer(sampleBuffer:CMSampleBuffer) { + public func processAudioBuffer(_ sampleBuffer:CMSampleBuffer) { guard let assetWriterAudioInput = assetWriterAudioInput else { return } sharedImageProcessingContext.runOperationSynchronously{ let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) if (self.startTime == nil) { - if (self.assetWriter.status != .Writing) { + if (self.assetWriter.status != .writing) { self.assetWriter.startWriting() } - self.assetWriter.startSessionAtSourceTime(currentSampleTime) + self.assetWriter.startSession(atSourceTime: currentSampleTime) self.startTime = currentSampleTime } - guard (assetWriterAudioInput.readyForMoreMediaData || (!self.encodingLiveVideo)) else { + guard (assetWriterAudioInput.isReadyForMoreMediaData || (!self.encodingLiveVideo)) else { return } - if (!assetWriterAudioInput.appendSampleBuffer(sampleBuffer)) { + if (!assetWriterAudioInput.append(sampleBuffer)) { print("Trouble appending audio sample buffer") } } @@ -226,4 +226,4 @@ public extension Timestamp { return CMTimeMakeWithEpoch(value, timescale, epoch) } } -} \ No newline at end of file +} diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index a740d7ac..81b3f9e3 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -16,26 +16,23 @@ public class OpenGLContext: SerialDispatch { return crashOnShaderCompileFailure("OpenGLContext"){return try self.programForVertexShader(OneInputVertexShader, fragmentShader:PassthroughFragmentShader)} }() - lazy var coreVideoTextureCache:CVOpenGLESTextureCacheRef = { - var newTextureCache:CVOpenGLESTextureCacheRef? = nil + lazy var coreVideoTextureCache:CVOpenGLESTextureCache = { + var newTextureCache:CVOpenGLESTextureCache? = nil let err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, self.context, nil, &newTextureCache) return newTextureCache! }() - public let serialDispatchQueue:dispatch_queue_t = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.processingQueue", nil) - var dispatchKey:Int = 1 - public let dispatchQueueKey:UnsafePointer - + public let serialDispatchQueue:DispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.processingQueue", attributes: []) + public let dispatchQueueKey = DispatchSpecificKey() + // MARK: - // MARK: Initialization and teardown init() { - let context = UnsafeMutablePointer(Unmanaged.passUnretained(self.serialDispatchQueue).toOpaque()) - dispatchQueueKey = UnsafePointer(bitPattern:dispatchKey) - dispatch_queue_set_specific(serialDispatchQueue, dispatchQueueKey, context, nil) + serialDispatchQueue.setSpecific(key:dispatchQueueKey, value:81) - guard let generatedContext = EAGLContext(API:.OpenGLES2, sharegroup:imageProcessingShareGroup) else { + guard let generatedContext = EAGLContext(api:.openGLES2, sharegroup:imageProcessingShareGroup) else { fatalError("Unable to create an OpenGL ES 2.0 context. 
The GPUImage framework requires OpenGL ES 2.0 support to work.") } @@ -50,9 +47,9 @@ public class OpenGLContext: SerialDispatch { // MARK: Rendering public func makeCurrentContext() { - if (EAGLContext.currentContext() != self.context) + if (EAGLContext.current() != self.context) { - EAGLContext.setCurrentContext(self.context) + EAGLContext.setCurrent(self.context) } } @@ -90,7 +87,7 @@ public class OpenGLContext: SerialDispatch { lazy var extensionString:String = { return self.runOperationSynchronously{ self.makeCurrentContext() - return String.fromCString(UnsafePointer(glGetString(GLenum(GL_EXTENSIONS))))! + return String(cString:UnsafePointer(glGetString(GLenum(GL_EXTENSIONS)))) } }() -} \ No newline at end of file +} diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 7c236d78..3c1b879e 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -6,10 +6,10 @@ public class PictureInput: ImageSource { var imageFramebuffer:Framebuffer! var hasProcessedImage:Bool = false - public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { + public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { // TODO: Dispatch this whole thing asynchronously to move image loading off main thread - let widthOfImage = GLint(CGImageGetWidth(image)) - let heightOfImage = GLint(CGImageGetHeight(image)) + let widthOfImage = GLint(image.width) + let heightOfImage = GLint(image.height) // If passed an empty image reference, CGContextDrawImage will fail in future versions of the SDK. guard((widthOfImage > 0) && (heightOfImage > 0)) else { fatalError("Tried to pass in a zero-sized image") } @@ -37,32 +37,32 @@ public class PictureInput: ImageSource { } var imageData:UnsafeMutablePointer! - var dataFromImageDataProvider:CFDataRef! + var dataFromImageDataProvider:CFData! var format = GL_BGRA if (!shouldRedrawUsingCoreGraphics) { /* Check that the memory layout is compatible with GL, as we cannot use glPixelStore to * tell GL about the memory layout with GLES. 
*/ - if ((CGImageGetBytesPerRow(image) != CGImageGetWidth(image) * 4) || (CGImageGetBitsPerPixel(image) != 32) || (CGImageGetBitsPerComponent(image) != 8)) + if ((image.bytesPerRow != image.width * 4) || (image.bitsPerPixel != 32) || (image.bitsPerComponent != 8)) { shouldRedrawUsingCoreGraphics = true } else { /* Check that the bitmap pixel format is compatible with GL */ - let bitmapInfo = CGImageGetBitmapInfo(image) - if (bitmapInfo.contains(.FloatComponents)) { + let bitmapInfo = image.bitmapInfo + if (bitmapInfo.contains(.floatComponents)) { /* We don't support float components for use directly in GL */ shouldRedrawUsingCoreGraphics = true } else { - let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.AlphaInfoMask.rawValue) - if (bitmapInfo.contains(.ByteOrder32Little)) { + let alphaInfo = CGImageAlphaInfo(rawValue:bitmapInfo.rawValue & CGBitmapInfo.alphaInfoMask.rawValue) + if (bitmapInfo.contains(.byteOrder32Little)) { /* Little endian, for alpha-first we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.PremultipliedFirst) && (alphaInfo != CGImageAlphaInfo.First) && (alphaInfo != CGImageAlphaInfo.NoneSkipFirst)) { + if ((alphaInfo != CGImageAlphaInfo.premultipliedFirst) && (alphaInfo != CGImageAlphaInfo.first) && (alphaInfo != CGImageAlphaInfo.noneSkipFirst)) { shouldRedrawUsingCoreGraphics = true } - } else if ((bitmapInfo.contains(.ByteOrderDefault)) || (bitmapInfo.contains(.ByteOrder32Big))) { + } else if ((bitmapInfo.contains(CGBitmapInfo())) || (bitmapInfo.contains(.byteOrder32Big))) { /* Big endian, for alpha-last we can use this bitmap directly in GL */ - if ((alphaInfo != CGImageAlphaInfo.PremultipliedLast) && (alphaInfo != CGImageAlphaInfo.Last) && (alphaInfo != CGImageAlphaInfo.NoneSkipLast)) { + if ((alphaInfo != CGImageAlphaInfo.premultipliedLast) && (alphaInfo != CGImageAlphaInfo.last) && (alphaInfo != CGImageAlphaInfo.noneSkipLast)) { shouldRedrawUsingCoreGraphics = true } else { /* Can access directly using GL_RGBA pixel format */ @@ -77,16 +77,16 @@ public class PictureInput: ImageSource { if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer.alloc(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer(allocatingCapacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() - let imageContext = CGBitmapContextCreate(imageData, Int(widthToUseForTexture), Int(heightToUseForTexture), 8, Int(widthToUseForTexture) * 4, genericRGBColorspace, CGImageAlphaInfo.PremultipliedFirst.rawValue | CGBitmapInfo.ByteOrder32Little.rawValue) + let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, CGFloat(widthToUseForTexture), CGFloat(heightToUseForTexture)), image) + imageContext?.draw(in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture)), image: image) } else { // Access the raw image bytes directly - dataFromImageDataProvider = 
CGDataProviderCopyData(CGImageGetDataProvider(image)) + dataFromImageDataProvider = image.dataProvider?.data imageData = UnsafeMutablePointer(CFDataGetBytePtr(dataFromImageDataProvider)) } @@ -112,20 +112,20 @@ public class PictureInput: ImageSource { } if (shouldRedrawUsingCoreGraphics) { - imageData.dealloc(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData.deallocateCapacity(Int(widthToUseForTexture * heightToUseForTexture) * 4) } } - public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { - self.init(image:image.CGImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { + self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .Portrait) { + public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) { guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") } - self.init(image:image.CGImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) + self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation) } - public func processImage(synchronously synchronously:Bool = false) { + public func processImage(synchronously:Bool = false) { if synchronously { sharedImageProcessingContext.runOperationSynchronously{ self.updateTargetsWithFramebuffer(self.imageFramebuffer) @@ -139,10 +139,10 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImageToTarget(target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) } } -} \ No newline at end of file +} diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 96725679..bd74eb54 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -2,19 +2,19 @@ import UIKit import OpenGLES public enum PictureFileFormat { - case PNG - case JPEG + case png + case jpeg } public class PictureOutput: ImageConsumer { - public var encodedImageAvailableCallback:(NSData -> ())? - public var encodedImageFormat:PictureFileFormat = .PNG - public var imageAvailableCallback:(UIImage -> ())? + public var encodedImageAvailableCallback:((Data) -> ())? + public var encodedImageFormat:PictureFileFormat = .png + public var imageAvailableCallback:((UIImage) -> ())? public var onlyCaptureNextFrame:Bool = true public let sources = SourceContainer() public let maximumInputs:UInt = 1 - var url:NSURL! + var url:URL! 
public init() { } @@ -22,13 +22,13 @@ public class PictureOutput: ImageConsumer { deinit { } - public func saveNextFrameToURL(url:NSURL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { onlyCaptureNextFrame = true encodedImageFormat = format self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { - try imageData.writeToURL(self.url, options:.DataWritingAtomic) + try imageData.write(to: self.url, options:.dataWritingAtomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") @@ -37,29 +37,29 @@ public class PictureOutput: ImageConsumer { } // TODO: Replace with texture caches - func cgImageFromFramebuffer(framebuffer:Framebuffer) -> CGImage { + func cgImageFromFramebuffer(_ framebuffer:Framebuffer) -> CGImage { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Red) - renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.NoRotation)]) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer.alloc(imageByteSize) + let data = UnsafeMutablePointer(allocatingCapacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() - let dataProvider = CGDataProviderCreateWithData(nil, data, imageByteSize, dataProviderReleaseCallback) + guard let dataProvider = CGDataProvider(dataInfo:nil, data:data, size:imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not allocate a CGDataProvider")} let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImageCreate(Int(framebuffer.size.width), Int(framebuffer.size.height), 8, 32, 4 * Int(framebuffer.size.width), defaultRGBColorSpace, .ByteOrderDefault /*| CGImageAlphaInfo.Last*/, dataProvider, nil, false, .RenderingIntentDefault)! + return CGImage(width:Int(framebuffer.size.width), height:Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! 
} - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if let imageCallback = imageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) // TODO: Let people specify orientations - let image = UIImage(CGImage:cgImageFromBytes, scale:1.0, orientation:.Up) + let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) imageCallback(image) @@ -70,11 +70,11 @@ public class PictureOutput: ImageConsumer { if let imageCallback = encodedImageAvailableCallback { let cgImageFromBytes = cgImageFromFramebuffer(framebuffer) - let image = UIImage(CGImage:cgImageFromBytes, scale:1.0, orientation:.Up) - let imageData:NSData + let image = UIImage(cgImage:cgImageFromBytes, scale:1.0, orientation:.up) + let imageData:Data switch encodedImageFormat { - case .PNG: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here - case .JPEG: imageData = UIImageJPEGRepresentation(image, 0.8)! // TODO: Be able to set image quality + case .png: imageData = UIImagePNGRepresentation(image)! // TODO: Better error handling here + case .jpeg: imageData = UIImageJPEGRepresentation(image, 0.8)! // TODO: Be able to set image quality } imageCallback(imageData) @@ -87,7 +87,7 @@ public class PictureOutput: ImageConsumer { } public extension ImageSource { - public func saveNextFrameToURL(url:NSURL, format:PictureFileFormat) { + public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { let pictureOutput = PictureOutput() pictureOutput.saveNextFrameToURL(url, format:format) self --> pictureOutput @@ -95,13 +95,13 @@ public extension ImageSource { } public extension UIImage { - public func filterWithOperation(operation:T) -> UIImage { + public func filterWithOperation(_ operation:T) -> UIImage { return filterWithPipeline{input, output in input --> operation --> output } } - public func filterWithPipeline(pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> UIImage { + public func filterWithPipeline(_ pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> UIImage { let picture = PictureInput(image:self) var outputImage:UIImage? let pictureOutput = PictureOutput() @@ -116,6 +116,6 @@ public extension UIImage { } // Why are these flipped in the callback definition? 
-func dataProviderReleaseCallback(context:UnsafeMutablePointer, data:UnsafePointer, size:Int) { - UnsafeMutablePointer(data).dealloc(size) +func dataProviderReleaseCallback(_ context:UnsafeMutablePointer?, data:UnsafePointer, size:Int) { + UnsafeMutablePointer(data).deallocateCapacity(size) } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 6f3d0182..b2036b29 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -4,8 +4,8 @@ import UIKit // TODO: Deal with view resizing public class RenderView:UIView, ImageConsumer { public var backgroundRenderColor = Color.Black - public var fillMode = FillMode.PreserveAspectRatio - public var orientation:ImageOrientation = .Portrait + public var fillMode = FillMode.preserveAspectRatio + public var orientation:ImageOrientation = .portrait public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} public let sources = SourceContainer() @@ -35,11 +35,11 @@ public class RenderView:UIView, ImageConsumer { } func commonInit() { - self.contentScaleFactor = UIScreen.mainScreen().scale + self.contentScaleFactor = UIScreen.main().scale let eaglLayer = self.layer as! CAEAGLLayer - eaglLayer.opaque = true - eaglLayer.drawableProperties = [NSNumber(bool:false): kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8: kEAGLDrawablePropertyColorFormat] + eaglLayer.isOpaque = true + eaglLayer.drawableProperties = [NSNumber(value:false): kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8: kEAGLDrawablePropertyColorFormat] } deinit { @@ -57,7 +57,7 @@ public class RenderView:UIView, ImageConsumer { displayRenderbuffer = newDisplayRenderbuffer glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) - sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), fromDrawable:self.layer as! CAEAGLLayer) + sharedImageProcessingContext.context.renderbufferStorage(Int(GL_RENDERBUFFER), from:self.layer as! CAEAGLLayer) var backingWidth:GLint = 0 var backingHeight:GLint = 0 @@ -98,7 +98,7 @@ public class RenderView:UIView, ImageConsumer { glViewport(0, 0, backingSize.width, backingSize.height) } - public func newFramebufferAvailable(framebuffer:Framebuffer, fromSourceIndex:UInt) { + public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { if (displayFramebuffer == nil) { self.createDisplayFramebuffer() } @@ -113,4 +113,4 @@ public class RenderView:UIView, ImageConsumer { glBindRenderbuffer(GLenum(GL_RENDERBUFFER), displayRenderbuffer!) sharedImageProcessingContext.presentBufferForDisplay() } -} \ No newline at end of file +} From 5acea56c54fca85baf32043f17de150f56bcf515 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 22 Jun 2016 12:04:05 -0500 Subject: [PATCH 02/14] Removed some extraneous logging code. 
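Three kinds of leftover diagnostics go away here: the per-function compile-time
profiling flag ("-Xfrontend -debug-time-function-bodies") comes out of
OTHER_SWIFT_FLAGS, a stray debugPrint() is removed from the Mac PictureOutput
deinit, and the Mac MovieInput moves to the trailing-closure spelling of
async (the test targets also pick up SWIFT_VERSION = 3.0). For context, a
minimal sketch, not part of this diff, of the two equivalent dispatch forms,
using the beta-era GCD API that appears in the hunks below:

    let queue = DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.priorityDefault)
    queue.async(execute: { print("explicit execute: parameter") })
    queue.async { print("trailing closure, as used after this change") }
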
--- framework/GPUImage-iOS.xcodeproj/project.pbxproj | 7 +++++-- framework/Source/Mac/MovieInput.swift | 4 ++-- framework/Source/Mac/PictureOutput.swift | 1 - 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/framework/GPUImage-iOS.xcodeproj/project.pbxproj b/framework/GPUImage-iOS.xcodeproj/project.pbxproj index f567fe77..27825d48 100755 --- a/framework/GPUImage-iOS.xcodeproj/project.pbxproj +++ b/framework/GPUImage-iOS.xcodeproj/project.pbxproj @@ -970,6 +970,7 @@ }; BCD1B12E1C66A262001F2BDC = { CreatedOnToolsVersion = 7.2; + LastSwiftMigration = 0800; }; }; }; @@ -1232,7 +1233,7 @@ IPHONEOS_DEPLOYMENT_TARGET = 8.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; - OTHER_SWIFT_FLAGS = "-DGLES -Xfrontend -debug-time-function-bodies"; + OTHER_SWIFT_FLAGS = "-DGLES"; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; TARGETED_DEVICE_FAMILY = "1,2"; @@ -1274,7 +1275,7 @@ GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 8.0; MTL_ENABLE_DEBUG_INFO = NO; - OTHER_SWIFT_FLAGS = "-DGLES -Xfrontend -debug-time-function-bodies"; + OTHER_SWIFT_FLAGS = "-DGLES"; SDKROOT = iphoneos; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; @@ -1333,6 +1334,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImageTests; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Debug; }; @@ -1343,6 +1345,7 @@ LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = com.sunsetlakesoftware.GPUImageTests; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 3.0; }; name = Release; }; diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index 9125692f..77fe1ed7 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -44,7 +44,7 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { - DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.priorityDefault).async(execute: { + DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.priorityDefault).async { guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { @@ -73,7 +73,7 @@ public class MovieInput: ImageSource { self.endProcessing() } } - }) + } }) } diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 46a7cf20..3dda2e3a 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -20,7 +20,6 @@ public class PictureOutput: ImageConsumer { } deinit { - debugPrint("Deallocating picture") } public func saveNextFrameToURL(_ url:URL, format:PictureFileFormat) { From 7cc2f9d2303d6fd86b460f4b2c17e9d5f5ac06bf Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 22 Jun 2016 13:39:12 -0500 Subject: [PATCH 03/14] Whole module optimization leads to a segmentation fault in the GPUImage projects at present. 
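This is a workaround for a toolchain crash rather than a code fix: with
SWIFT_OPTIMIZATION_LEVEL set to "-Owholemodule", the current Swift 3 beta
segfaults while compiling these targets, so the Release configurations fall
back to per-file optimization, presumably to be restored once the compiler
issue is resolved. The change in each Release configuration amounts to:

    SWIFT_OPTIMIZATION_LEVEL = "-O";    /* was "-Owholemodule"; Debug stays at "-Onone" */
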
--- framework/GPUImage-Mac.xcodeproj/project.pbxproj | 2 +- framework/GPUImage-iOS.xcodeproj/project.pbxproj | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index 9c14d86a..542e16c8 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -1343,7 +1343,7 @@ PRODUCT_BUNDLE_IDENTIFIER = "com.sunsetlakesoftware.GPUImage-Mac"; PRODUCT_NAME = GPUImage; SKIP_INSTALL = YES; - SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; + SWIFT_OPTIMIZATION_LEVEL = "-O"; SWIFT_VERSION = 3.0; }; name = Release; diff --git a/framework/GPUImage-iOS.xcodeproj/project.pbxproj b/framework/GPUImage-iOS.xcodeproj/project.pbxproj index 27825d48..81b0ef35 100755 --- a/framework/GPUImage-iOS.xcodeproj/project.pbxproj +++ b/framework/GPUImage-iOS.xcodeproj/project.pbxproj @@ -1277,6 +1277,7 @@ MTL_ENABLE_DEBUG_INFO = NO; OTHER_SWIFT_FLAGS = "-DGLES"; SDKROOT = iphoneos; + SWIFT_OPTIMIZATION_LEVEL = "-O"; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; VERSIONING_SYSTEM = "apple-generic"; @@ -1323,6 +1324,7 @@ PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE = ""; SKIP_INSTALL = YES; + SWIFT_OPTIMIZATION_LEVEL = "-O"; SWIFT_VERSION = 3.0; }; name = Release; From 47575f5ff805df4f32980235a3baa9473b0bf047 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Fri, 24 Jun 2016 17:03:03 -0500 Subject: [PATCH 04/14] Fixed some minor race conditions. --- framework/Source/Mac/Camera.swift | 6 ++++-- framework/Source/Operations/BoxBlur.swift | 8 +++++--- framework/Source/Operations/GaussianBlur.swift | 8 +++++--- .../Source/Operations/SingleComponentGaussianBlur.swift | 8 +++++--- 4 files changed, 19 insertions(+), 11 deletions(-) diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index f4f490c1..646ea1f7 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -184,8 +184,10 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func startCapture() { - self.numberOfFramesCaptured = 0 - self.totalFrameTimeDuringCapture = 0 + sharedImageProcessingContext.runOperationAsynchronously{ + self.numberOfFramesCaptured = 0 + self.totalFrameTimeDuringCapture = 0 + } if (!captureSession.isRunning) { captureSession.startRunning() diff --git a/framework/Source/Operations/BoxBlur.swift b/framework/Source/Operations/BoxBlur.swift index d5dc4478..c66c1357 100755 --- a/framework/Source/Operations/BoxBlur.swift +++ b/framework/Source/Operations/BoxBlur.swift @@ -8,9 +8,11 @@ public class BoxBlur: TwoStageOperation { public var blurRadiusInPixels:Float { didSet { let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) - self.downsamplingFactor = downsamplingFactor - let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - shader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} + sharedImageProcessingContext.runOperationAsynchronously { + self.downsamplingFactor = downsamplingFactor + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + self.shader = crashOnShaderCompileFailure("BoxBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedBoxBlurOfRadius(pixelRadius), 
fragmentShader:fragmentShaderForOptimizedBoxBlurOfRadius(pixelRadius))} + } } } diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index 8d4061f6..9c38d247 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ b/framework/Source/Operations/GaussianBlur.swift @@ -9,9 +9,11 @@ public class GaussianBlur: TwoStageOperation { public var blurRadiusInPixels:Float { didSet { let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) - self.downsamplingFactor = downsamplingFactor - let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + sharedImageProcessingContext.runOperationAsynchronously { + self.downsamplingFactor = downsamplingFactor + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + } } } diff --git a/framework/Source/Operations/SingleComponentGaussianBlur.swift b/framework/Source/Operations/SingleComponentGaussianBlur.swift index 3f1bc0af..919210ef 100644 --- a/framework/Source/Operations/SingleComponentGaussianBlur.swift +++ b/framework/Source/Operations/SingleComponentGaussianBlur.swift @@ -2,9 +2,11 @@ public class SingleComponentGaussianBlur: TwoStageOperation { public var blurRadiusInPixels:Float { didSet { let (sigma, downsamplingFactor) = sigmaAndDownsamplingForBlurRadius(blurRadiusInPixels, limit:8.0, override:overrideDownsamplingOptimization) - self.downsamplingFactor = downsamplingFactor - let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) - shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + sharedImageProcessingContext.runOperationAsynchronously { + self.downsamplingFactor = downsamplingFactor + let pixelRadius = pixelRadiusForBlurSigma(Double(sigma)) + self.shader = crashOnShaderCompileFailure("GaussianBlur"){try sharedImageProcessingContext.programForVertexShader(vertexShaderForOptimizedGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)), fragmentShader:fragmentShaderForOptimizedSingleComponentGaussianBlurOfRadius(pixelRadius, sigma:Double(sigma)))} + } } } From a47862952b1587cb6b65ed332e29062939b44819 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 6 Jul 2016 10:40:42 -0500 Subject: [PATCH 05/14] Minor updates to centralize some deprecation warnings, replace M_PI with .pi. 
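The deprecation-warning cleanup introduces two shared constants in
SerialDispatch.swift, standardProcessingQueuePriority and
lowProcessingQueuePriority, so the availability check that picks between the
old priority-based and the new QoS-based global-queue attributes lives in one
place; Camera and MovieInput then use those constants instead of naming
DispatchQueue.GlobalAttributes at each call site. The math change swaps M_PI
for the floating-point types' .pi member. A minimal sketch mirroring both
patterns from the hunks below (blurAngle stands in for any angle in degrees):

    let cameraQueue = DispatchQueue.global(attributes: standardProcessingQueuePriority)
    let blurAngle = 45.0
    let xOffset = Float(cos(blurAngle * .pi / 180.0))    // was cos(blurAngle * M_PI / 180.0)
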
--- framework/GPUImage-Mac.xcodeproj/project.pbxproj | 4 ++-- framework/Source/Mac/Camera.swift | 4 ++-- framework/Source/Mac/MovieInput.swift | 2 +- framework/Source/Operations/GaussianBlur.swift | 4 ++-- framework/Source/Operations/MotionBlur.swift | 8 ++++---- framework/Source/SerialDispatch.swift | 16 ++++++++++++++++ 6 files changed, 27 insertions(+), 11 deletions(-) diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index 542e16c8..bc8ce2c9 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -228,7 +228,7 @@ BC4EE1681CB34B8900AD8A65 /* HistogramDisplay.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = HistogramDisplay.vsh; path = Source/Operations/Shaders/HistogramDisplay.vsh; sourceTree = ""; }; BC4EE16B1CB34D7B00AD8A65 /* RawDataInput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = RawDataInput.swift; path = Source/RawDataInput.swift; sourceTree = ""; }; BC4EE16E1CB3554200AD8A65 /* RawDataOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = RawDataOutput.swift; path = Source/RawDataOutput.swift; sourceTree = ""; }; - BC4EE1731CB3711600AD8A65 /* GaussianBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = GaussianBlur.swift; path = Source/Operations/GaussianBlur.swift; sourceTree = ""; }; + BC4EE1731CB3711600AD8A65 /* GaussianBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; lineEnding = 0; name = GaussianBlur.swift; path = Source/Operations/GaussianBlur.swift; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.swift; }; BC5B866E1CC07145006CDE75 /* PictureOutput.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = PictureOutput.swift; path = Source/Mac/PictureOutput.swift; sourceTree = ""; }; BC6E7CAB1C39A9D8006DF678 /* GPUImage.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = GPUImage.framework; sourceTree = BUILT_PRODUCTS_DIR; }; BC6E7CB01C39A9D8006DF678 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist; name = Info.plist; path = Source/Mac/Info.plist; sourceTree = ""; }; @@ -460,7 +460,7 @@ BCFF46BD1CB8ACDA00A0C521 /* TiltShift_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = TiltShift_GL.fsh; path = Source/Operations/Shaders/TiltShift_GL.fsh; sourceTree = ""; }; BCFF46BF1CB9556B00A0C521 /* WhiteBalance.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = WhiteBalance.swift; path = Source/Operations/WhiteBalance.swift; sourceTree = ""; }; BCFF46C11CB9560700A0C521 /* WhiteBalance_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = WhiteBalance_GL.fsh; path = Source/Operations/Shaders/WhiteBalance_GL.fsh; sourceTree = ""; }; - BCFF46C31CB9565F00A0C521 /* MotionBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = MotionBlur.swift; path = Source/Operations/MotionBlur.swift; sourceTree = ""; }; + BCFF46C31CB9565F00A0C521 /* MotionBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; lineEnding = 0; name = MotionBlur.swift; path = 
Source/Operations/MotionBlur.swift; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.swift; }; BCFF46C51CB968DE00A0C521 /* ImageBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ImageBuffer.swift; path = Source/Operations/ImageBuffer.swift; sourceTree = ""; }; BCFF46C71CB96AB100A0C521 /* LowPassFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LowPassFilter.swift; path = Source/Operations/LowPassFilter.swift; sourceTree = ""; }; BCFF46C91CB96BD700A0C521 /* HighPassFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = HighPassFilter.swift; path = Source/Operations/HighPassFilter.swift; sourceTree = ""; }; diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 646ea1f7..ce1f268a 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -35,8 +35,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.priorityDefault) - let audioProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.priorityBackground) + let cameraProcessingQueue = DispatchQueue.global(attributes:standardProcessingQueuePriority) + let audioProcessingQueue = DispatchQueue.global(attributes:lowProcessingQueuePriority) var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index 77fe1ed7..7ff8b721 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -44,7 +44,7 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { - DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.priorityDefault).async { + DispatchQueue.global(attributes:standardProcessingQueuePriority).async { guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { diff --git a/framework/Source/Operations/GaussianBlur.swift b/framework/Source/Operations/GaussianBlur.swift index 9c38d247..b1ed4fec 100755 --- a/framework/Source/Operations/GaussianBlur.swift +++ b/framework/Source/Operations/GaussianBlur.swift @@ -46,7 +46,7 @@ func pixelRadiusForBlurSigma(_ sigma:Double) -> UInt { var calculatedSampleRadius:UInt = 0 if (sigma >= 1.0) { // Avoid a divide-by-zero error here // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel - calculatedSampleRadius = UInt(floor(sqrt(-2.0 * pow(sigma, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(sigma, 2.0))) ))) + calculatedSampleRadius = UInt(floor(sqrt(-2.0 * pow(sigma, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * .pi * pow(sigma, 2.0))) ))) calculatedSampleRadius += calculatedSampleRadius % 2 // There's nothing to gain from handling odd radius sizes, due to the optimizations I use } @@ -60,7 +60,7 @@ func standardGaussianWeightsForRadius(_ blurRadius:UInt, sigma:Double) -> [Doubl var gaussianWeights = [Double]() var sumOfWeights = 0.0 for gaussianWeightIndex in 0...blurRadius { - let weight = (1.0 / 
sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(Double(gaussianWeightIndex), 2.0) / (2.0 * pow(sigma, 2.0))) + let weight = (1.0 / sqrt(2.0 * .pi * pow(sigma, 2.0))) * exp(-pow(Double(gaussianWeightIndex), 2.0) / (2.0 * pow(sigma, 2.0))) gaussianWeights.append(weight) if (gaussianWeightIndex == 0) { sumOfWeights += weight diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index 87917a5b..25f93bf6 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -19,12 +19,12 @@ public class MotionBlur: BasicOperation { let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) let directionalTexelStep:Position if outputRotation.flipsDimensions() { - let xOffset = blurSize * Float(sin(Double(blurAngle) * M_PI / 180.0)) * aspectRatio * texelSize.width - let yOffset = blurSize * Float(cos(Double(blurAngle) * M_PI / 180.0)) * texelSize.width + let xOffset = blurSize * Float(sin(Double(blurAngle) * .pi / 180.0)) * aspectRatio * texelSize.width + let yOffset = blurSize * Float(cos(Double(blurAngle) * .pi / 180.0)) * texelSize.width directionalTexelStep = Position(xOffset, yOffset) } else { - let xOffset = blurSize * Float(cos(Double(blurAngle) * M_PI / 180.0)) * aspectRatio * texelSize.width - let yOffset = blurSize * Float(sin(Double(blurAngle) * M_PI / 180.0)) * texelSize.width + let xOffset = blurSize * Float(cos(Double(blurAngle) * .pi / 180.0)) * aspectRatio * texelSize.width + let yOffset = blurSize * Float(sin(Double(blurAngle) * .pi / 180.0)) * texelSize.width directionalTexelStep = Position(xOffset, yOffset) } diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 6c293363..bf45d2ae 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -18,6 +18,22 @@ extension SerialDispatch { #else +public let standardProcessingQueuePriority:DispatchQueue.GlobalAttributes = { + if #available(iOS 10, OSX 10.10, *) { + return DispatchQueue.GlobalAttributes.qosDefault + } else { + return DispatchQueue.GlobalAttributes.priorityDefault + } +}() + +public let lowProcessingQueuePriority:DispatchQueue.GlobalAttributes = { + if #available(iOS 10, OSX 10.10, *) { + return DispatchQueue.GlobalAttributes.qosBackground + } else { + return DispatchQueue.GlobalAttributes.priorityLow + } +}() + func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { if (Thread.isMainThread()) { mainThreadOperation() From 9c4e29619cb9ded84fa82226b69dc52d02a0b1a8 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 6 Jul 2016 11:44:11 -0500 Subject: [PATCH 06/14] Updated based on Xcode 8 beta 2 API changes. 
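Beta 2 turns several Foundation type methods into properties and shortens some
option-set case names. The three spellings affected here, gathered into one
illustrative function (the function itself and its names are not part of this
diff):

    import Foundation

    func save(_ imageData: Data, to url: URL, checking path: String) throws {
        let onMainThread = Thread.isMainThread                          // beta 1: Thread.isMainThread()
        let fileExists = FileManager.default.fileExists(atPath: path)   // beta 1: FileManager.default()
        print("main thread: \(onMainThread), already on disk: \(fileExists)")
        try imageData.write(to: url, options: .atomic)                  // beta 1: .dataWritingAtomic
    }
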
--- framework/Source/Mac/PictureOutput.swift | 2 +- framework/Source/SerialDispatch.swift | 4 ++-- framework/Source/ShaderProgram.swift | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 3dda2e3a..59d5d23a 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -28,7 +28,7 @@ public class PictureOutput: ImageConsumer { self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { - try imageData.write(to: self.url, options:.dataWritingAtomic) + try imageData.write(to: self.url, options:.atomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index bf45d2ae..324d8a28 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -35,7 +35,7 @@ public let lowProcessingQueuePriority:DispatchQueue.GlobalAttributes = { }() func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { - if (Thread.isMainThread()) { + if (Thread.isMainThread) { mainThreadOperation() } else { DispatchQueue.main.async(execute: mainThreadOperation) @@ -43,7 +43,7 @@ func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { } func runOnMainQueue(_ mainThreadOperation:() -> ()) { - if (Thread.isMainThread()) { + if (Thread.isMainThread) { mainThreadOperation() } else { DispatchQueue.main.sync(execute: mainThreadOperation) diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 67817642..ae8d693f 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -257,7 +257,7 @@ public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() t public func shaderFromFile(_ file:URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) - guard (FileManager.default().fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard (FileManager.default.fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} let fragmentShaderString = try NSString(contentsOfFile:file.path!, encoding:String.Encoding.ascii.rawValue) return String(fragmentShaderString) From f478f485b1c5f8638c4a89bf9dea7d3855babc32 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Thu, 7 Jul 2016 15:50:13 -0500 Subject: [PATCH 07/14] Fixed a bug with Camera.stopRunning(), updated for Xcode 8 beta 2, fixed a series of race conditions. 
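The camera bug: stopCapture() guarded the stopRunning() call with
(!captureSession.isRunning), so a session that was actually running was never
stopped. The corrected Mac version, from the hunk below (the iOS camera
received the same inverted test in patch 01):

    public func stopCapture() {
        if (captureSession.isRunning) {    // was (!captureSession.isRunning), which never fired while running
            captureSession.stopRunning()
        }
    }

The race-condition fixes follow one pattern: state shared with the render
pipeline, such as the framebuffer handoff in
BasicOperation.transmitPreviousImageToTarget, now runs inside
sharedImageProcessingContext.runOperationAsynchronously, and UIKit label
updates in the recorder example are dispatched onto the main queue.
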
--- .../FilterShowcaseWindowController.swift | 3 +- .../SimpleMovieFilter/AppDelegate.swift | 2 +- .../SimpleImageFilter/ViewController.swift | 4 +-- .../SimpleMovieFilter/ViewController.swift | 2 +- .../SimpleVideoFilter/ViewController.swift | 2 +- .../SimpleVideoRecorder/ViewController.swift | 11 ++++--- .../GPUImage-Mac.xcodeproj/project.pbxproj | 2 ++ framework/Source/BasicOperation.swift | 10 +++--- framework/Source/Mac/Camera.swift | 2 +- framework/Source/Mac/OpenGLContext.swift | 2 +- framework/Source/Operations/Halftone.swift | 8 +++-- framework/Source/Pipeline.swift | 32 +++++++++++------- framework/Source/SerialDispatch.swift | 2 +- framework/Source/iOS/Camera.swift | 6 ++-- framework/Source/iOS/MovieInput.swift | 4 +-- framework/Source/iOS/MovieOutput.swift | 10 +++--- framework/Source/iOS/PictureOutput.swift | 2 +- framework/Tests/Pipeline_Tests.swift | 33 ++++++++++++------- 18 files changed, 83 insertions(+), 54 deletions(-) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index 817ccff9..3b8fd055 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -32,6 +32,7 @@ class FilterShowcaseWindowController: NSWindowController { do { videoCamera = try Camera(sessionPreset:AVCaptureSessionPreset1280x720) videoCamera.runBenchmark = true + videoCamera.startCapture() } catch { fatalError("Couldn't initialize camera with error: \(error)") } @@ -43,7 +44,7 @@ class FilterShowcaseWindowController: NSWindowController { currentlySelectedRow = row // Clean up everything from the previous filter selection first - videoCamera.stopCapture() +// videoCamera.stopCapture() videoCamera.removeAllTargets() currentFilterOperation?.filter.removeAllTargets() currentFilterOperation?.secondInput?.removeAllTargets() diff --git a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift index f49b40b4..188cc4f1 100644 --- a/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift +++ b/examples/Mac/SimpleMovieFilter/SimpleMovieFilter/AppDelegate.swift @@ -17,7 +17,7 @@ class AppDelegate: NSObject, NSApplicationDelegate { } func applicationDidFinishLaunching(_ aNotification: Notification) { - let bundleURL = Bundle.main().resourceURL! + let bundleURL = Bundle.main.resourceURL! let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! do { diff --git a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift index c209e695..699cf25f 100644 --- a/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift +++ b/examples/iOS/SimpleImageFilter/SimpleImageFilter/ViewController.swift @@ -18,9 +18,9 @@ class ViewController: UIViewController { let pngImage = UIImagePNGRepresentation(filteredImage)! do { - let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) let fileURL = URL(string:"test.png", relativeTo:documentsDir)! 
- try pngImage.write(to:fileURL, options:.dataWritingAtomic) + try pngImage.write(to:fileURL, options:.atomic) } catch { print("Couldn't write to file with error: \(error)") } diff --git a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift index d985aeb6..173b6b8a 100644 --- a/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift +++ b/examples/iOS/SimpleMovieFilter/SimpleMovieFilter/ViewController.swift @@ -11,7 +11,7 @@ class ViewController: UIViewController { override func viewDidLayoutSubviews() { super.viewDidLayoutSubviews() - let bundleURL = Bundle.main().resourceURL! + let bundleURL = Bundle.main.resourceURL! let movieURL = URL(string:"sample_iPod.m4v", relativeTo:bundleURL)! do { diff --git a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift index 32ea9cee..b17a3c99 100755 --- a/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift +++ b/examples/iOS/SimpleVideoFilter/SimpleVideoFilter/ViewController.swift @@ -46,7 +46,7 @@ class ViewController: UIViewController { @IBAction func capture(_ sender: AnyObject) { print("Capture") do { - let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) saturationFilter.saveNextFrameToURL(URL(string:"TestImage.png", relativeTo:documentsDir)!, format:.png) } catch { print("Couldn't save image: \(error)") diff --git a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift index 35a542e9..288238db 100644 --- a/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift +++ b/examples/iOS/SimpleVideoRecorder/SimpleVideoRecorder/ViewController.swift @@ -31,10 +31,10 @@ class ViewController: UIViewController { if (!isRecording) { do { self.isRecording = true - let documentsDir = try FileManager.default().urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) + let documentsDir = try FileManager.default.urlForDirectory(.documentDirectory, in:.userDomainMask, appropriateFor:nil, create:true) let fileURL = URL(string:"test.mp4", relativeTo:documentsDir)! do { - try FileManager.default().removeItem(at:fileURL) + try FileManager.default.removeItem(at:fileURL) } catch { } @@ -42,7 +42,10 @@ class ViewController: UIViewController { camera.audioEncodingTarget = movieOutput filter --> movieOutput! movieOutput!.startRecording() - (sender as! UIButton).titleLabel?.text = "Stop" + DispatchQueue.main.async { + // Label not updating on the main thread, for some reason, so dispatching slightly after this + (sender as! UIButton).titleLabel!.text = "Stop" + } } catch { fatalError("Couldn't initialize movie, error: \(error)") } @@ -50,7 +53,7 @@ class ViewController: UIViewController { movieOutput?.finishRecording{ self.isRecording = false DispatchQueue.main.async { - (sender as! UIButton).titleLabel?.text = "Record" + (sender as! 
UIButton).titleLabel!.text = "Record" } self.camera.audioEncodingTarget = nil self.movieOutput = nil diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index bc8ce2c9..8e9dfcc8 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -146,6 +146,7 @@ BCBEC0C61CCD2E6200B70ED7 /* Histogram.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0C51CCD2E6200B70ED7 /* Histogram.swift */; }; BCBEC0E01CCD492D00B70ED7 /* HistogramEqualization.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0DF1CCD492D00B70ED7 /* HistogramEqualization.swift */; }; BCBEC0FA1CCD993900B70ED7 /* MovieInput.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCBEC0F91CCD993900B70ED7 /* MovieInput.swift */; }; + BCCAED291D2ED9C700DE28D1 /* Timestamp.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB825BA1CC9C96B00339790 /* Timestamp.swift */; }; BCD1B14A1C66AE00001F2BDC /* SerialDispatch.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD1B1491C66AE00001F2BDC /* SerialDispatch.swift */; }; BCD1B14C1C66B225001F2BDC /* Pipeline.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCD1B14B1C66B225001F2BDC /* Pipeline.swift */; }; BCE111A51CBC94FD005293A4 /* AverageLuminanceExtractor.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCE111A41CBC94FD005293A4 /* AverageLuminanceExtractor.swift */; }; @@ -1192,6 +1193,7 @@ BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */, BC7FD1941CB1D3E000037949 /* Size.swift in Sources */, BC0923A51C92669900A2ADFA /* FramebufferCache.swift in Sources */, + BCCAED291D2ED9C700DE28D1 /* Timestamp.swift in Sources */, BC6E7CC81C39AD9E006DF678 /* ShaderProgram.swift in Sources */, BC7FD0FF1CB071DC00037949 /* Color.swift in Sources */, BC4C85EF1C9F043400FD95D8 /* ConvertedShaders_GL.swift in Sources */, diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 5b0813ed..7f02b9cb 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -183,9 +183,11 @@ public class BasicOperation: ImageProcessingOperation { } public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { - guard let renderFramebuffer = renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return } - - renderFramebuffer.lock() - target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) + sharedImageProcessingContext.runOperationAsynchronously{ + guard let renderFramebuffer = self.renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return } + + renderFramebuffer.lock() + target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) + } } } diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index ce1f268a..3a0876dc 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -195,7 +195,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func stopCapture() { - if (!captureSession.isRunning) { + if (captureSession.isRunning) { captureSession.stopRunning() } } diff --git a/framework/Source/Mac/OpenGLContext.swift b/framework/Source/Mac/OpenGLContext.swift index 58df9db2..49416c93 100755 --- a/framework/Source/Mac/OpenGLContext.swift +++ b/framework/Source/Mac/OpenGLContext.swift @@ -1,4 +1,4 @@ -import OpenGL.GL3 +import OpenGL.GL import Cocoa // TODO: Figure out way to allow for multiple 
contexts for different GPUs diff --git a/framework/Source/Operations/Halftone.swift b/framework/Source/Operations/Halftone.swift index 8cff970b..9bf1f24f 100644 --- a/framework/Source/Operations/Halftone.swift +++ b/framework/Source/Operations/Halftone.swift @@ -1,8 +1,10 @@ public class Halftone: BasicOperation { public var fractionalWidthOfAPixel:Float = 0.01 { didSet { - let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048) - uniformSettings["fractionalWidthOfPixel"] = max(fractionalWidthOfAPixel, imageWidth) + sharedImageProcessingContext.runOperationAsynchronously{ + let imageWidth = 1.0 / Float(self.renderFramebuffer?.size.width ?? 2048) + self.uniformSettings["fractionalWidthOfPixel"] = max(self.fractionalWidthOfAPixel, imageWidth) + } } } @@ -11,4 +13,4 @@ public class Halftone: BasicOperation { ({fractionalWidthOfAPixel = 0.01})() } -} \ No newline at end of file +} diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index 9b7e71f4..c6ec55e2 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -1,5 +1,6 @@ // MARK: - // MARK: Basic types +import Foundation public protocol ImageSource { var targets:TargetContainer { get } @@ -89,37 +90,44 @@ class WeakImageConsumer { public class TargetContainer:Sequence { var targets = [WeakImageConsumer]() var count:Int { get {return targets.count}} - + let dispatchQueue = DispatchQueue(label:"com.sunsetlakesoftware.GPUImage.targetContainerQueue", attributes: []) + public init() { } public func append(_ target:ImageConsumer, indexAtTarget:UInt) { // TODO: Don't allow the addition of a target more than once - targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget)) + dispatchQueue.async{ + self.targets.append(WeakImageConsumer(value:target, indexAtTarget:indexAtTarget)) + } } public func makeIterator() -> AnyIterator<(ImageConsumer, UInt)> { var index = 0 return AnyIterator { () -> (ImageConsumer, UInt)? in - if (index >= self.targets.count) { - return nil - } - - while (self.targets[index].value == nil) { - self.targets.remove(at:index) + return self.dispatchQueue.sync{ if (index >= self.targets.count) { return nil } + + while (self.targets[index].value == nil) { + self.targets.remove(at:index) + if (index >= self.targets.count) { + return nil + } + } + + index += 1 + return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) } - - index += 1 - return (self.targets[index - 1].value!, self.targets[index - 1].indexAtTarget) } } public func removeAll() { - targets.removeAll() + dispatchQueue.async{ + self.targets.removeAll() + } } } diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 324d8a28..4275e5bc 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -50,7 +50,7 @@ func runOnMainQueue(_ mainThreadOperation:() -> ()) { } } -@warn_unused_result func runOnMainQueue<T>(_ mainThreadOperation:() -> T) -> T { +func runOnMainQueue<T>(_ mainThreadOperation:() -> T) -> T { var returnedValue: T!
runOnMainQueue { returnedValue = mainThreadOperation() diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 5c80c32a..8e377a22 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -181,7 +181,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let bufferHeight = CVPixelBufferGetHeight(cameraFrame) let currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) - CVPixelBufferLockBaseAddress(cameraFrame, 0) + CVPixelBufferLockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) sharedImageProcessingContext.runOperationAsynchronously{ let cameraFramebuffer:Framebuffer @@ -236,7 +236,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer glBindTexture(GLenum(GL_TEXTURE_2D), cameraFramebuffer.texture) glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(cameraFrame)) } - CVPixelBufferUnlockBaseAddress(cameraFrame, 0) + CVPixelBufferUnlockBaseAddress(cameraFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) cameraFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(currentTime)) self.updateTargetsWithFramebuffer(cameraFramebuffer) @@ -275,7 +275,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } public func stopCapture() { - if (!captureSession.isRunning) { + if (captureSession.isRunning) { captureSession.stopRunning() } } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 64452173..19b2fad9 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -141,7 +141,7 @@ public class MovieInput: ImageSource { func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) - CVPixelBufferLockBaseAddress(movieFrame, 0) + CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) let conversionMatrix = colorConversionMatrix601FullRangeDefault // TODO: Get this color query working @@ -172,7 +172,7 @@ public class MovieInput: ImageSource { let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false) convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix) - CVPixelBufferUnlockBaseAddress(movieFrame, 0) + CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) movieFramebuffer.timingStyle = .videoFrame(timestamp:Timestamp(withSampleTime)) self.updateTargetsWithFramebuffer(movieFramebuffer) diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index 0a0df377..d5651ed6 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -74,9 +74,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { * Tagging the resulting video file as BT.601, is the best option right now. * Creating a proper BT.709 video is not possible at the moment. 
*/ - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, kCVAttachmentMode_ShouldPropagate) - CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, kCVAttachmentMode_ShouldPropagate) + CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferColorPrimariesKey, kCVImageBufferColorPrimaries_ITU_R_709_2, .shouldPropagate) + CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferYCbCrMatrixKey, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .shouldPropagate) + CVBufferSetAttachment(self.pixelBuffer!, kCVImageBufferTransferFunctionKey, kCVImageBufferTransferFunction_ITU_R_709_2, .shouldPropagate) let bufferSize = GLSize(self.size) var cachedTextureRef:CVOpenGLESTexture? = nil @@ -152,7 +152,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { debugPrint("Problem appending pixel buffer at time: \(frameTime)") } - CVPixelBufferUnlockBaseAddress(pixelBuffer!, 0) + CVPixelBufferUnlockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) if !sharedImageProcessingContext.supportsTextureCaches() { pixelBuffer = nil } @@ -166,7 +166,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { renderFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(Color.Black) - CVPixelBufferLockBaseAddress(pixelBuffer, 0) + CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) if sharedImageProcessingContext.supportsTextureCaches() { diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index bd74eb54..11ee74a3 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -28,7 +28,7 @@ public class PictureOutput: ImageConsumer { self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { - try imageData.write(to: self.url, options:.dataWritingAtomic) + try imageData.write(to: self.url, options:.atomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") diff --git a/framework/Tests/Pipeline_Tests.swift b/framework/Tests/Pipeline_Tests.swift index 86ad111a..23d040bd 100755 --- a/framework/Tests/Pipeline_Tests.swift +++ b/framework/Tests/Pipeline_Tests.swift @@ -11,7 +11,10 @@ class FakeOperation: ImageProcessingOperation { self.name = name } - func newFramebufferAvailable(framebuffer:Framebuffer, fromProducer:ImageSource) { + func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { + } + + func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { } } @@ -19,13 +22,16 @@ class FakeRenderView: ImageConsumer { let sources = SourceContainer() let maximumInputs:UInt = 1 - func newFramebufferAvailable(framebuffer:Framebuffer, fromProducer:ImageSource) { + func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { } } class FakeCamera: ImageSource { let targets = TargetContainer() + func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + } + func 
newCameraFrame() { // Framebuffer has size, orientation encoded in it @@ -46,15 +52,15 @@ class Pipeline_Tests: XCTestCase { // All operations have been added and should have a strong reference var operation1:FakeOperation? = FakeOperation(name:"Operation 1") - targetContainer.append(operation1!) + targetContainer.append(operation1!, indexAtTarget:0) var operation2:FakeOperation? = FakeOperation(name:"Operation 2") - targetContainer.append(operation2!) - let operation3:FakeOperation? = FakeOperation(name:"Operation 3") - targetContainer.append(operation3!) + targetContainer.append(operation2!, indexAtTarget:0) + var operation3:FakeOperation? = FakeOperation(name:"Operation 3") + targetContainer.append(operation3!, indexAtTarget:0) var operation4:FakeOperation? = FakeOperation(name:"Operation 4") - targetContainer.append(operation4!) + targetContainer.append(operation4!, indexAtTarget:0) - for (index, target) in targetContainer.enumerate() { + for (index, (target, _)) in targetContainer.enumerated() { let operation = target as! FakeOperation switch index { case 0: XCTAssert(operation.name == "Operation 1") @@ -69,7 +75,7 @@ operation2 = nil operation4 = nil - for (index, target) in targetContainer.enumerate() { + for (index, (target, _)) in targetContainer.enumerated() { let operation = target as! FakeOperation switch index { case 0: XCTAssert(operation.name == "Operation 1") @@ -80,13 +86,18 @@ operation1 = nil - for (index, target) in targetContainer.enumerate() { + for (index, (target, _)) in targetContainer.enumerated() { let operation = target as! FakeOperation switch index { case 0: XCTAssert(operation.name == "Operation 3") - default: XCTFail("Should not have hit an index this high") + default: XCTFail("Should not have hit an index this high") } } + + operation3 = nil + for (_, (_, _)) in targetContainer.enumerated() { + XCTFail("Should not be any targets left in the container") + } } func testSourceContainer() { From f9fc5181e1a4f2aaed9cfe94d1920d5e61b5d167 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Mon, 11 Jul 2016 17:19:11 -0500 Subject: [PATCH 08/14] Some API renaming to bring things in line with Swift 3.
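
These renames follow the Swift 3 API Design Guidelines: enum cases and static
properties become lowerCamelCase (Color.Black -> Color.black, Position.Center ->
Position.center, Matrix4x4.Identity -> Matrix4x4.identity), and trailing
prepositions move out of the method base name into the first argument label
(transmitPreviousImageToTarget(_:atIndex:) -> transmitPreviousImage(to:atIndex:),
texelSizeForRotation(_:) -> texelSize(for:)). A before/after sketch, using
stand-in declarations that mirror, but are not, the GPUImage sources:

    protocol ImageConsumer: AnyObject {}

    struct Color {
        // Swift 2.x: public static let Black = Color(...)
        static let black = Color() // static members are now lowerCamelCase
    }

    protocol ImageSource {
        // Swift 2.x: func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt)
        // Swift 3: the preposition becomes the first argument label, so call
        // sites read as a sentence: source.transmitPreviousImage(to:view, atIndex:0)
        func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt)
    }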
--- .../FilterShowcase/FilterOperations.swift | 2 +- .../GPUImage-Mac.xcodeproj/project.pbxproj | 2 +- framework/Source/BasicOperation.swift | 4 +-- framework/Source/CameraConversion.swift | 2 +- framework/Source/Color.swift | 30 +++++++++---------- framework/Source/Framebuffer.swift | 6 ++-- framework/Source/FramebufferCache.swift | 2 +- framework/Source/ImageGenerator.swift | 2 +- framework/Source/Mac/Camera.swift | 2 +- framework/Source/Mac/MovieInput.swift | 14 ++++----- framework/Source/Mac/MovieOutput.swift | 2 +- framework/Source/Mac/PictureInput.swift | 2 +- framework/Source/Mac/PictureOutput.swift | 2 +- framework/Source/Mac/RenderView.swift | 2 +- framework/Source/Matrix.swift | 6 ++-- framework/Source/OpenGLRendering.swift | 2 +- framework/Source/OperationGroup.swift | 4 +-- .../Source/Operations/BulgeDistortion.swift | 6 ++-- .../Source/Operations/ChromaKeyBlend.swift | 6 ++-- .../Source/Operations/ChromaKeying.swift | 6 ++-- .../Source/Operations/CircleGenerator.swift | 2 +- .../Source/Operations/ColorMatrixFilter.swift | 6 ++-- .../Source/Operations/Convolution3x3.swift | 6 ++-- framework/Source/Operations/Crop.swift | 2 +- .../Operations/CrosshairGenerator.swift | 6 ++-- framework/Source/Operations/FalseColor.swift | 6 ++-- .../Operations/GlassSphereRefraction.swift | 6 ++-- .../Operations/HighlightAndShadowTint.swift | 10 +++---- framework/Source/Operations/Histogram.swift | 2 +- framework/Source/Operations/ImageBuffer.swift | 2 +- .../Source/Operations/LanczosResampling.swift | 4 +-- .../Source/Operations/LineGenerator.swift | 6 ++-- framework/Source/Operations/MotionBlur.swift | 2 +- .../Source/Operations/MotionDetector.swift | 4 +-- .../Source/Operations/PinchDistortion.swift | 6 ++-- .../Source/Operations/PolarPixellate.swift | 6 ++-- framework/Source/Operations/Sharpen.swift | 2 +- .../Source/Operations/SphereRefraction.swift | 6 ++-- .../Source/Operations/StretchDistortion.swift | 6 ++-- .../Source/Operations/SwirlDistortion.swift | 6 ++-- .../Operations/TransformOperation.swift | 4 +-- framework/Source/Operations/Vignette.swift | 10 +++---- framework/Source/Operations/ZoomBlur.swift | 6 ++-- framework/Source/Pipeline.swift | 10 +++---- framework/Source/Position.swift | 6 ++-- framework/Source/RawDataInput.swift | 2 +- framework/Source/RawDataOutput.swift | 2 +- framework/Source/SerialDispatch.swift | 4 +-- framework/Source/ShaderProgram.swift | 18 +++++------ framework/Source/ShaderUniformSettings.swift | 4 +-- framework/Source/TextureInput.swift | 2 +- .../Source/TextureSamplingOperation.swift | 2 +- framework/Source/Timestamp.swift | 10 +++---- framework/Source/TwoStageOperation.swift | 4 +-- framework/Source/iOS/Camera.swift | 2 +- framework/Source/iOS/MovieInput.swift | 14 ++++----- framework/Source/iOS/MovieOutput.swift | 2 +- framework/Source/iOS/PictureInput.swift | 2 +- framework/Source/iOS/PictureOutput.swift | 2 +- framework/Source/iOS/RenderView.swift | 2 +- framework/Tests/Pipeline_Tests.swift | 4 +-- 61 files changed, 155 insertions(+), 157 deletions(-) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index dc60509f..234cdfd7 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -422,7 +422,7 @@ let filterOperations: Array = [ let circleGenerator = CircleGenerator(size:Size(width:1280, height:720)) #endif castFilter.mask = circleGenerator - 
circleGenerator.renderCircleOfRadius(0.25, center:Position.Center, circleColor:Color.White, backgroundColor:Color.Transparent) + circleGenerator.renderCircleOfRadius(0.25, center:Position.center, circleColor:Color.white, backgroundColor:Color.transparent) camera --> castFilter --> outputView return nil }) diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index 8e9dfcc8..b30a723c 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -461,7 +461,7 @@ BCFF46BD1CB8ACDA00A0C521 /* TiltShift_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = TiltShift_GL.fsh; path = Source/Operations/Shaders/TiltShift_GL.fsh; sourceTree = ""; }; BCFF46BF1CB9556B00A0C521 /* WhiteBalance.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = WhiteBalance.swift; path = Source/Operations/WhiteBalance.swift; sourceTree = ""; }; BCFF46C11CB9560700A0C521 /* WhiteBalance_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = WhiteBalance_GL.fsh; path = Source/Operations/Shaders/WhiteBalance_GL.fsh; sourceTree = ""; }; - BCFF46C31CB9565F00A0C521 /* MotionBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; lineEnding = 0; name = MotionBlur.swift; path = Source/Operations/MotionBlur.swift; sourceTree = ""; xcLanguageSpecificationIdentifier = xcode.lang.swift; }; + BCFF46C31CB9565F00A0C521 /* MotionBlur.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; lineEnding = 0; name = MotionBlur.swift; path = Source/Operations/MotionBlur.swift; sourceTree = ""; }; BCFF46C51CB968DE00A0C521 /* ImageBuffer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ImageBuffer.swift; path = Source/Operations/ImageBuffer.swift; sourceTree = ""; }; BCFF46C71CB96AB100A0C521 /* LowPassFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = LowPassFilter.swift; path = Source/Operations/LowPassFilter.swift; sourceTree = ""; }; BCFF46C91CB96BD700A0C521 /* HighPassFilter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = HighPassFilter.swift; path = Source/Operations/HighPassFilter.swift; sourceTree = ""; }; diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index 7f02b9cb..c68eec04 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -15,7 +15,7 @@ public class BasicOperation: ImageProcessingOperation { public let maximumInputs:UInt public var overriddenOutputSize:Size? public var overriddenOutputRotation:Rotation? - public var backgroundColor = Color.Black + public var backgroundColor = Color.black public var drawUnmodifiedImageOutsideOfMask:Bool = true public var mask:ImageSource? 
{ didSet { @@ -182,7 +182,7 @@ public class BasicOperation: ImageProcessingOperation { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { sharedImageProcessingContext.runOperationAsynchronously{ guard let renderFramebuffer = self.renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return } diff --git a/framework/Source/CameraConversion.swift b/framework/Source/CameraConversion.swift index 96622843..8abe5b73 100644 --- a/framework/Source/CameraConversion.swift +++ b/framework/Source/CameraConversion.swift @@ -29,7 +29,7 @@ public func convertYUVToRGB(shader:ShaderProgram, luminanceFramebuffer:Framebuff textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)] } resultFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Black) + clearFramebufferWithColor(Color.black) var uniformSettings = ShaderUniformSettings() uniformSettings["colorConversionMatrix"] = colorConversionMatrix renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:textureProperties) diff --git a/framework/Source/Color.swift b/framework/Source/Color.swift index 251d36b7..370e4f00 100644 --- a/framework/Source/Color.swift +++ b/framework/Source/Color.swift @@ -1,20 +1,20 @@ public struct Color { - public let red:Float - public let green:Float - public let blue:Float - public let alpha:Float + public let redComponent:Float + public let greenComponent:Float + public let blueComponent:Float + public let alphaComponent:Float public init(red:Float, green:Float, blue:Float, alpha:Float = 1.0) { - self.red = red - self.green = green - self.blue = blue - self.alpha = alpha + self.redComponent = red + self.greenComponent = green + self.blueComponent = blue + self.alphaComponent = alpha } - public static let Black = Color(red:0.0, green:0.0, blue:0.0, alpha:1.0) - public static let White = Color(red:1.0, green:1.0, blue:1.0, alpha:1.0) - public static let Red = Color(red:1.0, green:0.0, blue:0.0, alpha:1.0) - public static let Green = Color(red:0.0, green:1.0, blue:0.0, alpha:1.0) - public static let Blue = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) - public static let Transparent = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) -} \ No newline at end of file + public static let black = Color(red:0.0, green:0.0, blue:0.0, alpha:1.0) + public static let white = Color(red:1.0, green:1.0, blue:1.0, alpha:1.0) + public static let red = Color(red:1.0, green:0.0, blue:0.0, alpha:1.0) + public static let green = Color(red:0.0, green:1.0, blue:0.0, alpha:1.0) + public static let blue = Color(red:0.0, green:0.0, blue:1.0, alpha:1.0) + public static let transparent = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) +} diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index eb9f7cd4..70872ac3 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -124,7 +124,7 @@ public class Framebuffer { } } - func texelSizeForRotation(_ rotation:Rotation) -> Size { + func texelSize(for rotation:Rotation) -> Size { if rotation.flipsDimensions() { return Size(width:1.0 / Float(size.height), height:1.0 / Float(size.width)) } else { @@ -132,7 +132,7 @@ public class Framebuffer { } } - func initialStageTexelSizeForRotation(_ 
rotation:Rotation) -> Size { + func initialStageTexelSize(for rotation:Rotation) -> Size { if rotation.flipsDimensions() { return Size(width:1.0 / Float(size.height), height:0.0) } else { @@ -174,7 +174,7 @@ public class Framebuffer { print("WARNING: Tried to overrelease a framebuffer") } framebufferRetainCount = 0 - cache?.returnFramebufferToCache(self) + cache?.returnToCache(self) } } } diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 3e20f31a..9482aed9 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -46,7 +46,7 @@ public class FramebufferCache { framebufferCache.removeAll() } - func returnFramebufferToCache(_ framebuffer:Framebuffer) { + func returnToCache(_ framebuffer:Framebuffer) { // print("Returning to cache: \(framebuffer)") context.runOperationSynchronously{ if (self.framebufferCache[framebuffer.hash] != nil) { diff --git a/framework/Source/ImageGenerator.swift b/framework/Source/ImageGenerator.swift index 51f4de36..ea79f99b 100644 --- a/framework/Source/ImageGenerator.swift +++ b/framework/Source/ImageGenerator.swift @@ -13,7 +13,7 @@ public class ImageGenerator: ImageSource { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) } diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 3a0876dc..f1db555e 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -200,7 +200,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for camera inputs } diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index 7ff8b721..1f9daba8 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -61,7 +61,7 @@ public class MovieInput: ImageSource { } while (self.assetReader.status == .reading) { - self.readNextVideoFrameFromOutput(readerVideoTrackOutput!) + self.readNextVideoFrame(from:readerVideoTrackOutput!) } if (self.assetReader.status == .completed) { @@ -89,7 +89,7 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions - func readNextVideoFrameFromOutput(_ videoTrackOutput:AVAssetReaderOutput) { + func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { if ((assetReader.status == .reading) && !videoEncodingIsFinished) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { @@ -110,7 +110,7 @@ public class MovieInput: ImageSource { } sharedImageProcessingContext.runOperationSynchronously{ - self.processMovieFrame(sampleBuffer) + self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) } } else { @@ -130,15 +130,15 @@ public class MovieInput: ImageSource { } - func processMovieFrame(_ frame:CMSampleBuffer) { + func process(movieFrame frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! 
// processingFrameTime = currentSampleTime - self.processMovieFrame(movieFrame, withSampleTime:currentSampleTime) + self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } - func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) @@ -186,7 +186,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } } diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index 93881e3a..31c7b95e 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -135,7 +135,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Black) + clearFramebufferWithColor(Color.black) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index c449fc2a..3c2e2f61 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -141,7 +141,7 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 59d5d23a..0a0f8773 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -41,7 +41,7 @@ public class PictureOutput: ImageConsumer { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Red) + clearFramebufferWithColor(Color.red) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() diff --git a/framework/Source/Mac/RenderView.swift b/framework/Source/Mac/RenderView.swift index 1248094d..ddcb502f 100755 --- a/framework/Source/Mac/RenderView.swift +++ b/framework/Source/Mac/RenderView.swift @@ -1,7 +1,7 @@ import Cocoa public class RenderView:NSOpenGLView, ImageConsumer { - public var backgroundColor = Color.Black + public var backgroundColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var sizeInPixels:Size { get { return Size(width:Float(self.frame.size.width), height:Float(self.frame.size.width)) } } diff --git a/framework/Source/Matrix.swift b/framework/Source/Matrix.swift index bf4a50c9..475fff5b 100644 --- 
a/framework/Source/Matrix.swift +++ b/framework/Source/Matrix.swift @@ -32,7 +32,7 @@ public struct Matrix4x4 { self.m44 = rowMajorValues[15] } - public static let Identity = Matrix4x4(rowMajorValues:[1.0, 0.0, 0.0, 0.0, + public static let identity = Matrix4x4(rowMajorValues:[1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0]) @@ -59,11 +59,11 @@ public struct Matrix3x3 { self.m33 = rowMajorValues[8] } - public static let Identity = Matrix3x3(rowMajorValues:[1.0, 0.0, 0.0, + public static let identity = Matrix3x3(rowMajorValues:[1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]) - public static let CenterOnly = Matrix3x3(rowMajorValues:[0.0, 0.0, 0.0, + public static let centerOnly = Matrix3x3(rowMajorValues:[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0]) } diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index 2313286d..f60e031d 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -78,7 +78,7 @@ func renderQuadWithShader(_ shader:ShaderProgram, uniformSettings:ShaderUniformS } public func clearFramebufferWithColor(_ color:Color) { - glClearColor(GLfloat(color.red), GLfloat(color.green), GLfloat(color.blue), GLfloat(color.alpha)) + glClearColor(GLfloat(color.redComponent), GLfloat(color.greenComponent), GLfloat(color.blueComponent), GLfloat(color.alphaComponent)) glClear(GLenum(GL_COLOR_BUFFER_BIT)) } diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index e578c4e6..c75ac4d6 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -17,7 +17,7 @@ public class OperationGroup: ImageProcessingOperation { configurationOperation(input:inputImageRelay, output:outputImageRelay) } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { - outputImageRelay.transmitPreviousImageToTarget(target, atIndex:atIndex) + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + outputImageRelay.transmitPreviousImage(to:target, atIndex:atIndex) } } diff --git a/framework/Source/Operations/BulgeDistortion.swift b/framework/Source/Operations/BulgeDistortion.swift index 641ba6e7..adf3d2b2 100644 --- a/framework/Source/Operations/BulgeDistortion.swift +++ b/framework/Source/Operations/BulgeDistortion.swift @@ -1,13 +1,13 @@ public class BulgeDistortion: BasicOperation { public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:BulgeDistortionFragmentShader, numberOfInputs:1) ({radius = 0.25})() ({scale = 0.5})() - ({center = Position.Center})() + ({center = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ChromaKeyBlend.swift b/framework/Source/Operations/ChromaKeyBlend.swift index 79f7267a..940ad0cf 100644 --- a/framework/Source/Operations/ChromaKeyBlend.swift +++ b/framework/Source/Operations/ChromaKeyBlend.swift @@ -1,13 +1,13 @@ public class ChromaKeyBlend: BasicOperation { public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var 
colorToReplace:Color = Color.Green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } + public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { super.init(fragmentShader:ChromaKeyBlendFragmentShader, numberOfInputs:2) ({thresholdSensitivity = 0.4})() ({smoothing = 0.1})() - ({colorToReplace = Color.Green})() + ({colorToReplace = Color.green})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ChromaKeying.swift b/framework/Source/Operations/ChromaKeying.swift index ab1deb80..519f29e4 100644 --- a/framework/Source/Operations/ChromaKeying.swift +++ b/framework/Source/Operations/ChromaKeying.swift @@ -1,13 +1,13 @@ public class ChromaKeying: BasicOperation { public var thresholdSensitivity:Float = 0.4 { didSet { uniformSettings["thresholdSensitivity"] = thresholdSensitivity } } public var smoothing:Float = 0.1 { didSet { uniformSettings["smoothing"] = smoothing } } - public var colorToReplace:Color = Color.Green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } + public var colorToReplace:Color = Color.green { didSet { uniformSettings["colorToReplace"] = colorToReplace } } public init() { super.init(fragmentShader:ChromaKeyFragmentShader, numberOfInputs:1) ({thresholdSensitivity = 0.4})() ({smoothing = 0.1})() - ({colorToReplace = Color.Green})() + ({colorToReplace = Color.green})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/CircleGenerator.swift b/framework/Source/Operations/CircleGenerator.swift index 5bbe8991..84a5f593 100644 --- a/framework/Source/Operations/CircleGenerator.swift +++ b/framework/Source/Operations/CircleGenerator.swift @@ -21,7 +21,7 @@ public class CircleGenerator: ImageGenerator { super.init(size:size) } - public func renderCircleOfRadius(_ radius:Float, center:Position, circleColor:Color = Color.White, backgroundColor:Color = Color.Black) { + public func renderCircleOfRadius(_ radius:Float, center:Position, circleColor:Color = Color.white, backgroundColor:Color = Color.black) { let scaledRadius = radius * 2.0 imageFramebuffer.activateFramebufferForRendering() var uniformSettings = ShaderUniformSettings() diff --git a/framework/Source/Operations/ColorMatrixFilter.swift b/framework/Source/Operations/ColorMatrixFilter.swift index fb64c881..a8a344ff 100644 --- a/framework/Source/Operations/ColorMatrixFilter.swift +++ b/framework/Source/Operations/ColorMatrixFilter.swift @@ -1,12 +1,12 @@ public class ColorMatrixFilter: BasicOperation { public var intensity:Float = 1.0 { didSet { uniformSettings["intensity"] = intensity } } - public var colorMatrix:Matrix4x4 = Matrix4x4.Identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } + public var colorMatrix:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["colorMatrix"] = colorMatrix } } public init() { super.init(fragmentShader:ColorMatrixFragmentShader, numberOfInputs:1) ({intensity = 1.0})() - ({colorMatrix = Matrix4x4.Identity})() + ({colorMatrix = Matrix4x4.identity})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Convolution3x3.swift b/framework/Source/Operations/Convolution3x3.swift index 591282f6..2ea7a6bc 100644 --- a/framework/Source/Operations/Convolution3x3.swift +++ b/framework/Source/Operations/Convolution3x3.swift @@ -1,9 +1,9 @@ public class Convolution3x3: TextureSamplingOperation { - public var convolutionKernel:Matrix3x3 = Matrix3x3.CenterOnly { didSet { uniformSettings["convolutionMatrix"] = 
convolutionKernel } } + public var convolutionKernel:Matrix3x3 = Matrix3x3.centerOnly { didSet { uniformSettings["convolutionMatrix"] = convolutionKernel } } public init() { super.init(fragmentShader:Convolution3x3FragmentShader) - ({convolutionKernel = Matrix3x3.CenterOnly})() + ({convolutionKernel = Matrix3x3.centerOnly})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index 2bb6e74c..b8b3bbd0 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -25,7 +25,7 @@ public class Crop: BasicOperation { normalizedOffsetFromOrigin = Position(Float(inputSize.width / 2 - finalCropSize.width / 2) / Float(inputSize.width), Float(inputSize.height / 2 - finalCropSize.height / 2) / Float(inputSize.height)) } else { finalCropSize = inputSize - normalizedOffsetFromOrigin = Position.Zero + normalizedOffsetFromOrigin = Position.zero } let normalizedCropSize = Size(width:Float(finalCropSize.width) / Float(inputSize.width), height:Float(finalCropSize.height) / Float(inputSize.height)) diff --git a/framework/Source/Operations/CrosshairGenerator.swift b/framework/Source/Operations/CrosshairGenerator.swift index 3b24bfb5..0960a3ae 100644 --- a/framework/Source/Operations/CrosshairGenerator.swift +++ b/framework/Source/Operations/CrosshairGenerator.swift @@ -15,7 +15,7 @@ public class CrosshairGenerator: ImageGenerator { public var crosshairWidth:Float = 5.0 { didSet { uniformSettings["crosshairWidth"] = crosshairWidth } } - public var crosshairColor:Color = Color.Green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } + public var crosshairColor:Color = Color.green { didSet { uniformSettings["crosshairColor"] = crosshairColor } } let crosshairShader:ShaderProgram var uniformSettings = ShaderUniformSettings() @@ -25,7 +25,7 @@ public class CrosshairGenerator: ImageGenerator { super.init(size:size) ({crosshairWidth = 5.0})() - ({crosshairColor = Color.Green})() + ({crosshairColor = Color.green})() } @@ -42,7 +42,7 @@ public class CrosshairGenerator: ImageGenerator { crosshairShader.use() uniformSettings.restoreShaderSettings(crosshairShader) - clearFramebufferWithColor(Color.Transparent) + clearFramebufferWithColor(Color.transparent) guard let positionAttribute = crosshairShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } diff --git a/framework/Source/Operations/FalseColor.swift b/framework/Source/Operations/FalseColor.swift index ddf61bae..36dcc112 100644 --- a/framework/Source/Operations/FalseColor.swift +++ b/framework/Source/Operations/FalseColor.swift @@ -1,11 +1,11 @@ public class FalseColor: BasicOperation { public var firstColor:Color = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0) { didSet { uniformSettings["firstColor"] = firstColor } } - public var secondColor:Color = Color.Red { didSet { uniformSettings["secondColor"] = secondColor } } + public var secondColor:Color = Color.red { didSet { uniformSettings["secondColor"] = secondColor } } public init() { super.init(fragmentShader:FalseColorFragmentShader, numberOfInputs:1) ({firstColor = Color(red:0.0, green:0.0, blue:0.5, alpha:1.0)})() - ({secondColor = Color.Red})() + ({secondColor = Color.red})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/GlassSphereRefraction.swift b/framework/Source/Operations/GlassSphereRefraction.swift index 39ec3150..4fd2c2e8 100644 --- 
a/framework/Source/Operations/GlassSphereRefraction.swift +++ b/framework/Source/Operations/GlassSphereRefraction.swift @@ -1,15 +1,15 @@ public class GlassSphereRefraction: BasicOperation { public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } public var refractiveIndex:Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:GlassSphereFragmentShader, numberOfInputs:1) ({radius = 0.25})() ({refractiveIndex = 0.71})() - ({center = Position.Center})() + ({center = Position.center})() self.backgroundColor = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/HighlightAndShadowTint.swift b/framework/Source/Operations/HighlightAndShadowTint.swift index 59d0b494..54b25d16 100644 --- a/framework/Source/Operations/HighlightAndShadowTint.swift +++ b/framework/Source/Operations/HighlightAndShadowTint.swift @@ -1,15 +1,15 @@ public class HighlightAndShadowTint: BasicOperation { public var shadowTintIntensity:Float = 0.0 { didSet { uniformSettings["shadowTintIntensity"] = shadowTintIntensity } } public var highlightTintIntensity:Float = 0.0 { didSet { uniformSettings["highlightTintIntensity"] = highlightTintIntensity } } - public var shadowTintColor:Color = Color.Red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } - public var highlightTintColor:Color = Color.Blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } + public var shadowTintColor:Color = Color.red { didSet { uniformSettings["shadowTintColor"] = shadowTintColor } } + public var highlightTintColor:Color = Color.blue { didSet { uniformSettings["highlightTintColor"] = highlightTintColor } } public init() { super.init(fragmentShader:HighlightShadowTintFragmentShader, numberOfInputs:1) ({shadowTintIntensity = 0.0})() ({highlightTintIntensity = 0.0})() - ({shadowTintColor = Color.Red})() - ({highlightTintColor = Color.Blue})() + ({shadowTintColor = Color.red})() + ({highlightTintColor = Color.blue})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 578b47fd..1c98d3b2 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -60,7 +60,7 @@ public class Histogram: BasicOperation { releaseIncomingFramebuffers() renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Black) + clearFramebufferWithColor(Color.black) glBlendEquation(GLenum(GL_FUNC_ADD)) glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE)) diff --git a/framework/Source/Operations/ImageBuffer.swift b/framework/Source/Operations/ImageBuffer.swift index 2325e583..6b5b5675 100644 --- a/framework/Source/Operations/ImageBuffer.swift +++ b/framework/Source/Operations/ImageBuffer.swift @@ -23,7 +23,7 @@ public class ImageBuffer: ImageProcessingOperation { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Buffers most likely won't need this } } diff --git a/framework/Source/Operations/LanczosResampling.swift b/framework/Source/Operations/LanczosResampling.swift index 7184cd69..8be44c69 100644 --- 
a/framework/Source/Operations/LanczosResampling.swift +++ b/framework/Source/Operations/LanczosResampling.swift @@ -12,7 +12,7 @@ public class LanczosResampling: BasicOperation { firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSizeForRotation(outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height @@ -20,7 +20,7 @@ public class LanczosResampling: BasicOperation { releaseIncomingFramebuffers() // Shrink the width component of the result - let secondStageTexelSize = firstStageFramebuffer.texelSizeForRotation(.noRotation) + let secondStageTexelSize = firstStageFramebuffer.texelSize(for:.noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width uniformSettings["texelHeight"] = 0.0 diff --git a/framework/Source/Operations/LineGenerator.swift b/framework/Source/Operations/LineGenerator.swift index a00c0ba7..befe0d45 100644 --- a/framework/Source/Operations/LineGenerator.swift +++ b/framework/Source/Operations/LineGenerator.swift @@ -31,7 +31,7 @@ public enum Line { } public class LineGenerator: ImageGenerator { - public var lineColor:Color = Color.Green { didSet { uniformSettings["lineColor"] = lineColor } } + public var lineColor:Color = Color.green { didSet { uniformSettings["lineColor"] = lineColor } } public var lineWidth:Float = 1.0 { didSet { lineShader.use() @@ -47,7 +47,7 @@ public class LineGenerator: ImageGenerator { super.init(size:size) ({lineWidth = 1.0})() - ({lineColor = Color.Red})() + ({lineColor = Color.red})() } public func renderLines(_ lines:[Line]) { @@ -56,7 +56,7 @@ public class LineGenerator: ImageGenerator { lineShader.use() uniformSettings.restoreShaderSettings(lineShader) - clearFramebufferWithColor(Color.Transparent) + clearFramebufferWithColor(Color.transparent) guard let positionAttribute = lineShader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } diff --git a/framework/Source/Operations/MotionBlur.swift b/framework/Source/Operations/MotionBlur.swift index 25f93bf6..6c76fe29 100755 --- a/framework/Source/Operations/MotionBlur.swift +++ b/framework/Source/Operations/MotionBlur.swift @@ -14,7 +14,7 @@ public class MotionBlur: BasicOperation { override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? 
inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = inputFramebuffer.texelSizeForRotation(outputRotation) + let texelSize = inputFramebuffer.texelSize(for:outputRotation) let aspectRatio = inputFramebuffer.aspectRatioForRotation(outputRotation) let directionalTexelStep:Position diff --git a/framework/Source/Operations/MotionDetector.swift b/framework/Source/Operations/MotionDetector.swift index 96ced644..669c4deb 100644 --- a/framework/Source/Operations/MotionDetector.swift +++ b/framework/Source/Operations/MotionDetector.swift @@ -10,7 +10,7 @@ public class MotionDetector: OperationGroup { super.init() averageColorExtractor.extractedColorCallback = {[weak self] color in - self?.motionDetectedCallback?(position:Position(color.red / color.alpha, color.green / color.alpha), strength:color.alpha) + self?.motionDetectedCallback?(position:Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), strength:color.alphaComponent) } self.configureGroup{input, output in @@ -18,4 +18,4 @@ public class MotionDetector: OperationGroup { input --> self.lowPassFilter --> self.motionComparison } } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PinchDistortion.swift b/framework/Source/Operations/PinchDistortion.swift index 5417e803..4ab7c4b5 100644 --- a/framework/Source/Operations/PinchDistortion.swift +++ b/framework/Source/Operations/PinchDistortion.swift @@ -1,13 +1,13 @@ public class PinchDistortion: BasicOperation { public var radius:Float = 1.0 { didSet { uniformSettings["radius"] = radius } } public var scale:Float = 0.5 { didSet { uniformSettings["scale"] = scale } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:PinchDistortionFragmentShader, numberOfInputs:1) ({radius = 1.0})() ({scale = 0.5})() - ({center = Position.Center})() + ({center = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/PolarPixellate.swift b/framework/Source/Operations/PolarPixellate.swift index ebd24974..230da53a 100644 --- a/framework/Source/Operations/PolarPixellate.swift +++ b/framework/Source/Operations/PolarPixellate.swift @@ -1,11 +1,11 @@ public class PolarPixellate: BasicOperation { public var pixelSize:Size = Size(width:0.05, height:0.05) { didSet { uniformSettings["pixelSize"] = pixelSize } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:PolarPixellateFragmentShader, numberOfInputs:1) ({pixelSize = Size(width:0.05, height:0.05)})() - ({center = Position.Center})() + ({center = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/Sharpen.swift b/framework/Source/Operations/Sharpen.swift index 166780c8..3ba518dc 100644 --- a/framework/Source/Operations/Sharpen.swift +++ b/framework/Source/Operations/Sharpen.swift @@ -10,7 +10,7 @@ public class Sharpen: BasicOperation { override func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? 
inputFramebuffer.texelSizeForRotation(outputRotation) + let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Operations/SphereRefraction.swift b/framework/Source/Operations/SphereRefraction.swift index 788876e6..617e142b 100644 --- a/framework/Source/Operations/SphereRefraction.swift +++ b/framework/Source/Operations/SphereRefraction.swift @@ -1,15 +1,15 @@ public class SphereRefraction: BasicOperation { public var radius:Float = 0.25 { didSet { uniformSettings["radius"] = radius } } public var refractiveIndex:Float = 0.71 { didSet { uniformSettings["refractiveIndex"] = refractiveIndex } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:SphereRefractionFragmentShader, numberOfInputs:1) ({radius = 0.25})() ({refractiveIndex = 0.71})() - ({center = Position.Center})() + ({center = Position.center})() self.backgroundColor = Color(red:0.0, green:0.0, blue:0.0, alpha:0.0) } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/StretchDistortion.swift b/framework/Source/Operations/StretchDistortion.swift index ad354bd2..4e3a4a93 100644 --- a/framework/Source/Operations/StretchDistortion.swift +++ b/framework/Source/Operations/StretchDistortion.swift @@ -1,9 +1,9 @@ public class StretchDistortion: BasicOperation { - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:StretchDistortionFragmentShader, numberOfInputs:1) - ({center = Position.Center})() + ({center = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/SwirlDistortion.swift b/framework/Source/Operations/SwirlDistortion.swift index d60cbbc9..aea785a9 100644 --- a/framework/Source/Operations/SwirlDistortion.swift +++ b/framework/Source/Operations/SwirlDistortion.swift @@ -1,13 +1,13 @@ public class SwirlDistortion: BasicOperation { public var radius:Float = 0.5 { didSet { uniformSettings["radius"] = radius } } public var angle:Float = 1.0 { didSet { uniformSettings["angle"] = angle } } - public var center:Position = Position.Center { didSet { uniformSettings["center"] = center } } + public var center:Position = Position.center { didSet { uniformSettings["center"] = center } } public init() { super.init(fragmentShader:SwirlFragmentShader, numberOfInputs:1) ({radius = 0.5})() ({angle = 1.0})() - ({center = Position.Center})() + ({center = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/TransformOperation.swift b/framework/Source/Operations/TransformOperation.swift index 01b90360..6b87a377 100644 --- a/framework/Source/Operations/TransformOperation.swift +++ b/framework/Source/Operations/TransformOperation.swift @@ -13,13 +13,13 @@ #endif public class TransformOperation: BasicOperation { - public var transform:Matrix4x4 = Matrix4x4.Identity { didSet { uniformSettings["transformMatrix"] = transform } } + public var transform:Matrix4x4 = Matrix4x4.identity { didSet { uniformSettings["transformMatrix"] = transform } } var normalizedImageVertices:[GLfloat]! 
public init() { super.init(vertexShader:TransformVertexShader, fragmentShader:PassthroughFragmentShader, numberOfInputs:1) - ({transform = Matrix4x4.Identity})() + ({transform = Matrix4x4.identity})() } override func internalRenderFunction(_ inputFramebuffer:Framebuffer, textureProperties:[InputTextureProperties]) { diff --git a/framework/Source/Operations/Vignette.swift b/framework/Source/Operations/Vignette.swift index 629bfd27..c027a544 100644 --- a/framework/Source/Operations/Vignette.swift +++ b/framework/Source/Operations/Vignette.swift @@ -1,15 +1,15 @@ public class Vignette: BasicOperation { - public var center:Position = Position.Center { didSet { uniformSettings["vignetteCenter"] = center } } - public var color:Color = Color.Black { didSet { uniformSettings["vignetteColor"] = color } } + public var center:Position = Position.center { didSet { uniformSettings["vignetteCenter"] = center } } + public var color:Color = Color.black { didSet { uniformSettings["vignetteColor"] = color } } public var start:Float = 0.3 { didSet { uniformSettings["vignetteStart"] = start } } public var end:Float = 0.75 { didSet { uniformSettings["vignetteEnd"] = end } } public init() { super.init(fragmentShader:VignetteFragmentShader, numberOfInputs:1) - ({center = Position.Center})() - ({color = Color.Black})() + ({center = Position.center})() + ({color = Color.black})() ({start = 0.3})() ({end = 0.75})() } -} \ No newline at end of file +} diff --git a/framework/Source/Operations/ZoomBlur.swift b/framework/Source/Operations/ZoomBlur.swift index 1b893e05..627fe060 100644 --- a/framework/Source/Operations/ZoomBlur.swift +++ b/framework/Source/Operations/ZoomBlur.swift @@ -1,11 +1,11 @@ public class ZoomBlur: BasicOperation { public var blurSize:Float = 1.0 { didSet { uniformSettings["blurSize"] = blurSize } } - public var blurCenter:Position = Position.Center { didSet { uniformSettings["blurCenter"] = blurCenter } } + public var blurCenter:Position = Position.center { didSet { uniformSettings["blurCenter"] = blurCenter } } public init() { super.init(fragmentShader:ZoomBlurFragmentShader, numberOfInputs:1) ({blurSize = 1.0})() - ({blurCenter = Position.Center})() + ({blurCenter = Position.center})() } -} \ No newline at end of file +} diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index c6ec55e2..e045d084 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -4,7 +4,7 @@ import Foundation public protocol ImageSource { var targets:TargetContainer { get } - func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) + func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) } public protocol ImageConsumer:AnyObject { @@ -32,10 +32,10 @@ public extension ImageSource { if let targetIndex = atTargetIndex { target.setSource(self, atIndex:targetIndex) targets.append(target, indexAtTarget:targetIndex) - transmitPreviousImageToTarget(target, atIndex:targetIndex) + transmitPreviousImage(to:target, atIndex:targetIndex) } else if let indexAtTarget = target.addSource(self) { targets.append(target, indexAtTarget:indexAtTarget) - transmitPreviousImageToTarget(target, atIndex:indexAtTarget) + transmitPreviousImage(to:target, atIndex:indexAtTarget) } else { debugPrint("Warning: tried to add target beyond target's input capacity") } @@ -172,8 +172,8 @@ public class ImageRelay: ImageProcessingOperation { init() { } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { - 
sources.sources[0]?.transmitPreviousImageToTarget(self, atIndex:0) + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { + sources.sources[0]?.transmitPreviousImage(to:self, atIndex:0) } public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { diff --git a/framework/Source/Position.swift b/framework/Source/Position.swift index d345726d..ecfd7b6a 100644 --- a/framework/Source/Position.swift +++ b/framework/Source/Position.swift @@ -19,6 +19,6 @@ public struct Position { } #endif - public static let Center = Position(0.5, 0.5) - public static let Zero = Position(0.0, 0.0) -} \ No newline at end of file + public static let center = Position(0.5, 0.5) + public static let zero = Position(0.0, 0.0) +} diff --git a/framework/Source/RawDataInput.swift b/framework/Source/RawDataInput.swift index 5be5e891..c918045f 100644 --- a/framework/Source/RawDataInput.swift +++ b/framework/Source/RawDataInput.swift @@ -46,7 +46,7 @@ public class RawDataInput: ImageSource { updateTargetsWithFramebuffer(dataFramebuffer) } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // TODO: Determine if this is necessary for the raw data uploads } } diff --git a/framework/Source/RawDataOutput.swift b/framework/Source/RawDataOutput.swift index a457e0ca..d4327d17 100644 --- a/framework/Source/RawDataOutput.swift +++ b/framework/Source/RawDataOutput.swift @@ -27,7 +27,7 @@ public class RawDataOutput: ImageConsumer { renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Black) + clearFramebufferWithColor(Color.black) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 4275e5bc..791451b6 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -38,7 +38,7 @@ func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { if (Thread.isMainThread) { mainThreadOperation() } else { - DispatchQueue.main.async(execute: mainThreadOperation) + DispatchQueue.main.async(execute:mainThreadOperation) } } @@ -46,7 +46,7 @@ func runOnMainQueue(_ mainThreadOperation:() -> ()) { if (Thread.isMainThread) { mainThreadOperation() } else { - DispatchQueue.main.sync(execute: mainThreadOperation) + DispatchQueue.main.sync(execute:mainThreadOperation) } } diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index ae8d693f..09e9bd93 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -20,8 +20,8 @@ struct ShaderCompileError:ErrorProtocol { } enum ShaderType { - case vertexShader - case fragmentShader + case vertex + case fragment } public class ShaderProgram { @@ -29,7 +29,6 @@ public class ShaderProgram { let program:GLuint var vertexShader:GLuint! // At some point, the Swift compiler will be able to deal with the early throw and we can convert these to lets var fragmentShader:GLuint! 
- var initialized:Bool = false private var attributeAddresses = [String:GLuint]() private var uniformAddresses = [String:GLint]() private var currentUniformIntValues = [String:GLint]() @@ -42,8 +41,8 @@ public class ShaderProgram { public init(vertexShader:String, fragmentShader:String) throws { program = glCreateProgram() - self.vertexShader = try compileShader(vertexShader, type:.vertexShader) - self.fragmentShader = try compileShader(fragmentShader, type:.fragmentShader) + self.vertexShader = try compileShader(vertexShader, type:.vertex) + self.fragmentShader = try compileShader(fragmentShader, type:.fragment) glAttachShader(program, self.vertexShader) glAttachShader(program, self.fragmentShader) @@ -202,7 +201,6 @@ public class ShaderProgram { throw ShaderCompileError(compileLog:"Link error") } - initialized = true } public func use() { @@ -213,8 +211,8 @@ public class ShaderProgram { func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { let shaderHandle:GLuint switch type { - case .vertexShader: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER)) - case .fragmentShader: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER)) + case .vertex: shaderHandle = glCreateShader(GLenum(GL_VERTEX_SHADER)) + case .fragment: shaderHandle = glCreateShader(GLenum(GL_FRAGMENT_SHADER)) } shaderString.withGLChar{glString in @@ -236,8 +234,8 @@ func compileShader(_ shaderString:String, type:ShaderType) throws -> GLuint { // let compileLogString = String(bytes:compileLog.map{UInt8($0)}, encoding:NSASCIIStringEncoding) switch type { - case .vertexShader: throw ShaderCompileError(compileLog:"Vertex shader compile error:") - case .fragmentShader: throw ShaderCompileError(compileLog:"Fragment shader compile error:") + case .vertex: throw ShaderCompileError(compileLog:"Vertex shader compile error:") + case .fragment: throw ShaderCompileError(compileLog:"Fragment shader compile error:") } } } diff --git a/framework/Source/ShaderUniformSettings.swift b/framework/Source/ShaderUniformSettings.swift index 9dc49c09..a7f829e7 100644 --- a/framework/Source/ShaderUniformSettings.swift +++ b/framework/Source/ShaderUniformSettings.swift @@ -68,11 +68,11 @@ public struct ShaderUniformSettings { extension Color { func toGLArray() -> [GLfloat] { - return [GLfloat(red), GLfloat(green), GLfloat(blue)] + return [GLfloat(redComponent), GLfloat(greenComponent), GLfloat(blueComponent)] } func toGLArrayWithAlpha() -> [GLfloat] { - return [GLfloat(red), GLfloat(green), GLfloat(blue), GLfloat(alpha)] + return [GLfloat(redComponent), GLfloat(greenComponent), GLfloat(blueComponent), GLfloat(alphaComponent)] } } diff --git a/framework/Source/TextureInput.swift b/framework/Source/TextureInput.swift index 6665d0a1..b2a782bf 100644 --- a/framework/Source/TextureInput.swift +++ b/framework/Source/TextureInput.swift @@ -29,7 +29,7 @@ public class TextureInput: ImageSource { updateTargetsWithFramebuffer(textureFramebuffer) } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { textureFramebuffer.lock() target.newFramebufferAvailable(textureFramebuffer, fromSourceIndex:atIndex) } diff --git a/framework/Source/TextureSamplingOperation.swift b/framework/Source/TextureSamplingOperation.swift index 8534ec6a..189e551f 100644 --- a/framework/Source/TextureSamplingOperation.swift +++ b/framework/Source/TextureSamplingOperation.swift @@ -7,7 +7,7 @@ public class TextureSamplingOperation: BasicOperation { override 
func configureFramebufferSpecificUniforms(_ inputFramebuffer:Framebuffer) { let outputRotation = overriddenOutputRotation ?? inputFramebuffer.orientation.rotationNeededForOrientation(.portrait) - let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSizeForRotation(outputRotation) + let texelSize = overriddenTexelSize ?? inputFramebuffer.texelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width uniformSettings["texelHeight"] = texelSize.height } diff --git a/framework/Source/Timestamp.swift b/framework/Source/Timestamp.swift index 899d5b5d..4455a898 100644 --- a/framework/Source/Timestamp.swift +++ b/framework/Source/Timestamp.swift @@ -5,11 +5,11 @@ public struct TimestampFlags: OptionSet { public let rawValue:UInt32 public init(rawValue:UInt32) { self.rawValue = rawValue } - public static let Valid = TimestampFlags(rawValue: 1 << 0) - public static let HasBeenRounded = TimestampFlags(rawValue: 1 << 1) - public static let PositiveInfinity = TimestampFlags(rawValue: 1 << 2) - public static let NegativeInfinity = TimestampFlags(rawValue: 1 << 3) - public static let Indefinite = TimestampFlags(rawValue: 1 << 4) + public static let valid = TimestampFlags(rawValue: 1 << 0) + public static let hasBeenRounded = TimestampFlags(rawValue: 1 << 1) + public static let positiveInfinity = TimestampFlags(rawValue: 1 << 2) + public static let negativeInfinity = TimestampFlags(rawValue: 1 << 3) + public static let indefinite = TimestampFlags(rawValue: 1 << 4) } public struct Timestamp: Comparable { diff --git a/framework/Source/TwoStageOperation.swift b/framework/Source/TwoStageOperation.swift index 834317e2..efaac51f 100644 --- a/framework/Source/TwoStageOperation.swift +++ b/framework/Source/TwoStageOperation.swift @@ -35,7 +35,7 @@ public class TwoStageOperation: BasicOperation { firstStageFramebuffer.activateFramebufferForRendering() clearFramebufferWithColor(backgroundColor) - let texelSize = inputFramebuffer.initialStageTexelSizeForRotation(outputRotation) + let texelSize = inputFramebuffer.initialStageTexelSize(for:outputRotation) uniformSettings["texelWidth"] = texelSize.width * (downsamplingFactor ?? 1.0) uniformSettings["texelHeight"] = texelSize.height * (downsamplingFactor ?? 1.0) @@ -46,7 +46,7 @@ public class TwoStageOperation: BasicOperation { releaseIncomingFramebuffers() } - let secondStageTexelSize = renderFramebuffer.texelSizeForRotation(.noRotation) + let secondStageTexelSize = renderFramebuffer.texelSize(for:.noRotation) uniformSettings["texelWidth"] = secondStageTexelSize.width * (downsamplingFactor ?? 1.0) uniformSettings["texelHeight"] = 0.0 diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 8e377a22..63cd6939 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -280,7 +280,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for camera inputs } diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 19b2fad9..3c4a1d6e 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -61,7 +61,7 @@ public class MovieInput: ImageSource { } while (self.assetReader.status == .reading) { - self.readNextVideoFrameFromOutput(readerVideoTrackOutput!) + self.readNextVideoFrame(from:readerVideoTrackOutput!) 
} if (self.assetReader.status == .completed) { @@ -89,7 +89,7 @@ public class MovieInput: ImageSource { // MARK: - // MARK: Internal processing functions - func readNextVideoFrameFromOutput(_ videoTrackOutput:AVAssetReaderOutput) { + func readNextVideoFrame(from videoTrackOutput:AVAssetReaderOutput) { if ((assetReader.status == .reading) && !videoEncodingIsFinished) { if let sampleBuffer = videoTrackOutput.copyNextSampleBuffer() { if (playAtActualSpeed) { @@ -110,7 +110,7 @@ public class MovieInput: ImageSource { } sharedImageProcessingContext.runOperationSynchronously{ - self.processMovieFrame(sampleBuffer) + self.process(movieFrame:sampleBuffer) CMSampleBufferInvalidate(sampleBuffer) } } else { @@ -130,15 +130,15 @@ public class MovieInput: ImageSource { } - func processMovieFrame(_ frame:CMSampleBuffer) { + func process(movieFrame frame:CMSampleBuffer) { let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(frame) let movieFrame = CMSampleBufferGetImageBuffer(frame)! // processingFrameTime = currentSampleTime - self.processMovieFrame(movieFrame, withSampleTime:currentSampleTime) + self.process(movieFrame:movieFrame, withSampleTime:currentSampleTime) } - func processMovieFrame(_ movieFrame:CVPixelBuffer, withSampleTime:CMTime) { + func process(movieFrame:CVPixelBuffer, withSampleTime:CMTime) { let bufferHeight = CVPixelBufferGetHeight(movieFrame) let bufferWidth = CVPixelBufferGetWidth(movieFrame) CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) @@ -186,7 +186,7 @@ public class MovieInput: ImageSource { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { // Not needed for movie inputs } } diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index d5651ed6..fe93d97f 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -165,7 +165,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { } renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Black) + clearFramebufferWithColor(Color.black) CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue:CVOptionFlags(0))) renderQuadWithShader(colorSwizzlingShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 3c1b879e..ee27ae08 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -139,7 +139,7 @@ public class PictureInput: ImageSource { } } - public func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { if hasProcessedImage { imageFramebuffer.lock() target.newFramebufferAvailable(imageFramebuffer, fromSourceIndex:atIndex) diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 11ee74a3..34c4a169 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -41,7 +41,7 @@ public class PictureOutput: ImageConsumer { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() 
renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.Red) + clearFramebufferWithColor(Color.red) renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) framebuffer.unlock() diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index b2036b29..3c911b4a 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -3,7 +3,7 @@ import UIKit // TODO: Add support for transparency // TODO: Deal with view resizing public class RenderView:UIView, ImageConsumer { - public var backgroundRenderColor = Color.Black + public var backgroundRenderColor = Color.black public var fillMode = FillMode.preserveAspectRatio public var orientation:ImageOrientation = .portrait public var sizeInPixels:Size { get { return Size(width:Float(frame.size.width * contentScaleFactor), height:Float(frame.size.height * contentScaleFactor))}} diff --git a/framework/Tests/Pipeline_Tests.swift b/framework/Tests/Pipeline_Tests.swift index 23d040bd..4d7d7225 100755 --- a/framework/Tests/Pipeline_Tests.swift +++ b/framework/Tests/Pipeline_Tests.swift @@ -14,7 +14,7 @@ class FakeOperation: ImageProcessingOperation { func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { } - func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { } } @@ -29,7 +29,7 @@ class FakeRenderView: ImageConsumer { class FakeCamera: ImageSource { let targets = TargetContainer() - func transmitPreviousImageToTarget(_ target:ImageConsumer, atIndex:UInt) { + func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { } func newCameraFrame() { From 9152a1f3aca64a2a7810081baae04d964a9267ec Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 27 Jul 2016 16:54:56 -0500 Subject: [PATCH 09/14] Updated the shader converter script to Swift 3. 
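The conversion is almost entirely a matter of adopting the Swift 3 Foundation renames inside the script's embedded Swift: value-type URL in place of NSURL, FileManager.default in place of NSFileManager.defaultManager(), and the lowercased()/replacingOccurrences(of:with:) spellings. A minimal sketch of the new idioms, using a hypothetical shader file name (illustrative only, not code from the script):

    import Foundation

    // Swift 3: value-type URL; path components are plain, non-optional Strings.
    let pathURL = URL(fileURLWithPath: "GaussianBlur_GL.fsh") // hypothetical file name
    let baseName = pathURL.deletingPathExtension().lastPathComponent // "GaussianBlur_GL"

    // Swift 3: lowercased() and replacingOccurrences(of:with:) replace the
    // lowercaseString and stringByReplacingOccurrencesOfString spellings.
    let suffix = pathURL.pathExtension.lowercased() == "vsh" ? "VertexShader" : "FragmentShader"
    let convertedName = baseName.replacingOccurrences(of: "_GL", with: "") + suffix

    // Swift 3: FileManager.default is a property, not a defaultManager() call.
    if FileManager.default.fileExists(atPath: pathURL.path) {
        print("Would convert \(pathURL.path) to \(convertedName)")
    }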
--- framework/Source/Mac/PictureOutput.swift | 17 ++++++++++--- .../Operations/Shaders/ShaderConverter.sh | 24 +++++++++---------- framework/Source/SerialDispatch.swift | 4 ++++ framework/Source/ShaderProgram.swift | 3 +++ 4 files changed, 33 insertions(+), 15 deletions(-) diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 0a0f8773..d6e533ea 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -28,7 +28,9 @@ public class PictureOutput: ImageConsumer { self.url = url // Create an intentional short-term retain cycle to prevent deallocation before next frame is captured encodedImageAvailableCallback = {imageData in do { +// FIXME: Xcode 8 beta 2 try imageData.write(to: self.url, options:.atomic) +// try imageData.write(to: self.url, options:NSData.WritingOptions.dataWritingAtomic) } catch { // TODO: Handle this better print("WARNING: Couldn't save image with error:\(error)") @@ -41,8 +43,17 @@ public class PictureOutput: ImageConsumer { let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:framebuffer.size) renderFramebuffer.lock() renderFramebuffer.activateFramebufferForRendering() - clearFramebufferWithColor(Color.red) + clearFramebufferWithColor(Color.transparent) + + // Need the blending here to enable non-1.0 alpha on output image + glBlendEquation(GLenum(GL_FUNC_ADD)) + glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE)) + glEnable(GLenum(GL_BLEND)) + renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertices:standardImageVertices, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)]) + + glDisable(GLenum(GL_BLEND)) + framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) @@ -51,8 +62,8 @@ public class PictureOutput: ImageConsumer { renderFramebuffer.unlock() guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not create CGDataProvider")} let defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB() - return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent: 8, bitsPerPixel: 32, bytesPerRow: 4 * Int(framebuffer.size.width), space: defaultRGBColorSpace, bitmapInfo: CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider: dataProvider, decode: nil, shouldInterpolate: false, intent: .defaultIntent)! - + + return CGImage(width: Int(framebuffer.size.width), height: Int(framebuffer.size.height), bitsPerComponent:8, bitsPerPixel:32, bytesPerRow:4 * Int(framebuffer.size.width), space:defaultRGBColorSpace, bitmapInfo:CGBitmapInfo() /*| CGImageAlphaInfo.Last*/, provider:dataProvider, decode:nil, shouldInterpolate:false, intent:.defaultIntent)! 
} public func newFramebufferAvailable(_ framebuffer:Framebuffer, fromSourceIndex:UInt) { diff --git a/framework/Source/Operations/Shaders/ShaderConverter.sh b/framework/Source/Operations/Shaders/ShaderConverter.sh index 6dc09d93..d0a95b6b 100755 --- a/framework/Source/Operations/Shaders/ShaderConverter.sh +++ b/framework/Source/Operations/Shaders/ShaderConverter.sh @@ -15,19 +15,19 @@ var allConvertedGLShaders = "" var allConvertedGLESShaders = "" for fileName in fileNames { - let pathURL = NSURL(fileURLWithPath:fileName) + let pathURL = URL(fileURLWithPath:fileName) guard let pathExtension = pathURL.pathExtension else {continue} - guard let baseName = pathURL.URLByDeletingPathExtension?.lastPathComponent else {continue} + guard let baseName = try! pathURL.deletingPathExtension().lastPathComponent else {continue} - guard (NSFileManager.defaultManager().fileExistsAtPath(pathURL.path!)) else { + guard (FileManager.default.fileExists(atPath:pathURL.path!)) else { print("Error: file \"\(fileName)\" could not be found.") continue } let shaderSuffix:String - if (pathExtension.lowercaseString == "vsh") { + if (pathExtension.lowercased() == "vsh") { shaderSuffix = "VertexShader" - } else if (pathExtension.lowercaseString == "fsh") { + } else if (pathExtension.lowercased() == "fsh") { shaderSuffix = "FragmentShader" } else { continue @@ -36,10 +36,10 @@ for fileName in fileNames { let convertedShaderName:String let shaderPlatform:OpenGLPlatform if baseName.hasSuffix("_GLES") { - convertedShaderName = "\(baseName.stringByReplacingOccurrencesOfString("_GLES", withString:""))\(shaderSuffix)" + convertedShaderName = "\(baseName.replacingOccurrences(of:"_GLES", with:""))\(shaderSuffix)" shaderPlatform = .OpenGLES } else if baseName.hasSuffix("_GL") { - convertedShaderName = "\(baseName.stringByReplacingOccurrencesOfString("_GL", withString:""))\(shaderSuffix)" + convertedShaderName = "\(baseName.replacingOccurrences(of:"_GL", with:""))\(shaderSuffix)" shaderPlatform = .OpenGL } else { convertedShaderName = "\(baseName)\(shaderSuffix)" @@ -47,9 +47,9 @@ for fileName in fileNames { } var accumulatedString = "public let \(convertedShaderName) = \"" - let fileContents = try String(contentsOfFile:fileName, encoding:NSASCIIStringEncoding) + let fileContents = try String(contentsOfFile:fileName, encoding:String.Encoding.ascii) fileContents.enumerateLines {line, stop in - accumulatedString += "\(line.stringByReplacingOccurrencesOfString("\"", withString:"\\\""))\\n " + accumulatedString += "\(line.replacingOccurrences(of:"\"", with:"\\\""))\\n " } accumulatedString += "\"\n" @@ -62,6 +62,6 @@ for fileName in fileNames { } } -let scriptURL = NSURL(fileURLWithPath:Process.arguments.first!) -try allConvertedGLShaders.writeToURL(scriptURL.URLByDeletingLastPathComponent!.URLByAppendingPathComponent("ConvertedShaders_GL.swift"), atomically:true, encoding:NSASCIIStringEncoding) -try allConvertedGLESShaders.writeToURL(scriptURL.URLByDeletingLastPathComponent!.URLByAppendingPathComponent("ConvertedShaders_GLES.swift"), atomically:true, encoding:NSASCIIStringEncoding) +let scriptURL = URL(fileURLWithPath:Process.arguments.first!) 
+try allConvertedGLShaders.write(to:scriptURL.deletingLastPathComponent().appendingPathComponent("ConvertedShaders_GL.swift"), atomically:true, encoding:String.Encoding.ascii) +try allConvertedGLESShaders.write(to:scriptURL.deletingLastPathComponent().appendingPathComponent("ConvertedShaders_GLES.swift"), atomically:true, encoding:String.Encoding.ascii) diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 791451b6..826de388 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -35,7 +35,9 @@ public let lowProcessingQueuePriority:DispatchQueue.GlobalAttributes = { }() func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { +// FIXME: Xcode 8 beta 2 if (Thread.isMainThread) { +// if (Thread.isMainThread()) { mainThreadOperation() } else { DispatchQueue.main.async(execute:mainThreadOperation) @@ -43,7 +45,9 @@ func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { } func runOnMainQueue(_ mainThreadOperation:() -> ()) { +// FIXME: Xcode 8 beta 2 if (Thread.isMainThread) { +// if (Thread.isMainThread()) { mainThreadOperation() } else { DispatchQueue.main.sync(execute:mainThreadOperation) diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 09e9bd93..25369b3d 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -255,7 +255,10 @@ public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() t public func shaderFromFile(_ file:URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) +// FIXME: Xcode 8 beta 2 guard (FileManager.default.fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} +// guard (FileManager.default().fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + let fragmentShaderString = try NSString(contentsOfFile:file.path!, encoding:String.Encoding.ascii.rawValue) return String(fragmentShaderString) From 6295ec6bd4235500f73b69b0d7a8276f468bebec Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Tue, 2 Aug 2016 15:51:22 -0500 Subject: [PATCH 10/14] Updated for Xcode 8 beta 4. 
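Most of the churn in this patch is mechanical: ErrorProtocol becomes Error, enum cases take a leading dot inside switch statements, `where` clauses in conditional bindings become commas, and the raw pointer APIs gain explicit capacity labels. A self-contained sketch of the beta 4 spellings this patch adopts (the names here are illustrative, not taken from the framework):

    import Foundation

    struct ExampleError: Error { // was ErrorProtocol before beta 4
        let message: String
    }

    // `if let ... where condition` becomes `if let ..., condition`:
    let maybeCount: Int? = 4
    if let count = maybeCount, count > 0 {
        print("count is \(count)")
    }

    // Pointers now spell out capacity at both ends of their lifetime:
    let byteCount = 16
    let buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: byteCount)
    buffer.initialize(to: 0, count: byteCount)
    buffer.deallocate(capacity: byteCount) // the Swift 3 spelling used throughout this patch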
--- .../GPUImage-Mac.xcodeproj/project.pbxproj | 2 ++ .../GPUImage-iOS.xcodeproj/project.pbxproj | 3 +- framework/Source/BasicOperation.swift | 2 +- framework/Source/FillMode.swift | 6 ++-- framework/Source/Framebuffer.swift | 34 +++++++++---------- framework/Source/ImageOrientation.swift | 2 +- framework/Source/Mac/Camera.swift | 6 ++-- framework/Source/Mac/MovieInput.swift | 2 +- framework/Source/Mac/MovieOutput.swift | 2 +- framework/Source/Mac/PictureInput.swift | 4 +-- framework/Source/Mac/PictureOutput.swift | 4 +-- framework/Source/Operations/Crop.swift | 2 +- .../Operations/HarrisCornerDetector.swift | 4 +-- framework/Source/Operations/Histogram.swift | 4 +-- framework/Source/SerialDispatch.swift | 15 ++++---- framework/Source/ShaderProgram.swift | 10 +++--- framework/Source/iOS/Camera.swift | 8 ++--- framework/Source/iOS/MovieInput.swift | 2 +- framework/Source/iOS/PictureInput.swift | 4 +-- framework/Source/iOS/PictureOutput.swift | 4 +-- framework/Source/iOS/RenderView.swift | 8 +++-- 21 files changed, 66 insertions(+), 62 deletions(-) diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index b30a723c..9bbc7fb6 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -1262,6 +1262,7 @@ OTHER_SWIFT_FLAGS = "-DGL"; SDKROOT = macosx; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; }; @@ -1302,6 +1303,7 @@ MTL_ENABLE_DEBUG_INFO = NO; OTHER_SWIFT_FLAGS = "-DGL"; SDKROOT = macosx; + SWIFT_VERSION = 3.0; VERSIONING_SYSTEM = "apple-generic"; VERSION_INFO_PREFIX = ""; }; diff --git a/framework/GPUImage-iOS.xcodeproj/project.pbxproj b/framework/GPUImage-iOS.xcodeproj/project.pbxproj index 81b0ef35..5dbe8edc 100755 --- a/framework/GPUImage-iOS.xcodeproj/project.pbxproj +++ b/framework/GPUImage-iOS.xcodeproj/project.pbxproj @@ -1277,7 +1277,7 @@ MTL_ENABLE_DEBUG_INFO = NO; OTHER_SWIFT_FLAGS = "-DGLES"; SDKROOT = iphoneos; - SWIFT_OPTIMIZATION_LEVEL = "-O"; + SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; TARGETED_DEVICE_FAMILY = "1,2"; VALIDATE_PRODUCT = YES; VERSIONING_SYSTEM = "apple-generic"; @@ -1324,7 +1324,6 @@ PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE = ""; SKIP_INSTALL = YES; - SWIFT_OPTIMIZATION_LEVEL = "-O"; SWIFT_VERSION = 3.0; }; name = Release; diff --git a/framework/Source/BasicOperation.swift b/framework/Source/BasicOperation.swift index c68eec04..eae70a74 100755 --- a/framework/Source/BasicOperation.swift +++ b/framework/Source/BasicOperation.swift @@ -184,7 +184,7 @@ public class BasicOperation: ImageProcessingOperation { public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { sharedImageProcessingContext.runOperationAsynchronously{ - guard let renderFramebuffer = self.renderFramebuffer where (!renderFramebuffer.timingStyle.isTransient()) else { return } + guard let renderFramebuffer = self.renderFramebuffer, (!renderFramebuffer.timingStyle.isTransient()) else { return } renderFramebuffer.lock() target.newFramebufferAvailable(renderFramebuffer, fromSourceIndex:atIndex) diff --git a/framework/Source/FillMode.swift b/framework/Source/FillMode.swift index 8189ddff..650e9c96 100644 --- a/framework/Source/FillMode.swift +++ b/framework/Source/FillMode.swift @@ -28,8 +28,8 @@ public enum FillMode { let yRatio:GLfloat let xRatio:GLfloat switch self { - case stretch: return vertices - case preserveAspectRatio: + case .stretch: return vertices + case 
.preserveAspectRatio: if (aspectRatio > targetAspectRatio) { yRatio = 1.0 // xRatio = (GLfloat(toFitSize.height) / GLfloat(fromInputSize.height)) * (GLfloat(fromInputSize.width) / GLfloat(toFitSize.width)) @@ -38,7 +38,7 @@ public enum FillMode { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) } - case preserveAspectRatioAndFill: + case .preserveAspectRatioAndFill: if (aspectRatio > targetAspectRatio) { xRatio = 1.0 yRatio = (GLfloat(fromInputSize.height) / GLfloat(toFitSize.height)) * (GLfloat(toFitSize.width) / GLfloat(fromInputSize.width)) diff --git a/framework/Source/Framebuffer.swift b/framework/Source/Framebuffer.swift index 70872ac3..07a3784a 100755 --- a/framework/Source/Framebuffer.swift +++ b/framework/Source/Framebuffer.swift @@ -17,7 +17,7 @@ import Glibc import Foundation // TODO: Add a good lookup table to this to allow for detailed error messages -struct FramebufferCreationError:ErrorProtocol { +struct FramebufferCreationError:Error { let errorCode:GLenum } @@ -202,14 +202,14 @@ func hashForFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, extension Rotation { func textureCoordinates() -> [GLfloat] { switch self { - case noRotation: return [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0] - case rotateCounterclockwise: return [0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0] - case rotateClockwise: return [1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0] - case rotate180: return [1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0] - case flipHorizontally: return [1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] - case flipVertically: return [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0] - case rotateClockwiseAndFlipVertically: return [0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0] - case rotateClockwiseAndFlipHorizontally: return [1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0] + case .noRotation: return [0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0] + case .rotateCounterclockwise: return [0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0] + case .rotateClockwise: return [1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0] + case .rotate180: return [1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0] + case .flipHorizontally: return [1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0] + case .flipVertically: return [0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0] + case .rotateClockwiseAndFlipVertically: return [0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0] + case .rotateClockwiseAndFlipHorizontally: return [1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0] } } @@ -220,14 +220,14 @@ extension Rotation { let maxY = GLfloat(offsetFromOrigin.y) + GLfloat(cropSize.height) switch self { - case noRotation: return [minX, minY, maxX, minY, minX, maxY, maxX, maxY] - case rotateCounterclockwise: return [minX, maxY, minX, minY, maxX, maxY, maxX, minY] - case rotateClockwise: return [maxX, minY, maxX, maxY, minX, minY, minX, maxY] - case rotate180: return [maxX, maxY, minX, maxY, maxX, minY, minX, minY] - case flipHorizontally: return [maxX, minY, minX, minY, maxX, maxY, minX, maxY] - case flipVertically: return [minX, maxY, maxX, maxY, minX, minY, maxX, minY] - case rotateClockwiseAndFlipVertically: return [minX, minY, minX, maxY, maxX, minY, maxX, maxY] - case rotateClockwiseAndFlipHorizontally: return [maxX, maxY, maxX, minY, minX, maxY, minX, minY] + case .noRotation: return [minX, minY, maxX, minY, minX, maxY, maxX, maxY] + case .rotateCounterclockwise: return [minX, maxY, minX, minY, maxX, maxY, maxX, minY] + case .rotateClockwise: return [maxX, minY, maxX, maxY, minX, minY, minX, maxY] + case .rotate180: return 
[maxX, maxY, minX, maxY, maxX, minY, minX, minY] + case .flipHorizontally: return [maxX, minY, minX, minY, maxX, maxY, minX, maxY] + case .flipVertically: return [minX, maxY, maxX, maxY, minX, minY, maxX, minY] + case .rotateClockwiseAndFlipVertically: return [minX, minY, minX, maxY, maxX, minY, maxX, maxY] + case .rotateClockwiseAndFlipHorizontally: return [maxX, maxY, maxX, minY, minX, maxY, minX, minY] } } } diff --git a/framework/Source/ImageOrientation.swift b/framework/Source/ImageOrientation.swift index 09ae3b44..59013707 100644 --- a/framework/Source/ImageOrientation.swift +++ b/framework/Source/ImageOrientation.swift @@ -6,7 +6,7 @@ public enum ImageOrientation { func rotationNeededForOrientation(_ targetOrientation:ImageOrientation) -> Rotation { switch (self, targetOrientation) { - case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (landscapeRight, landscapeRight): return .noRotation + case (.portrait, .portrait), (.portraitUpsideDown, .portraitUpsideDown), (.landscapeLeft, .landscapeLeft), (.landscapeRight, .landscapeRight): return .noRotation case (.portrait, .portraitUpsideDown): return .rotate180 case (.portraitUpsideDown, .portrait): return .rotate180 case (.portrait, .landscapeLeft): return .rotateCounterclockwise diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index f1db555e..5a1d8f56 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -35,8 +35,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(attributes:standardProcessingQueuePriority) - let audioProcessingQueue = DispatchQueue.global(attributes:lowProcessingQueuePriority) + let cameraProcessingQueue = DispatchQueue.global(priority:standardProcessingQueuePriority) + let audioProcessingQueue = DispatchQueue.global(priority:lowProcessingQueuePriority) var numberOfFramesCaptured = 0 var totalFrameTimeDuringCapture:Double = 0.0 @@ -114,7 +114,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer return } - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.Success) else { return } + guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } let startTime = CFAbsoluteTimeGetCurrent() let cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer)! 
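An aside on the Camera changes above: beta 4 renames DispatchQueue.GlobalAttributes to DispatchQueue.GlobalQueuePriority and lowercases DispatchTimeoutResult.Success, so the camera's non-blocking frame gate now compares against .success. The wrapper below is a hedged sketch of that pattern with illustrative names, not an excerpt from Camera.swift:

    import Dispatch

    let frameRenderingSemaphore = DispatchSemaphore(value: 1)

    func handleFrame(_ render: () -> ()) {
        // If the previous frame is still rendering, drop this one instead of queueing it.
        guard frameRenderingSemaphore.wait(timeout: DispatchTime.now()) == DispatchTimeoutResult.success else { return }
        render()
        frameRenderingSemaphore.signal()
    }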
diff --git a/framework/Source/Mac/MovieInput.swift b/framework/Source/Mac/MovieInput.swift index 1f9daba8..ec6cec15 100644 --- a/framework/Source/Mac/MovieInput.swift +++ b/framework/Source/Mac/MovieInput.swift @@ -44,7 +44,7 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys: ["tracks"], completionHandler: { - DispatchQueue.global(attributes:standardProcessingQueuePriority).async { + DispatchQueue.global(priority:standardProcessingQueuePriority).async { guard (self.asset.statusOfValue(forKey:"tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index 31c7b95e..05df38d3 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -117,7 +117,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { var pixelBufferFromPool:CVPixelBuffer? = nil let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBufferFromPool) - guard let pixelBuffer = pixelBufferFromPool where (pixelBufferStatus == kCVReturnSuccess) else { return } + guard let pixelBuffer = pixelBufferFromPool, (pixelBufferStatus == kCVReturnSuccess) else { return } diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index 3c2e2f61..b97d6052 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -77,7 +77,7 @@ public class PictureInput: ImageSource { if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer(allocatingCapacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() @@ -112,7 +112,7 @@ public class PictureInput: ImageSource { glBindTexture(GLenum(GL_TEXTURE_2D), 0) if (shouldRedrawUsingCoreGraphics) { - imageData.deallocateCapacity(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) } } diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index d6e533ea..2f21f1df 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -57,7 +57,7 @@ public class PictureOutput: ImageConsumer { framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer(allocatingCapacity: imageByteSize) + let data = UnsafeMutablePointer.allocate(capacity:imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() guard let dataProvider = CGDataProvider(dataInfo: nil, data: data, size: imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not create CGDataProvider")} @@ -127,5 +127,5 @@ public extension NSImage { // Why are these flipped in the callback definition? 
func dataProviderReleaseCallback(_ context:UnsafeMutablePointer?, data:UnsafePointer, size:Int) { - UnsafeMutablePointer(data).deallocateCapacity(size) + UnsafeMutablePointer(data).deallocate(capacity:size) } diff --git a/framework/Source/Operations/Crop.swift b/framework/Source/Operations/Crop.swift index b8b3bbd0..0152158d 100644 --- a/framework/Source/Operations/Crop.swift +++ b/framework/Source/Operations/Crop.swift @@ -15,7 +15,7 @@ public class Crop: BasicOperation { let finalCropSize:GLSize let normalizedOffsetFromOrigin:Position - if let cropSize = cropSizeInPixels, locationOfCrop = locationOfCropInPixels { + if let cropSize = cropSizeInPixels, let locationOfCrop = locationOfCropInPixels { let glCropSize = GLSize(cropSize) finalCropSize = GLSize(width:min(inputSize.width, glCropSize.width), height:min(inputSize.height, glCropSize.height)) normalizedOffsetFromOrigin = Position(locationOfCrop.x / Float(inputSize.width), locationOfCrop.y / Float(inputSize.height)) diff --git a/framework/Source/Operations/HarrisCornerDetector.swift b/framework/Source/Operations/HarrisCornerDetector.swift index e64fa0ed..fa067b55 100644 --- a/framework/Source/Operations/HarrisCornerDetector.swift +++ b/framework/Source/Operations/HarrisCornerDetector.swift @@ -62,7 +62,7 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { // let startTime = CFAbsoluteTimeGetCurrent() - let rawImagePixels = UnsafeMutablePointer(allocatingCapacity: imageByteSize) + let rawImagePixels = UnsafeMutablePointer.allocate(capacity:imageByteSize) // -Onone, [UInt8] array: 30 ms for 720p frame on Retina iMac // -O, [UInt8] array: 4 ms for 720p frame on Retina iMac // -Onone, UnsafeMutablePointer: 7 ms for 720p frame on Retina iMac @@ -88,7 +88,7 @@ func extractCornersFromImage(_ framebuffer:Framebuffer) -> [Position] { currentByte += 4 } - rawImagePixels.deallocateCapacity(imageByteSize) + rawImagePixels.deallocate(capacity:imageByteSize) // print("Harris extraction frame time: \(CFAbsoluteTimeGetCurrent() - startTime)") diff --git a/framework/Source/Operations/Histogram.swift b/framework/Source/Operations/Histogram.swift index 1c98d3b2..4a926748 100755 --- a/framework/Source/Operations/Histogram.swift +++ b/framework/Source/Operations/Histogram.swift @@ -53,7 +53,7 @@ public class Histogram: BasicOperation { override func renderFrame() { let inputSize = sizeOfInitialStageBasedOnFramebuffer(inputFramebuffers[0]!) 
let inputByteSize = Int(inputSize.width * inputSize.height * 4) - let data = UnsafeMutablePointer(allocatingCapacity:inputByteSize) + let data = UnsafeMutablePointer.allocate(capacity:inputByteSize) glReadPixels(0, 0, inputSize.width, inputSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:256, height:3), stencil:mask != nil) @@ -86,6 +86,6 @@ public class Histogram: BasicOperation { } glDisable(GLenum(GL_BLEND)) - data.deallocateCapacity(inputByteSize) + data.deallocate(capacity:inputByteSize) } } diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 826de388..c2e17dab 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -18,19 +18,20 @@ extension SerialDispatch { #else -public let standardProcessingQueuePriority:DispatchQueue.GlobalAttributes = { +public let standardProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { + // DispatchQueue.QoSClass.default if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalAttributes.qosDefault + return DispatchQueue.GlobalQueuePriority.default } else { - return DispatchQueue.GlobalAttributes.priorityDefault + return DispatchQueue.GlobalQueuePriority.default } }() -public let lowProcessingQueuePriority:DispatchQueue.GlobalAttributes = { +public let lowProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { if #available(iOS 10, OSX 10.10, *) { - return DispatchQueue.GlobalAttributes.qosBackground + return DispatchQueue.GlobalQueuePriority.low } else { - return DispatchQueue.GlobalAttributes.priorityLow + return DispatchQueue.GlobalQueuePriority.low } }() @@ -92,7 +93,7 @@ public extension SerialDispatch { } public func runOperationSynchronously(_ operation:() throws -> ()) throws { - var caughtError:ErrorProtocol? = nil + var caughtError:Error? 
= nil runOperationSynchronously { do { try operation() diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 25369b3d..63ace397 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -15,7 +15,7 @@ import Foundation -struct ShaderCompileError:ErrorProtocol { +struct ShaderCompileError:Error { let compileLog:String } @@ -148,7 +148,7 @@ public class ShaderProgram { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform] where previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ } else { if (value.count == 2) { glUniform2fv(uniformAddress, 1, value) @@ -168,7 +168,7 @@ public class ShaderProgram { debugPrint("Warning: Tried to set a uniform (\(forUniform)) that was missing or optimized out by the compiler") return } - if let previousValue = currentUniformFloatArrayValues[forUniform] where previousValue == value{ + if let previousValue = currentUniformFloatArrayValues[forUniform], previousValue == value{ } else { if (value.count == 9) { glUniformMatrix3fv(uniformAddress, 1, GLboolean(GL_FALSE), value) @@ -256,10 +256,10 @@ public func shaderFromFile(_ file:URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) // FIXME: Xcode 8 beta 2 - guard (FileManager.default.fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard (FileManager.default.fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} // guard (FileManager.default().fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} - let fragmentShaderString = try NSString(contentsOfFile:file.path!, encoding:String.Encoding.ascii.rawValue) + let fragmentShaderString = try NSString(contentsOfFile:file.path, encoding:String.Encoding.ascii.rawValue) return String(fragmentShaderString) } diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index 63cd6939..e6a32fd1 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -35,7 +35,7 @@ public enum PhysicalCameraLocation { } } -struct CameraError: ErrorProtocol { +struct CameraError: Error { } let initialBenchmarkFramesToIgnore = 5 @@ -77,8 +77,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer let captureAsYUV:Bool let yuvConversionShader:ShaderProgram? 
let frameRenderingSemaphore = DispatchSemaphore(value:1) - let cameraProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.qosUserInitiated) - let audioProcessingQueue = DispatchQueue.global(attributes:DispatchQueue.GlobalAttributes.qosUtility) + let cameraProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) + let audioProcessingQueue = DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default) let framesToIgnore = 5 var numberOfFramesCaptured = 0 @@ -172,7 +172,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer return } - guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.Success) else { return } + guard (frameRenderingSemaphore.wait(timeout:DispatchTime.now()) == DispatchTimeoutResult.success) else { return } let startTime = CFAbsoluteTimeGetCurrent() diff --git a/framework/Source/iOS/MovieInput.swift b/framework/Source/iOS/MovieInput.swift index 3c4a1d6e..8db20754 100644 --- a/framework/Source/iOS/MovieInput.swift +++ b/framework/Source/iOS/MovieInput.swift @@ -44,7 +44,7 @@ public class MovieInput: ImageSource { public func start() { asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{ - DispatchQueue.global(attributes: DispatchQueue.GlobalAttributes.qosDefault).async(execute: { + DispatchQueue.global(priority:DispatchQueue.GlobalQueuePriority.default).async(execute: { guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return } guard self.assetReader.startReading() else { diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index ee27ae08..3b22a887 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -77,7 +77,7 @@ public class PictureInput: ImageSource { if (shouldRedrawUsingCoreGraphics) { // For resized or incompatible image: redraw - imageData = UnsafeMutablePointer(allocatingCapacity: Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData = UnsafeMutablePointer.allocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) let genericRGBColorspace = CGColorSpaceCreateDeviceRGB() @@ -112,7 +112,7 @@ public class PictureInput: ImageSource { } if (shouldRedrawUsingCoreGraphics) { - imageData.deallocateCapacity(Int(widthToUseForTexture * heightToUseForTexture) * 4) + imageData.deallocate(capacity:Int(widthToUseForTexture * heightToUseForTexture) * 4) } } diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 34c4a169..96310cf9 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -46,7 +46,7 @@ public class PictureOutput: ImageConsumer { framebuffer.unlock() let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) - let data = UnsafeMutablePointer(allocatingCapacity: imageByteSize) + let data = UnsafeMutablePointer.allocate(capacity: imageByteSize) glReadPixels(0, 0, framebuffer.size.width, framebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), data) renderFramebuffer.unlock() guard let dataProvider = CGDataProvider(dataInfo:nil, data:data, size:imageByteSize, releaseData: dataProviderReleaseCallback) else {fatalError("Could not allocate a CGDataProvider")} @@ -117,5 +117,5 @@ public extension UIImage { // Why are these flipped in the callback definition? 
func dataProviderReleaseCallback(_ context:UnsafeMutablePointer?, data:UnsafePointer, size:Int) { - UnsafeMutablePointer(data).deallocateCapacity(size) + UnsafeMutablePointer(data).deallocate(capacity:size) } diff --git a/framework/Source/iOS/RenderView.swift b/framework/Source/iOS/RenderView.swift index 3c911b4a..3bc4f382 100755 --- a/framework/Source/iOS/RenderView.swift +++ b/framework/Source/iOS/RenderView.swift @@ -30,12 +30,14 @@ public class RenderView:UIView, ImageConsumer { self.commonInit() } - override public class func layerClass() -> AnyClass { - return CAEAGLLayer.self + override public class var layerClass:Swift.AnyClass { + get { + return CAEAGLLayer.self + } } func commonInit() { - self.contentScaleFactor = UIScreen.main().scale + self.contentScaleFactor = UIScreen.main.scale let eaglLayer = self.layer as! CAEAGLLayer eaglLayer.isOpaque = true From 6e5a7777880a5f72b2591c46ea766c5eb6949b2d Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Fri, 5 Aug 2016 16:36:36 -0500 Subject: [PATCH 11/14] Updating to match latest OSS Swift compiler. --- framework/Source/FramebufferCache.swift | 2 +- framework/Source/Mac/Camera.swift | 6 +++--- framework/Source/Mac/MovieOutput.swift | 2 +- framework/Source/Mac/OpenGLContext.swift | 2 +- framework/Source/Mac/PictureInput.swift | 4 ++-- framework/Source/Mac/PictureOutput.swift | 10 ++++++---- framework/Source/OpenGLContext_Shared.swift | 2 +- framework/Source/OperationGroup.swift | 4 ++-- framework/Source/Operations/MotionDetector.swift | 4 ++-- framework/Source/Pipeline.swift | 7 +++++-- framework/Source/SerialDispatch.swift | 12 ++++++------ framework/Source/ShaderProgram.swift | 2 +- 12 files changed, 31 insertions(+), 26 deletions(-) diff --git a/framework/Source/FramebufferCache.swift b/framework/Source/FramebufferCache.swift index 9482aed9..f62575c7 100755 --- a/framework/Source/FramebufferCache.swift +++ b/framework/Source/FramebufferCache.swift @@ -25,7 +25,7 @@ public class FramebufferCache { public func requestFramebufferWithProperties(orientation:ImageOrientation, size:GLSize, textureOnly:Bool = false, minFilter:Int32 = GL_LINEAR, magFilter:Int32 = GL_LINEAR, wrapS:Int32 = GL_CLAMP_TO_EDGE, wrapT:Int32 = GL_CLAMP_TO_EDGE, internalFormat:Int32 = GL_RGBA, format:Int32 = GL_BGRA, type:Int32 = GL_UNSIGNED_BYTE, stencil:Bool = false) -> Framebuffer { let hash = hashForFramebufferWithProperties(orientation:orientation, size:size, textureOnly:textureOnly, minFilter:minFilter, magFilter:magFilter, wrapS:wrapS, wrapT:wrapT, internalFormat:internalFormat, format:format, type:type, stencil:stencil) let framebuffer:Framebuffer - if (framebufferCache[hash]?.count > 0) { + if ((framebufferCache[hash]?.count ?? 
-1) > 0) { // print("Restoring previous framebuffer") framebuffer = framebufferCache[hash]!.removeLast() framebuffer.orientation = orientation diff --git a/framework/Source/Mac/Camera.swift b/framework/Source/Mac/Camera.swift index 5a1d8f56..b8f02ae6 100755 --- a/framework/Source/Mac/Camera.swift +++ b/framework/Source/Mac/Camera.swift @@ -79,14 +79,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] } if (captureSession.canAddOutput(videoOutput)) { diff --git a/framework/Source/Mac/MovieOutput.swift b/framework/Source/Mac/MovieOutput.swift index 05df38d3..3377dbc3 100644 --- a/framework/Source/Mac/MovieOutput.swift +++ b/framework/Source/Mac/MovieOutput.swift @@ -37,7 +37,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? 
AVVideoCodecH264 as NSString assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo diff --git a/framework/Source/Mac/OpenGLContext.swift b/framework/Source/Mac/OpenGLContext.swift index 49416c93..8eaf7fb8 100755 --- a/framework/Source/Mac/OpenGLContext.swift +++ b/framework/Source/Mac/OpenGLContext.swift @@ -77,7 +77,7 @@ public class OpenGLContext: SerialDispatch { lazy var extensionString:String = { return self.runOperationSynchronously{ self.makeCurrentContext() - return String(cString:UnsafePointer(glGetString(GLenum(GL_EXTENSIONS)))) + return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer.self)) } }() } diff --git a/framework/Source/Mac/PictureInput.swift b/framework/Source/Mac/PictureInput.swift index b97d6052..fae3511d 100755 --- a/framework/Source/Mac/PictureInput.swift +++ b/framework/Source/Mac/PictureInput.swift @@ -83,11 +83,11 @@ public class PictureInput: ImageSource { let imageContext = CGContext(data:imageData, width:Int(widthToUseForTexture), height:Int(heightToUseForTexture), bitsPerComponent:8, bytesPerRow:Int(widthToUseForTexture) * 4, space:genericRGBColorspace, bitmapInfo:CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture)), image:image) + imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer(CFDataGetBytePtr(dataFromImageDataProvider)) + imageData = UnsafeMutablePointer(mutating:CFDataGetBytePtr(dataFromImageDataProvider)!) } sharedImageProcessingContext.makeCurrentContext() diff --git a/framework/Source/Mac/PictureOutput.swift b/framework/Source/Mac/PictureOutput.swift index 2f21f1df..202b235a 100644 --- a/framework/Source/Mac/PictureOutput.swift +++ b/framework/Source/Mac/PictureOutput.swift @@ -111,7 +111,7 @@ public extension NSImage { } } - public func filterWithPipeline(_ pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> NSImage { + public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> NSImage { let picture = PictureInput(image:self) var outputImage:NSImage? let pictureOutput = PictureOutput() @@ -119,13 +119,15 @@ public extension NSImage { pictureOutput.imageAvailableCallback = {image in outputImage = image } - pipeline(input:picture, output:pictureOutput) + pipeline(picture, pictureOutput) picture.processImage(synchronously:true) return outputImage! } } // Why are these flipped in the callback definition? 
-func dataProviderReleaseCallback(_ context:UnsafeMutablePointer<Void>?, data:UnsafePointer<Void>, size:Int) { - UnsafeMutablePointer<UInt8>(data).deallocate(capacity:size) +func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { +// UnsafeMutablePointer<UInt8>(data).deallocate(capacity:size) + // FIXME: Verify this is correct + data.deallocate(bytes:size, alignedTo:1) } diff --git a/framework/Source/OpenGLContext_Shared.swift b/framework/Source/OpenGLContext_Shared.swift index 3f108752..4d0b37f4 100755 --- a/framework/Source/OpenGLContext_Shared.swift +++ b/framework/Source/OpenGLContext_Shared.swift @@ -85,6 +85,6 @@ extension OpenGLContext { @_semantics("sil.optimize.never") public func debugPrint(_ stringToPrint:String, file: StaticString = #file, line: UInt = #line, function: StaticString = #function) { #if DEBUG - print("\(stringToPrint) --> \((String(file) as NSString).lastPathComponent): \(function): \(line)") + print("\(stringToPrint) --> \((String(describing:file) as NSString).lastPathComponent): \(function): \(line)") #endif } diff --git a/framework/Source/OperationGroup.swift b/framework/Source/OperationGroup.swift index c75ac4d6..55888b57 100644 --- a/framework/Source/OperationGroup.swift +++ b/framework/Source/OperationGroup.swift @@ -13,8 +13,8 @@ public class OperationGroup: ImageProcessingOperation { inputImageRelay.newFramebufferAvailable(framebuffer, fromSourceIndex:fromSourceIndex) } - public func configureGroup(_ configurationOperation:(input:ImageRelay, output:ImageRelay) -> ()) { - configurationOperation(input:inputImageRelay, output:outputImageRelay) + public func configureGroup(_ configurationOperation:(_ input:ImageRelay, _ output:ImageRelay) -> ()) { + configurationOperation(inputImageRelay, outputImageRelay) } public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) { diff --git a/framework/Source/Operations/MotionDetector.swift b/framework/Source/Operations/MotionDetector.swift index 669c4deb..50c64030 100644 --- a/framework/Source/Operations/MotionDetector.swift +++ b/framework/Source/Operations/MotionDetector.swift @@ -1,6 +1,6 @@ public class MotionDetector: OperationGroup { public var lowPassStrength:Float = 1.0 { didSet {lowPassFilter.strength = lowPassStrength}} - public var motionDetectedCallback:((position:Position, strength:Float) -> ())? + public var motionDetectedCallback:((Position, Float) -> ())?
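// On the "FIXME: Verify this is correct" in Mac/PictureOutput.swift above: the call is
// correct only if it mirrors the original allocation's byte count and alignment. A sketch
// of the matched pair, assuming byte-wise allocation with alignment 1 (hypothetical
// helper names, not part of the patch):
//
//   func allocateImageBytes(count: Int) -> UnsafeMutableRawPointer {
//       // Alignment 1 at allocation time is what licenses alignedTo:1 at release time.
//       return UnsafeMutableRawPointer.allocate(bytes: count, alignedTo: 1)
//   }
//   func releaseImageBytes(_ data: UnsafeMutableRawPointer, count: Int) {
//       // Must match the allocation above in both size and alignment.
//       data.deallocate(bytes: count, alignedTo: 1)
//   }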
let lowPassFilter = LowPassFilter() let motionComparison = BasicOperation(fragmentShader:MotionComparisonFragmentShader, numberOfInputs:2) @@ -10,7 +10,7 @@ public class MotionDetector: OperationGroup { super.init() averageColorExtractor.extractedColorCallback = {[weak self] color in - self?.motionDetectedCallback?(position:Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), strength:color.alphaComponent) + self?.motionDetectedCallback?(Position(color.redComponent / color.alphaComponent, color.greenComponent / color.alphaComponent), color.alphaComponent) } self.configureGroup{input, output in diff --git a/framework/Source/Pipeline.swift b/framework/Source/Pipeline.swift index e045d084..71fbb4c4 100755 --- a/framework/Source/Pipeline.swift +++ b/framework/Source/Pipeline.swift @@ -17,8 +17,11 @@ public protocol ImageConsumer:AnyObject { public protocol ImageProcessingOperation: ImageConsumer, ImageSource { } -infix operator --> { associativity left precedence 140 } - +infix operator --> : AdditionPrecedence +//precedencegroup ProcessingOperationPrecedence { +// associativity: left +//// higherThan: Multiplicative +//} @discardableResult public func --><T:ImageConsumer>(source:ImageSource, destination:T) -> T { source.addTarget(destination) return destination diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index c2e17dab..2ef28ddf 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -35,10 +35,10 @@ public let lowProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { } }() -func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { +func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { // FIXME: Xcode 8 beta 2 - if (Thread.isMainThread) { -// if (Thread.isMainThread()) { +// if (Thread.isMainThread) { + if (Thread.isMainThread()) { mainThreadOperation() } else { DispatchQueue.main.async(execute:mainThreadOperation) @@ -47,8 +47,8 @@ func runAsynchronouslyOnMainQueue(_ mainThreadOperation:() -> ()) { func runOnMainQueue(_ mainThreadOperation:() -> ()) { // FIXME: Xcode 8 beta 2 - if (Thread.isMainThread) { -// if (Thread.isMainThread()) { +// if (Thread.isMainThread) { + if (Thread.isMainThread()) { mainThreadOperation() } else { DispatchQueue.main.sync(execute:mainThreadOperation) @@ -73,7 +73,7 @@ public protocol SerialDispatch { } public extension SerialDispatch { - public func runOperationAsynchronously(_ operation:() -> ()) { + public func runOperationAsynchronously(_ operation:@escaping () -> ()) { self.serialDispatchQueue.async { self.makeCurrentContext() operation() diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index 63ace397..dc335e1b 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -256,7 +256,7 @@ public func shaderFromFile(_ file:URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) // FIXME: Xcode 8 beta 2 - guard (FileManager.default.fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard (FileManager.default().fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} // guard (FileManager.default().fileExists(atPath: file.path!)) else { throw
ShaderCompileError(compileLog:"Shader file \(file) missing")} let fragmentShaderString = try NSString(contentsOfFile:file.path, encoding:String.Encoding.ascii.rawValue) From c9326aed2553f04753062d40efe4fa7e80e9e8c6 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Wed, 17 Aug 2016 11:17:12 -0500 Subject: [PATCH 12/14] Updated for Xcode 8 beta 6. --- framework/Source/OpenGLRendering.swift | 4 ++-- framework/Source/SerialDispatch.swift | 8 ++------ framework/Source/ShaderProgram.swift | 4 +--- 3 files changed, 5 insertions(+), 11 deletions(-) diff --git a/framework/Source/OpenGLRendering.swift b/framework/Source/OpenGLRendering.swift index f60e031d..506dc67b 100755 --- a/framework/Source/OpenGLRendering.swift +++ b/framework/Source/OpenGLRendering.swift @@ -206,7 +206,7 @@ extension String { #if os(Linux) // cStringUsingEncoding isn't yet defined in the Linux Foundation. // This approach is roughly 35X slower than the cStringUsingEncoding one. - let bufferCString = UnsafeMutablePointer<UInt8>.alloc(self.characters.count+1) + let bufferCString = UnsafeMutablePointer<UInt8>.allocate(capacity:self.characters.count+1) for (index, characterValue) in self.utf8.enumerate() { bufferCString[index] = characterValue } @@ -214,7 +214,7 @@ extension String { operation(UnsafePointer(bufferCString)) - bufferCString.dealloc(self.characters.count) + bufferCString.deallocate(capacity:self.characters.count) #else if let value = self.cString(using:String.Encoding.utf8) { operation(UnsafePointer(value)) diff --git a/framework/Source/SerialDispatch.swift b/framework/Source/SerialDispatch.swift index 2ef28ddf..bdf32f1d 100755 --- a/framework/Source/SerialDispatch.swift +++ b/framework/Source/SerialDispatch.swift @@ -36,9 +36,7 @@ public let lowProcessingQueuePriority:DispatchQueue.GlobalQueuePriority = { }() func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { -// FIXME: Xcode 8 beta 2 -// if (Thread.isMainThread) { - if (Thread.isMainThread()) { + if (Thread.isMainThread) { mainThreadOperation() } else { DispatchQueue.main.async(execute:mainThreadOperation) @@ -46,9 +44,7 @@ func runAsynchronouslyOnMainQueue(_ mainThreadOperation:@escaping () -> ()) { } func runOnMainQueue(_ mainThreadOperation:() -> ()) { -// FIXME: Xcode 8 beta 2 -// if (Thread.isMainThread) { - if (Thread.isMainThread()) { + if (Thread.isMainThread) { mainThreadOperation() } else { DispatchQueue.main.sync(execute:mainThreadOperation) diff --git a/framework/Source/ShaderProgram.swift b/framework/Source/ShaderProgram.swift index dc335e1b..2295bf5c 100755 --- a/framework/Source/ShaderProgram.swift +++ b/framework/Source/ShaderProgram.swift @@ -255,9 +255,7 @@ public func crashOnShaderCompileFailure(_ shaderName:String, _ operation:() t public func shaderFromFile(_ file:URL) throws -> String { // Note: this is a hack until Foundation's String initializers are fully functional // let fragmentShaderString = String(contentsOfURL:fragmentShaderFile, encoding:NSASCIIStringEncoding) -// FIXME: Xcode 8 beta 2 - guard (FileManager.default().fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} -// guard (FileManager.default().fileExists(atPath: file.path!)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} + guard (FileManager.default.fileExists(atPath: file.path)) else { throw ShaderCompileError(compileLog:"Shader file \(file) missing")} let fragmentShaderString = try NSString(contentsOfFile:file.path, encoding:String.Encoding.ascii.rawValue) From
6941f98decbfd51a39d8d9c3b7b2e1a3e9b0a429 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Thu, 8 Sep 2016 12:47:50 -0500 Subject: [PATCH 13/14] Updated the iOS side for compatibility with the Xcode 8 GM. --- framework/Source/iOS/Camera.swift | 10 +++++----- framework/Source/iOS/MovieOutput.swift | 2 +- framework/Source/iOS/OpenGLContext.swift | 2 +- framework/Source/iOS/PictureInput.swift | 4 ++-- framework/Source/iOS/PictureOutput.swift | 8 ++++---- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/framework/Source/iOS/Camera.swift b/framework/Source/iOS/Camera.swift index e6a32fd1..38eaa0a1 100755 --- a/framework/Source/iOS/Camera.swift +++ b/framework/Source/iOS/Camera.swift @@ -25,9 +25,9 @@ public enum PhysicalCameraLocation { func device() -> AVCaptureDevice? { let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo) - for device in devices! { + for case let device as AVCaptureDevice in devices! { if (device.position == self.captureDevicePosition()) { - return device as? AVCaptureDevice + return device } } @@ -137,14 +137,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer if (supportsFullYUVRange) { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))] } else { yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)} - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))] } } else { yuvConversionShader = nil - videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] + videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))] } if (captureSession.canAddOutput(videoOutput)) { diff --git a/framework/Source/iOS/MovieOutput.swift b/framework/Source/iOS/MovieOutput.swift index fe93d97f..0d2d289b 100644 --- a/framework/Source/iOS/MovieOutput.swift +++ b/framework/Source/iOS/MovieOutput.swift @@ -47,7 +47,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget { localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width) localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height) - localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 + localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? 
AVVideoCodecH264 as NSString assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings) assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo diff --git a/framework/Source/iOS/OpenGLContext.swift b/framework/Source/iOS/OpenGLContext.swift index 81b3f9e3..2aedb06b 100755 --- a/framework/Source/iOS/OpenGLContext.swift +++ b/framework/Source/iOS/OpenGLContext.swift @@ -87,7 +87,7 @@ public class OpenGLContext: SerialDispatch { lazy var extensionString:String = { return self.runOperationSynchronously{ self.makeCurrentContext() - return String(cString:UnsafePointer<CChar>(glGetString(GLenum(GL_EXTENSIONS)))) + return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer<CChar>.self)) } }() } diff --git a/framework/Source/iOS/PictureInput.swift b/framework/Source/iOS/PictureInput.swift index 3b22a887..3c6aedf9 100755 --- a/framework/Source/iOS/PictureInput.swift +++ b/framework/Source/iOS/PictureInput.swift @@ -83,11 +83,11 @@ public class PictureInput: ImageSource { let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html - imageContext?.draw(in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture)), image: image) + imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture))) } else { // Access the raw image bytes directly dataFromImageDataProvider = image.dataProvider?.data - imageData = UnsafeMutablePointer<GLubyte>(CFDataGetBytePtr(dataFromImageDataProvider)) + imageData = UnsafeMutablePointer<GLubyte>(mutating:CFDataGetBytePtr(dataFromImageDataProvider)) } sharedImageProcessingContext.runOperationSynchronously{ diff --git a/framework/Source/iOS/PictureOutput.swift b/framework/Source/iOS/PictureOutput.swift index 96310cf9..c3066a79 100644 --- a/framework/Source/iOS/PictureOutput.swift +++ b/framework/Source/iOS/PictureOutput.swift @@ -101,7 +101,7 @@ public extension UIImage { } } - public func filterWithPipeline(_ pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> UIImage { + public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> UIImage { let picture = PictureInput(image:self) var outputImage:UIImage? let pictureOutput = PictureOutput() @@ -109,13 +109,13 @@ public extension UIImage { pictureOutput.imageAvailableCallback = {image in outputImage = image } - pipeline(input:picture, output:pictureOutput) + pipeline(picture, pictureOutput) picture.processImage(synchronously:true) return outputImage! } } // Why are these flipped in the callback definition? -func dataProviderReleaseCallback(_ context:UnsafeMutablePointer<Void>?, data:UnsafePointer<Void>, size:Int) { - UnsafeMutablePointer<UInt8>(data).deallocate(capacity:size) +func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) { + data.deallocate(bytes:size, alignedTo:1) } From fa7ee67528fc87e89a2d99297a102ff81ba07f30 Mon Sep 17 00:00:00 2001 From: Brad Larson Date: Thu, 8 Sep 2016 13:11:29 -0500 Subject: [PATCH 14/14] Updated the FilterShowcase base code for Xcode 8 GM.
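These changes follow the same SE-0110 pattern as the framework patches above: Swift 3 function types no longer carry argument labels, so labeled-tuple callbacks and their call sites become positional. A minimal sketch of the resulting shape, with GaussianBlur standing in as an illustrative filter (not taken from this patch):

    import GPUImage

    // The function type is label-free; names exist only as closure parameters.
    let sliderUpdateCallback: ((GaussianBlur, Float) -> ())? = { filter, sliderValue in
        filter.blurRadiusInPixels = sliderValue
    }
    sliderUpdateCallback?(GaussianBlur(), 2.0) // was: (filter:..., sliderValue:...)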
--- .../FilterShowcase/FilterOperationTypes.swift | 8 ++++---- .../FilterShowcase/FilterShowcaseWindowController.swift | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift index 760bcbe8..7a341054 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperationTypes.swift @@ -6,7 +6,7 @@ enum FilterSliderSetting { case enabled(minimumValue:Float, maximumValue:Float, initialValue:Float) } -typealias FilterSetupFunction = (camera:Camera, filter:ImageProcessingOperation, outputView:RenderView) -> ImageSource? +typealias FilterSetupFunction = (Camera, ImageProcessingOperation, RenderView) -> ImageSource? enum FilterOperationType { case singleInput @@ -36,8 +36,8 @@ class FilterOperation: FilterOperationInt let titleName:String let sliderConfiguration:FilterSliderSetting let filterOperationType:FilterOperationType - let sliderUpdateCallback: ((filter:FilterClass, sliderValue:Float) -> ())? - init(filter:() -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback:((filter:FilterClass, sliderValue:Float) -> ())?, filterOperationType: FilterOperationType) { + let sliderUpdateCallback: ((FilterClass, Float) -> ())? + init(filter:@escaping () -> FilterClass, listName: String, titleName: String, sliderConfiguration: FilterSliderSetting, sliderUpdateCallback:((FilterClass, Float) -> ())?, filterOperationType: FilterOperationType) { self.listName = listName self.titleName = titleName self.sliderConfiguration = sliderConfiguration @@ -55,6 +55,6 @@ class FilterOperation: FilterOperationInt } func updateBasedOnSliderValue(_ sliderValue:Float) { - sliderUpdateCallback?(filter:internalFilter, sliderValue:sliderValue) + sliderUpdateCallback?(internalFilter, sliderValue) } } diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift index 3b8fd055..a65d8091 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterShowcaseWindowController.swift @@ -61,7 +61,7 @@ class FilterShowcaseWindowController: NSWindowController { currentFilterOperation!.filter.addTarget(filterView!) self.blendImage.processImage() case let .custom(filterSetupFunction:setupFunction): - currentFilterOperation!.configureCustomFilter(setupFunction(camera:videoCamera!, filter:currentFilterOperation!.filter, outputView:filterView!)) + currentFilterOperation!.configureCustomFilter(setupFunction(videoCamera!, currentFilterOperation!.filter, filterView!)) } switch currentFilterOperation!.sliderConfiguration { @@ -87,7 +87,7 @@ class FilterShowcaseWindowController: NSWindowController { func tableView(_ aTableView:NSTableView!, objectValueForTableColumn aTableColumn:NSTableColumn!, row rowIndex:Int) -> AnyObject! { let filterInList:FilterOperationInterface = filterOperations[rowIndex] - return filterInList.listName + return filterInList.listName as NSString } func tableViewSelectionDidChange(_ aNotification: Notification!) {
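A closing note on the operator change in Pipeline.swift (patch 11 above): Swift 3 replaces the old "infix operator --> { associativity left precedence 140 }" declaration with named precedence groups. Borrowing AdditionPrecedence works because pipeline chaining only needs left associativity, and the commented-out group could be enabled roughly as below. Note that "Multiplicative" is not a Swift 3 group name; the standard library's equivalent is MultiplicationPrecedence. A sketch, not part of the patch:

    precedencegroup ProcessingOperationPrecedence {
        associativity: left
        higherThan: AssignmentPrecedence
    }
    infix operator --> : ProcessingOperationPrecedence

    // Usage is unchanged: sources chain left-to-right into consumers,
    // e.g. camera --> filter --> renderView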