
Updated the iOS side for compatibility with the Xcode 8 GM.
BradLarson committed Sep 8, 2016
1 parent c9326ae commit 6941f98
Showing 5 changed files with 13 additions and 13 deletions.
10 changes: 5 additions & 5 deletions framework/Source/iOS/Camera.swift
@@ -25,9 +25,9 @@ public enum PhysicalCameraLocation {

     func device() -> AVCaptureDevice? {
         let devices = AVCaptureDevice.devices(withMediaType:AVMediaTypeVideo)
-        for device in devices! {
+        for case let device as AVCaptureDevice in devices! {
             if (device.position == self.captureDevicePosition()) {
-                return device as? AVCaptureDevice
+                return device
             }
         }
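A note on this hunk: under the Xcode 8 GM, AVCaptureDevice.devices(withMediaType:) comes into Swift as an optional array of Any rather than of AVCaptureDevice, so each element must be conditionally cast back to the device type. The for case let ... as pattern performs the cast and skips non-matching elements in one step, replacing the old per-element as? cast. A minimal standalone sketch of the pattern (the array contents here are invented for illustration):

    // Iterate a heterogeneous [Any], binding only the elements that
    // conditionally cast to String; the Int is silently skipped.
    let mixed:[Any] = ["front camera", 42, "back camera"]
    for case let name as String in mixed {
        print(name)
    }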

@@ -137,14 +137,14 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer

         if (supportsFullYUVRange) {
             yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionFullRangeFragmentShader)}
-            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))]
+            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))]
         } else {
             yuvConversionShader = crashOnShaderCompileFailure("Camera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader:YUVConversionVideoRangeFragmentShader)}
-            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))]
+            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange))]
         }
     } else {
         yuvConversionShader = nil
-        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))]
+        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:NSNumber(value:Int32(kCVPixelFormatType_32BGRA))]
     }
 
     if (captureSession.canAddOutput(videoOutput)) {
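A note on this hunk: with the GM toolchain, the videoSettings dictionary on AVCaptureVideoDataOutput appears to be bridged as [AnyHashable:Any]!, and kCVPixelBufferPixelFormatTypeKey is a CFString that Swift 3 no longer converts to the key type implicitly, hence the as AnyHashable cast on each key. A sketch of the pattern, configuring an output in isolation:

    import AVFoundation

    let output = AVCaptureVideoDataOutput()
    // The CFString key needs an explicit cast under the GM toolchain.
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable:
                                NSNumber(value:Int32(kCVPixelFormatType_32BGRA))]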
2 changes: 1 addition & 1 deletion framework/Source/iOS/MovieOutput.swift
@@ -47,7 +47,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {

         localSettings[AVVideoWidthKey] = localSettings[AVVideoWidthKey] ?? NSNumber(value:size.width)
         localSettings[AVVideoHeightKey] = localSettings[AVVideoHeightKey] ?? NSNumber(value:size.height)
-        localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264
+        localSettings[AVVideoCodecKey] = localSettings[AVVideoCodecKey] ?? AVVideoCodecH264 as NSString
 
         assetWriterVideoInput = AVAssetWriterInput(mediaType:AVMediaTypeVideo, outputSettings:localSettings)
         assetWriterVideoInput.expectsMediaDataInRealTime = liveVideo
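A note on this hunk: Swift 3 dropped implicit bridging of Swift value types to Objective-C objects (SE-0072). AVVideoCodecH264 is now a Swift String, and localSettings is presumably typed [String:AnyObject] (the cast only makes sense against an AnyObject value type), so storing the constant requires an explicit as NSString. A minimal sketch under that assumption:

    import AVFoundation

    var localSettings = [String:AnyObject]()
    localSettings[AVVideoWidthKey] = NSNumber(value:1280)
    localSettings[AVVideoHeightKey] = NSNumber(value:720)
    // A Swift String no longer bridges to AnyObject implicitly.
    localSettings[AVVideoCodecKey] = AVVideoCodecH264 as NSString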
2 changes: 1 addition & 1 deletion framework/Source/iOS/OpenGLContext.swift
@@ -87,7 +87,7 @@ public class OpenGLContext: SerialDispatch {
     lazy var extensionString:String = {
         return self.runOperationSynchronously{
             self.makeCurrentContext()
-            return String(cString:UnsafePointer<CChar>(glGetString(GLenum(GL_EXTENSIONS))))
+            return String(cString:unsafeBitCast(glGetString(GLenum(GL_EXTENSIONS)), to:UnsafePointer<CChar>.self))
         }
     }()
 }
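A note on this hunk: SE-0107 removed the UnsafePointer<T>(otherTypedPointer) conversion initializer the old line relied on. glGetString returns UnsafePointer<GLubyte>! (UInt8 bytes) while String(cString:) is being given UnsafePointer<CChar> (Int8), so the pointer is reinterpreted with unsafeBitCast; withMemoryRebound(to:capacity:) would be the more explicit alternative. A sketch, assuming a current GL context:

    import OpenGLES

    // glGetString() yields UnsafePointer<GLubyte>!; reinterpret it as
    // CChar bytes to satisfy this String(cString:) overload.
    let raw = glGetString(GLenum(GL_VERSION))
    let version = String(cString:unsafeBitCast(raw, to:UnsafePointer<CChar>.self))
    print(version)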
4 changes: 2 additions & 2 deletions framework/Source/iOS/PictureInput.swift
@@ -83,11 +83,11 @@ public class PictureInput: ImageSource {

             let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
             // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
-            imageContext?.draw(in: CGRect(x: 0.0, y: 0.0, width: CGFloat(widthToUseForTexture), height: CGFloat(heightToUseForTexture)), image: image)
+            imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture)))
         } else {
             // Access the raw image bytes directly
             dataFromImageDataProvider = image.dataProvider?.data
-            imageData = UnsafeMutablePointer<GLubyte>(CFDataGetBytePtr(dataFromImageDataProvider))
+            imageData = UnsafeMutablePointer<GLubyte>(mutating:CFDataGetBytePtr(dataFromImageDataProvider))
         }
 
         sharedImageProcessingContext.runOperationSynchronously{
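A note on this hunk: two GM changes meet here. CGContext's drawing method settled on draw(_:in:), with the image first and the rectangle labeled, and SE-0107 now requires the immutable UnsafePointer<UInt8> returned by CFDataGetBytePtr to be converted through the explicit UnsafeMutablePointer(mutating:) initializer. A self-contained sketch of that conversion (the byte values are invented):

    import CoreFoundation

    let source:[UInt8] = [71, 80, 85] // "GPU"
    let data = CFDataCreate(nil, source, source.count)!
    // CFDataGetBytePtr hands back an immutable pointer; the change in
    // mutability must now be spelled out. Writing through the result is
    // only safe if you own the storage; here it is only read.
    let bytes = UnsafeMutablePointer<UInt8>(mutating:CFDataGetBytePtr(data))
    print(bytes[0], bytes[1], bytes[2]) // 71 80 85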
8 changes: 4 additions & 4 deletions framework/Source/iOS/PictureOutput.swift
@@ -101,21 +101,21 @@ public extension UIImage {
         }
     }
 
-    public func filterWithPipeline(_ pipeline:(input:PictureInput, output:PictureOutput) -> ()) -> UIImage {
+    public func filterWithPipeline(_ pipeline:(PictureInput, PictureOutput) -> ()) -> UIImage {
         let picture = PictureInput(image:self)
         var outputImage:UIImage?
         let pictureOutput = PictureOutput()
         pictureOutput.onlyCaptureNextFrame = true
         pictureOutput.imageAvailableCallback = {image in
             outputImage = image
         }
-        pipeline(input:picture, output:pictureOutput)
+        pipeline(picture, pictureOutput)
         picture.processImage(synchronously:true)
         return outputImage!
     }
 }
 
 // Why are these flipped in the callback definition?
-func dataProviderReleaseCallback(_ context:UnsafeMutablePointer<Void>?, data:UnsafePointer<Void>, size:Int) {
-    UnsafeMutablePointer<UInt8>(data).deallocate(capacity:size)
+func dataProviderReleaseCallback(_ context:UnsafeMutableRawPointer?, data:UnsafeRawPointer, size:Int) {
+    data.deallocate(bytes:size, alignedTo:1)
 }
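A note on this hunk: two late-landing Swift 3 proposals surface at once. SE-0111 removed argument labels from function types, so the pipeline parameter becomes a plain (PictureInput, PictureOutput) -> () and is called positionally. SE-0107 retyped the CGDataProvider release callback around raw pointers, and bytes seen through an UnsafeRawPointer are freed with deallocate(bytes:alignedTo:) instead of a typed deallocate(capacity:). A sketch of the raw-pointer allocate/release pairing in its Swift 3 spelling:

    // Allocate untyped memory, view it read-only, then release it the
    // same way the new dataProviderReleaseCallback does.
    let byteCount = 16
    let buffer = UnsafeMutableRawPointer.allocate(bytes:byteCount, alignedTo:1)
    buffer.storeBytes(of:UInt8(255), as:UInt8.self)

    let readOnly = UnsafeRawPointer(buffer)
    print(readOnly.load(as:UInt8.self)) // 255
    readOnly.deallocate(bytes:byteCount, alignedTo:1)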
