Adding iOS support to Swift Package Manager builds. Had to restructure the Apple-specific files to do this.
BradLarson committed Jun 10, 2019
1 parent ffd7b5a commit a5389e8
Showing 17 changed files with 193 additions and 1,060 deletions.
20 changes: 6 additions & 14 deletions Package.swift
@@ -2,17 +2,9 @@

import PackageDescription

-#if os(macOS)
-let platformDepedencies: [Package.Dependency] = []
-let platformExcludes = ["iOS", "Linux", "Operations/Shaders"]
-let platformTargets: [Target] = [
-.target(
-name: "GPUImage",
-path: "framework/Source",
-exclude: platformExcludes)]
-#elseif os(iOS)
-let platformDepedencies: [Package.Dependency] = []
-let platformExcludes = ["Linux", "Mac", "Operations/Shaders"]
+#if os(macOS) // This fires for both macOS and iOS targets, because it's based on build platform
+let platformDependencies: [Package.Dependency] = []
+let platformExcludes = ["Linux", "Operations/Shaders"]
let platformTargets: [Target] = [
.target(
name: "GPUImage",
@@ -21,11 +13,11 @@ let platformTargets: [Target] = [
#elseif os(Linux)
// TODO: Add back in RPi support
// TODO: Move the remote system library packages into this project
-let platformDepedencies: [Package.Dependency] = [
+let platformDependencies: [Package.Dependency] = [
.package(url: "https://github.com/BradLarson/COpenGL.git", from: "1.0.2"),
.package(url: "https://github.com/BradLarson/CFreeGLUT.git", from: "1.0.1"),
.package(url: "https://github.com/BradLarson/CVideo4Linux.git", from: "1.0.2")]
let platformExcludes = ["iOS", "Mac", "Operations/Shaders", "Linux/RPiRenderWindow.swift", "Linux/OpenGLContext-RPi.swift", "Linux/V4LSupplement"]
let platformExcludes = ["Apple", "Operations/Shaders", "Linux/RPiRenderWindow.swift", "Linux/OpenGLContext-RPi.swift", "Linux/V4LSupplement"]
let platformTargets: [Target] = [
.target(
name: "V4LSupplement",
@@ -45,7 +37,7 @@ let package = Package(
name: "GPUImage",
targets: ["GPUImage"]),
],
-dependencies: platformDepedencies,
+dependencies: platformDependencies,
targets: platformTargets,
swiftLanguageVersions: [.v4]
)
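
A note on the manifest logic: `Package.swift` executes on the machine running the build, not on the target platform, so `#if os(macOS)` is true when building for either macOS or iOS from a Mac. That is why the two Apple branches above collapse into one, with the shared sources presumably gathered under an `Apple` directory (the Linux branch now excludes "Apple" instead of "iOS" and "Mac"). A minimal sketch of the resulting manifest shape, with the Linux branch abbreviated:

```swift
// swift-tools-version:4.2 -- assumed; the hunk above starts at line 2,
// so the actual tools-version comment isn't shown.
import PackageDescription

#if os(macOS) // True for both macOS and iOS builds started from a Mac.
let platformDependencies: [Package.Dependency] = []
let platformExcludes = ["Linux", "Operations/Shaders"]
#elseif os(Linux)
let platformDependencies: [Package.Dependency] = [
    .package(url: "https://github.com/BradLarson/COpenGL.git", from: "1.0.2"),
]
let platformExcludes = ["Apple", "Operations/Shaders"]
#endif

let package = Package(
    name: "GPUImage",
    products: [.library(name: "GPUImage", targets: ["GPUImage"])],
    dependencies: platformDependencies,
    targets: [.target(name: "GPUImage", path: "framework/Source", exclude: platformExcludes)],
    swiftLanguageVersions: [.v4]
)
```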
138 changes: 54 additions & 84 deletions framework/GPUImage.xcodeproj/project.pbxproj

Large diffs are not rendered by default.

@@ -77,8 +77,8 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
let captureAsYUV:Bool
let yuvConversionShader:ShaderProgram?
let frameRenderingSemaphore = DispatchSemaphore(value:1)
-let cameraProcessingQueue = DispatchQueue.global()
-let audioProcessingQueue = DispatchQueue.global()
+let cameraProcessingQueue:DispatchQueue = standardProcessingQueue
+let audioProcessingQueue:DispatchQueue = lowProcessingQueue

let framesToIgnore = 5
var numberOfFramesCaptured = 0
@@ -203,6 +203,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
let luminanceFramebuffer:Framebuffer
let chrominanceFramebuffer:Framebuffer
if sharedImageProcessingContext.supportsTextureCaches() {
+#if os(iOS)
var luminanceTextureRef:CVOpenGLESTexture? = nil
let _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, cameraFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceTextureRef)
let luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef!)
@@ -220,6 +221,9 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBuffer
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)
chrominanceFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth / 2), height:GLint(bufferHeight / 2)), textureOnly:true, overriddenTexture:chrominanceTexture)
+#else
+fatalError("Texture cache processing isn't available on macOS")
+#endif
} else {
glActiveTexture(GLenum(GL_TEXTURE4))
luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:self.location.imageOrientation(), size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true)
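The capture path now pairs a runtime check with a compile-time guard: `supportsTextureCaches()` reports whether Core Video's OpenGL ES texture caches exist (the macOS context later in this commit hard-codes `false`), while `#if os(iOS)` keeps the `CVOpenGLESTexture` calls out of macOS builds entirely, so the new `fatalError` is unreachable in practice. A sketch of the shape, using GPUImage's own names with the bodies elided:

```swift
if sharedImageProcessingContext.supportsTextureCaches() {
    #if os(iOS)
    // Fast path: wrap the frame's Y and CbCr planes directly in GL textures
    // via CVOpenGLESTextureCacheCreateTextureFromImage -- no copy needed.
    #else
    fatalError("Texture cache processing isn't available on macOS")
    #endif
} else {
    // Portable path: request framebuffers from the cache and upload the
    // planes with glTexImage2D.
}
```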
@@ -44,7 +44,7 @@ public class MovieInput: ImageSource {

public func start() {
asset.loadValuesAsynchronously(forKeys:["tracks"], completionHandler:{
-DispatchQueue.global().async(execute: {
+standardProcessingQueue.async(execute: {
guard (self.asset.statusOfValue(forKey: "tracks", error:nil) == .loaded) else { return }

guard self.assetReader.startReading() else {
@@ -157,6 +157,7 @@ public class MovieInput: ImageSource {

let startTime = CFAbsoluteTimeGetCurrent()

+#if os(iOS)
var luminanceGLTexture: CVOpenGLESTexture?

glActiveTexture(GLenum(GL_TEXTURE0))
@@ -205,7 +206,19 @@

// chrominanceFramebuffer.cache = sharedImageProcessingContext.framebufferCache
chrominanceFramebuffer.lock()
+#else
+let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true)
+luminanceFramebuffer.lock()
+glActiveTexture(GLenum(GL_TEXTURE0))
+glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture)
+glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 0))
+
+let chrominanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:true)
+chrominanceFramebuffer.lock()
+glActiveTexture(GLenum(GL_TEXTURE1))
+glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer.texture)
+glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), 0, GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddressOfPlane(movieFrame, 1))
+#endif
let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(bufferWidth), height:GLint(bufferHeight)), textureOnly:false)

convertYUVToRGB(shader:self.yuvConversionShader, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer, resultFramebuffer:movieFramebuffer, colorConversionMatrix:conversionMatrix)
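The new `#else` branch gives macOS a plain upload path for biplanar 4:2:0 frames: plane 0 is full-resolution luminance (one byte per pixel, uploaded as GL_LUMINANCE) and plane 1 is interleaved Cb/Cr at half resolution in each dimension (two bytes per pixel, uploaded as GL_LUMINANCE_ALPHA), after which `convertYUVToRGB` merges them into RGB. Note that the chrominance framebuffer appears to be requested at full size even though `glTexImage2D` then re-specifies its texture at half resolution; the upload works because `glTexImage2D` redefines the texture's storage, but the recorded sizes disagree. A hypothetical helper condensing the two calls (`uploadPlane`, `w`, and `h` are illustrative, relying on this file's imports):

```swift
// Hypothetical helper for the two glTexImage2D calls above.
func uploadPlane(of buffer: CVPixelBuffer, index: Int, format: Int32,
                 width: Int, height: Int) {
    glTexImage2D(GLenum(GL_TEXTURE_2D), 0, format,
                 GLsizei(width), GLsizei(height), 0,
                 GLenum(format), GLenum(GL_UNSIGNED_BYTE),
                 CVPixelBufferGetBaseAddressOfPlane(buffer, index))
}

// Plane 0: full-size luminance. Plane 1: half-size interleaved chrominance.
// uploadPlane(of: movieFrame, index: 0, format: GL_LUMINANCE,       width: w,     height: h)
// uploadPlane(of: movieFrame, index: 1, format: GL_LUMINANCE_ALPHA, width: w / 2, height: h / 2)
```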
@@ -31,7 +31,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
return assetWriterVideoInput.transform
}
set {
-assetWriterVideoInput.transform = transform
+assetWriterVideoInput.transform = newValue
}
}
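
The one-word change above fixes a silent no-op: in the old setter, `transform` on the right-hand side re-read the computed property itself, so the asset writer was assigned its own current value and callers could never change the transform. `newValue` is Swift's implicit setter parameter. Reconstructed (the declaration line itself sits just above the shown hunk):

```swift
public var transform: CGAffineTransform {
    get { return assetWriterVideoInput.transform }
    set { assetWriterVideoInput.transform = newValue } // was "= transform", a self-read
}
```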

@@ -79,6 +79,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
sharedImageProcessingContext.runOperationSynchronously{
self.isRecording = self.assetWriter.startWriting()

+#if os(iOS)
CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput.pixelBufferPool!, &self.pixelBuffer)

/* AVAssetWriter will use BT.601 conversion matrix for RGB to YCbCr conversion
@@ -96,6 +97,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!)

self.renderFramebuffer = try! Framebuffer(context:sharedImageProcessingContext, orientation:.portrait, size:bufferSize, textureOnly:false, overriddenTexture:cachedTexture)
+#endif
}
}

@@ -153,6 +155,7 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
return
}

+#if os(iOS)
if !sharedImageProcessingContext.supportsTextureCaches() {
let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBuffer)
guard ((pixelBuffer != nil) && (pixelBufferStatus == kCVReturnSuccess)) else { return }
@@ -168,9 +171,24 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
if !sharedImageProcessingContext.supportsTextureCaches() {
pixelBuffer = nil
}
+#else
+var pixelBufferFromPool:CVPixelBuffer? = nil
+
+let pixelBufferStatus = CVPixelBufferPoolCreatePixelBuffer(nil, assetWriterPixelBufferInput.pixelBufferPool!, &pixelBufferFromPool)
+guard let pixelBuffer = pixelBufferFromPool, (pixelBufferStatus == kCVReturnSuccess) else { return }
+
+renderIntoPixelBuffer(pixelBuffer, framebuffer:framebuffer)
+
+if (!assetWriterPixelBufferInput.append(pixelBuffer, withPresentationTime:frameTime)) {
+print("Problem appending pixel buffer at time: \(frameTime)")
+}
+
+CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
+#endif
}

func renderIntoPixelBuffer(_ pixelBuffer:CVPixelBuffer, framebuffer:Framebuffer) {
+#if os(iOS)
if !sharedImageProcessingContext.supportsTextureCaches() {
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size))
renderFramebuffer.lock()
@@ -187,6 +205,19 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
renderFramebuffer.unlock()
}
+#else
+let renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:GLSize(self.size))
+renderFramebuffer.lock()
+
+renderFramebuffer.activateFramebufferForRendering()
+clearFramebufferWithColor(Color.black)
+
+renderQuadWithShader(sharedImageProcessingContext.passthroughShader, uniformSettings:ShaderUniformSettings(), vertexBufferObject:sharedImageProcessingContext.standardImageVBO, inputTextures:[framebuffer.texturePropertiesForOutputRotation(.noRotation)])
+
+CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
+glReadPixels(0, 0, renderFramebuffer.size.width, renderFramebuffer.size.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), CVPixelBufferGetBaseAddress(pixelBuffer))
+renderFramebuffer.unlock()
+#endif
}

// MARK: -
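Without texture caches, the macOS write path added above is: pull a fresh buffer from the asset writer's pixel buffer pool, render the framebuffer through the passthrough shader, read the pixels back with `glReadPixels`, and append the buffer with its presentation time. The readback uses GL_BGRA here versus GL_RGBA on the iOS fallback, presumably matching the BGRA format of the pool's buffers. A condensed sketch, where `pool`, `adaptor`, `frameTime`, `width`, and `height` stand in for the class's own properties:

```swift
var poolBuffer: CVPixelBuffer? = nil
let status = CVPixelBufferPoolCreatePixelBuffer(nil, pool, &poolBuffer)
guard let pixelBuffer = poolBuffer, status == kCVReturnSuccess else { return }

CVPixelBufferLockBaseAddress(pixelBuffer, [])
// ... render into the currently bound framebuffer, then:
glReadPixels(0, 0, GLsizei(width), GLsizei(height),
             GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE),
             CVPixelBufferGetBaseAddress(pixelBuffer))
if !adaptor.append(pixelBuffer, withPresentationTime: frameTime) {
    print("Problem appending pixel buffer at time: \(frameTime)")
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, [])
```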
@@ -1,3 +1,5 @@
+#if canImport(OpenGL)
+
import OpenGL.GL
import Cocoa

@@ -65,6 +67,10 @@ public class OpenGLContext: SerialDispatch {
// MARK: -
// MARK: Device capabilities

+func supportsTextureCaches() -> Bool {
+return false
+}
+
public var maximumTextureSizeForThisDevice:GLint {get { return _maximumTextureSizeForThisDevice } }
private lazy var _maximumTextureSizeForThisDevice:GLint = {
return self.openGLDeviceSettingForOption(GL_MAX_TEXTURE_SIZE)
@@ -87,3 +93,4 @@
}
}()
}
+#endif
@@ -1,3 +1,5 @@
+#if canImport(OpenGLES)
+
import OpenGLES
import UIKit

@@ -103,3 +105,4 @@ public class OpenGLContext: SerialDispatch {
}
}()
}
+#endif
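These paired `#if canImport(...)` guards are what make the restructuring work under SwiftPM: the package manager compiles every Swift file in the target on every platform, so each platform-specific `OpenGLContext` file now compiles itself out where its frameworks are missing, and both can sit side by side (presumably in the shared `Apple` source directory that the Linux excludes refer to). The pattern, sketched across the two files:

```swift
// OpenGLContext-macOS (sketch): only compiled where AppKit/OpenGL exist.
#if canImport(OpenGL)
import OpenGL.GL
import Cocoa
// macOS implementation; supportsTextureCaches() returns false here.
#endif

// OpenGLContext-iOS (sketch): the OpenGLES counterpart.
#if canImport(OpenGLES)
import OpenGLES
import UIKit
// iOS implementation, where texture caches may be available.
#endif
```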
@@ -1,11 +1,20 @@
#if canImport(OpenGL)
import OpenGL.GL3
#else
import OpenGLES
#endif

#if canImport(UIKit)
import UIKit
#else
import Cocoa
#endif

public class PictureInput: ImageSource {
public let targets = TargetContainer()
var imageFramebuffer:Framebuffer!
var hasProcessedImage:Bool = false

public init(image:CGImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) {
// TODO: Dispatch this whole thing asynchronously to move image loading off main thread
let widthOfImage = GLint(image.width)
@@ -81,13 +90,17 @@

let genericRGBColorspace = CGColorSpaceCreateDeviceRGB()

-let imageContext = CGContext(data: imageData, width: Int(widthToUseForTexture), height: Int(heightToUseForTexture), bitsPerComponent: 8, bytesPerRow: Int(widthToUseForTexture) * 4, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
+let imageContext = CGContext(data:imageData, width:Int(widthToUseForTexture), height:Int(heightToUseForTexture), bitsPerComponent:8, bytesPerRow:Int(widthToUseForTexture) * 4, space:genericRGBColorspace, bitmapInfo:CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue)
// CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
imageContext?.draw(image, in:CGRect(x:0.0, y:0.0, width:CGFloat(widthToUseForTexture), height:CGFloat(heightToUseForTexture)))
} else {
// Access the raw image bytes directly
dataFromImageDataProvider = image.dataProvider?.data
+#if os(iOS)
imageData = UnsafeMutablePointer<GLubyte>(mutating:CFDataGetBytePtr(dataFromImageDataProvider))
+#else
+imageData = UnsafeMutablePointer<GLubyte>(mutating:CFDataGetBytePtr(dataFromImageDataProvider)!)
+#endif
}

sharedImageProcessingContext.runOperationSynchronously{
@@ -110,29 +123,42 @@
}
glBindTexture(GLenum(GL_TEXTURE_2D), 0)
}

if (shouldRedrawUsingCoreGraphics) {
imageData.deallocate()
}
}


+#if canImport(UIKit)
public convenience init(image:UIImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) {
self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation)
}
+#else
+public convenience init(image:NSImage, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) {
+self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation)
+}
+#endif

public convenience init(imageName:String, smoothlyScaleOutput:Bool = false, orientation:ImageOrientation = .portrait) {
+#if canImport(UIKit)
guard let image = UIImage(named:imageName) else { fatalError("No such image named: \(imageName) in your application bundle") }
self.init(image:image.cgImage!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation)
+#else
+guard let image = NSImage(named:NSImage.Name(imageName)) else { fatalError("No such image named: \(imageName) in your application bundle") }
+self.init(image:image.cgImage(forProposedRect:nil, context:nil, hints:nil)!, smoothlyScaleOutput:smoothlyScaleOutput, orientation:orientation)
+#endif
}

public func processImage(synchronously:Bool = false) {
if synchronously {
sharedImageProcessingContext.runOperationSynchronously{
sharedImageProcessingContext.makeCurrentContext()
self.updateTargetsWithFramebuffer(self.imageFramebuffer)
self.hasProcessedImage = true
}
} else {
sharedImageProcessingContext.runOperationAsynchronously{
sharedImageProcessingContext.makeCurrentContext()
self.updateTargetsWithFramebuffer(self.imageFramebuffer)
self.hasProcessedImage = true
}
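The same `canImport(UIKit)` split drives the convenience initializers above: `UIImage.cgImage` on iOS versus `NSImage.cgImage(forProposedRect:context:hints:)` on macOS, both funneling into the shared `CGImage` designated initializer. A hypothetical end-to-end usage in GPUImage's style (`SaturationAdjustment` and `renderView` are stand-ins for whatever operation and output you wire up; the frame is arbitrary):

```swift
let renderView = RenderView(frame: CGRect(x: 0, y: 0, width: 640, height: 480))
let input = PictureInput(imageName: "sample") // loaded from the app bundle
let filter = SaturationAdjustment()
input --> filter --> renderView
input.processImage(synchronously: true) // blocks until all targets are updated
```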
(Diffs for the remaining changed files are not rendered here.)
