Add Camera delegate for camera frame processing
datskos committed Jun 10, 2016
1 parent 7779905 commit c43aa78
Showing 4 changed files with 113 additions and 10 deletions.
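
In outline: Camera gains an optional CameraDelegate that is handed every CMSampleBuffer as it arrives, ViewController uses that hook to run a CIDetector face search and draw bounding boxes through the (now protocol-based) LineGenerator, and the storyboard picks up a switch for toggling detection. Condensed from the diffs below, the new rendering graph is:

camera.delegate = self // per-frame callback used for face detection
camera --> saturationFilter --> blendFilter --> renderView
lineGenerator --> blendFilter // face boxes composited on top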
@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10116" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="10117" systemVersion="15E65" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="10085"/>
@@ -22,7 +22,7 @@
<rect key="frame" x="0.0" y="28" width="600" height="572"/>
<subviews>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="cTJ-MM-c4Z">
<rect key="frame" x="253" y="494" width="95" height="70"/>
<rect key="frame" x="505" y="494" width="95" height="70"/>
<constraints>
<constraint firstAttribute="height" constant="70" id="iKd-sJ-ABw"/>
<constraint firstAttribute="width" constant="95" id="rF1-FD-Rxo"/>
@@ -32,11 +32,28 @@
<action selector="capture:" destination="BYZ-38-t0r" eventType="touchUpInside" id="BE1-gy-8jg"/>
</connections>
</button>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Detect Faces" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="nJ9-JV-UXc">
<rect key="frame" x="65" y="524" width="80" height="16"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="calibratedRGB"/>
<fontDescription key="fontDescription" type="system" pointSize="13"/>
<color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="calibratedRGB"/>
<nil key="highlightedColor"/>
</label>
<switch opaque="NO" contentMode="scaleToFill" horizontalHuggingPriority="750" verticalHuggingPriority="750" contentHorizontalAlignment="center" contentVerticalAlignment="center" on="YES" translatesAutoresizingMaskIntoConstraints="NO" id="UZf-tP-6yu">
<rect key="frame" x="8" y="515" width="51" height="31"/>
<connections>
<action selector="didSwitch:" destination="BYZ-38-t0r" eventType="valueChanged" id="0w2-0I-oBP"/>
</connections>
</switch>
</subviews>
<color key="backgroundColor" white="1" alpha="1" colorSpace="calibratedWhite"/>
<constraints>
<constraint firstAttribute="bottom" secondItem="UZf-tP-6yu" secondAttribute="bottom" constant="26" id="QYe-Rb-9UI"/>
<constraint firstAttribute="bottom" secondItem="nJ9-JV-UXc" secondAttribute="bottom" constant="32" id="gaO-oh-agg"/>
<constraint firstItem="UZf-tP-6yu" firstAttribute="leading" secondItem="MIZ-bY-Kwk" secondAttribute="leading" constant="8" id="p9I-FX-whb"/>
<constraint firstItem="nJ9-JV-UXc" firstAttribute="leading" secondItem="UZf-tP-6yu" secondAttribute="trailing" constant="8" id="qeV-wy-tZf"/>
<constraint firstItem="cTJ-MM-c4Z" firstAttribute="bottom" secondItem="MIZ-bY-Kwk" secondAttribute="bottomMargin" id="w5J-tE-eCN"/>
<constraint firstItem="cTJ-MM-c4Z" firstAttribute="centerX" secondItem="MIZ-bY-Kwk" secondAttribute="centerX" id="y7f-cV-kkm"/>
<constraint firstItem="cTJ-MM-c4Z" firstAttribute="trailing" secondItem="MIZ-bY-Kwk" secondAttribute="trailing" id="y7f-cV-kkm"/>
</constraints>
</view>
</subviews>
@@ -49,6 +66,7 @@
</constraints>
</view>
<connections>
<outlet property="faceDetectSwitch" destination="UZf-tP-6yu" id="kac-wE-Nc7"/>
<outlet property="renderView" destination="MIZ-bY-Kwk" id="2Sj-7n-kNT"/>
</connections>
</viewController>
@@ -1,20 +1,34 @@
import UIKit
import CoreImage
import GPUImage
import AVFoundation

class ViewController: UIViewController {
@IBOutlet weak var renderView: RenderView!
@IBOutlet weak var faceDetectSwitch: UISwitch!

let fbSize = Size(width: 640, height: 480)
let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyLow])
var shouldDetectFaces = true
lazy var lineGenerator: LineGenerator = {
let gen = LineGenerator(size: self.fbSize)
gen.lineWidth = 5
return gen
}()
let saturationFilter = SaturationAdjustment()
let blendFilter = AlphaBlend()
var camera:Camera!
var filter:SaturationAdjustment!

override func viewDidLoad() {
super.viewDidLoad()

do {
camera = try Camera(sessionPreset:AVCaptureSessionPreset640x480)
camera.runBenchmark = true
filter = SaturationAdjustment()
camera --> filter --> renderView
camera.delegate = self
camera --> saturationFilter --> blendFilter --> renderView
lineGenerator --> blendFilter
shouldDetectFaces = faceDetectSwitch.on
camera.startCapture()
} catch {
fatalError("Could not initialize rendering pipeline: \(error)")
@@ -25,14 +39,61 @@ class ViewController: UIViewController {
super.viewDidLayoutSubviews()
}

@IBAction func didSwitch(sender: UISwitch) {
shouldDetectFaces = sender.on
}

@IBAction func capture(sender: AnyObject) {
print("Capture")
do {
let documentsDir = try NSFileManager.defaultManager().URLForDirectory(.DocumentDirectory, inDomain:.UserDomainMask, appropriateForURL:nil, create:true)
filter.saveNextFrameToURL(NSURL(string:"TestImage.png", relativeToURL:documentsDir)!, format:.PNG)
saturationFilter.saveNextFrameToURL(NSURL(string:"TestImage.png", relativeToURL:documentsDir)!, format:.PNG)
} catch {
print("Couldn't save image: \(error)")
}
}
}

extension ViewController: CameraDelegate {
func didCaptureBuffer(sampleBuffer: CMSampleBuffer) {
guard shouldDetectFaces else {
lineGenerator.renderLines([]) // clear
return
}
if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, CMAttachmentMode(kCMAttachmentMode_ShouldPropagate))!
let img = CIImage(CVPixelBuffer: pixelBuffer, options: attachments as? [String: AnyObject])
var lines = [LineProtocol]()
for feature in faceDetector.featuresInImage(img, options: [CIDetectorImageOrientation: 6]) {
if feature is CIFaceFeature {
lines = lines + faceLines(feature.bounds)
}
}
lineGenerator.renderLines(lines)
}
}

func faceLines(bounds: CGRect) -> [LineProtocol] {
// convert from CoreImage to GL coords
let flip = CGAffineTransformMakeScale(1, -1)
let rotate = CGAffineTransformRotate(flip, CGFloat(-M_PI_2))
let translate = CGAffineTransformTranslate(rotate, -1, -1)
let xform = CGAffineTransformScale(translate, CGFloat(2/fbSize.width), CGFloat(2/fbSize.height))
let glRect = CGRectApplyAffineTransform(bounds, xform)

let x = glRect.origin.x
let y = glRect.origin.y
let width = glRect.size.width
let height = glRect.size.height

let tl = CGPoint(x: x, y: y)
let tr = CGPoint(x: x + width, y: y)
let bl = CGPoint(x: x, y: y + height)
let br = CGPoint(x: x + width, y: y + height)

return [LineSegment(p1: tl, p2: tr), // top
LineSegment(p1: tr, p2: br), // right
LineSegment(p1: br, p2: bl), // bottom
LineSegment(p1: bl, p2: tl)] // left
}
}
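
An aside on faceLines(_:): under CGAffineTransform composition the chain applies right to left, so detector coordinates are scaled into [0, 2], shifted into [-1, 1], rotated by -90 degrees (the detector ran with CIDetectorImageOrientation 6, i.e. on a rotated buffer), and finally Y-flipped into GL clip space. A standalone sanity check of that reading (the hard-coded 640x480 frame size is the only assumption beyond the code above):

import Foundation
import CoreGraphics

// Rebuild the xform from faceLines(_:) with fbSize = 640x480.
let flip = CGAffineTransformMakeScale(1, -1)
let rotate = CGAffineTransformRotate(flip, CGFloat(-M_PI_2))
let translate = CGAffineTransformTranslate(rotate, -1, -1)
let xform = CGAffineTransformScale(translate, CGFloat(2.0/640.0), CGFloat(2.0/480.0))

// A face centered in the frame should land centered in clip space.
let centered = CGRect(x: 160, y: 120, width: 320, height: 240)
print(CGRectApplyAffineTransform(centered, xform))
// -> (-0.5, -0.5, 1.0, 1.0): the middle of the [-1, 1] square, with the
//    rotation swapping the x and y axes to undo the camera orientation.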
25 changes: 22 additions & 3 deletions framework/Source/Operations/LineGenerator.swift
@@ -12,7 +12,11 @@
#endif
#endif

public struct Line {
public protocol LineProtocol {
func toGLEndpoints() -> [GLfloat]
}

public struct Line: LineProtocol {
public let slope:Float
public let intercept:Float

@@ -21,7 +25,7 @@ public struct Line {
self.intercept = intercept
}

func toGLEndpoints() -> [GLfloat] {
public func toGLEndpoints() -> [GLfloat] {
if (slope > 9000.0) {// Vertical line
return [intercept, -1.0, intercept, 1.0]
} else {
@@ -30,6 +34,21 @@
}
}

public struct LineSegment: LineProtocol {
public let p1: CGPoint
public let p2: CGPoint

public init(p1:CGPoint, p2:CGPoint) {
self.p1 = p1
self.p2 = p2
}

public func toGLEndpoints() -> [GLfloat] {
return [p1.x, p1.y, p2.x, p2.y].map {GLfloat($0)}
}
}


public class LineGenerator: ImageGenerator {
public var lineColor:Color = Color.Green { didSet { uniformSettings["lineColor"] = lineColor } }
public var lineWidth:Float = 1.0 {
@@ -50,7 +69,7 @@ public class LineGenerator: ImageGenerator {
({lineColor = Color.Red})()
}

public func renderLines(lines:[Line]) {
public func renderLines(lines:[LineProtocol]) {
imageFramebuffer.activateFramebufferForRendering()

lineShader.use()
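
With LineProtocol in place, Line and LineSegment values can travel through the same renderLines(_:) call. A short sketch; the inputs are arbitrary, and the vertical Line is chosen to exercise the slope > 9000 branch shown above:

import CoreGraphics
import GPUImage

let edge = LineSegment(p1: CGPoint(x: -0.5, y: -0.5), p2: CGPoint(x: 0.5, y: -0.5))
edge.toGLEndpoints() // [-0.5, -0.5, 0.5, -0.5]

let vertical = Line(slope: 9001, intercept: 0.25)
vertical.toGLEndpoints() // [0.25, -1.0, 0.25, 1.0] via the vertical-line branch

let generator = LineGenerator(size: Size(width: 640, height: 480))
let lines: [LineProtocol] = [edge, vertical] // heterogeneous under the protocol
generator.renderLines(lines)

This is also why toGLEndpoints() on Line had to become public: a public type's witness for a public protocol requirement must itself be public.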
5 changes: 5 additions & 0 deletions framework/Source/iOS/Camera.swift
@@ -1,6 +1,9 @@
import Foundation
import AVFoundation

public protocol CameraDelegate {
func didCaptureBuffer(sampleBuffer: CMSampleBuffer)
}
public enum PhysicalCameraLocation {
case BackFacing
case FrontFacing
@@ -61,6 +64,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate {
}

public let targets = TargetContainer()
public var delegate: CameraDelegate?
let captureSession:AVCaptureSession
let inputCamera:AVCaptureDevice!
let videoInput:AVCaptureDeviceInput!
@@ -181,6 +185,7 @@ public class Camera: NSObject, ImageSource, AVCaptureVideoDataOutputSampleBufferDelegate {
sharedImageProcessingContext.runOperationAsynchronously{
let cameraFramebuffer:Framebuffer

self.delegate?.didCaptureBuffer(sampleBuffer)
if self.captureAsYUV {
let luminanceFramebuffer:Framebuffer
let chrominanceFramebuffer:Framebuffer
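
The new delegate is called from inside sharedImageProcessingContext.runOperationAsynchronously, so it runs on the image-processing queue once per captured frame, before the frame reaches any targets. A minimal conforming type as a sketch (FrameLogger is hypothetical, not part of the commit):

import AVFoundation
import GPUImage

class FrameLogger: CameraDelegate {
    func didCaptureBuffer(sampleBuffer: CMSampleBuffer) {
        // Called on the image-processing queue; hop to the main queue
        // before touching UIKit from here.
        let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        print("frame at \(CMTimeGetSeconds(pts))s")
    }
}

One caveat worth noting: delegate is a strong reference (CameraDelegate has no class constraint, so the property cannot be declared weak), which means a delegate that also owns the Camera, as ViewController does above, forms a retain cycle until one side is cleared.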
