Skip to content

Commit

Permalink
Added color to the YUV camera input on Linux, fixed the double buffering for Linux OpenGL.
Browse files Browse the repository at this point in the history
  • Loading branch information
BradLarson committed Apr 19, 2016
1 parent 47e74d2 commit 38e5078
Show file tree
Hide file tree
Showing 14 changed files with 125 additions and 37 deletions.
3 changes: 2 additions & 1 deletion examples/Linux-OpenGL/SimpleVideoFilter/Source/main.swift
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import GPUImage

let camera = V4LCamera(size:Size(width:1280.0, height:720.0))
// For now, GLUT initialization is done in the render window, so that must come first in sequence
let renderWindow = GLUTRenderWindow(width:1280, height:720, title:"Simple Video Filter")
let camera = V4LCamera(size:Size(width:1280.0, height:720.0))
let edgeDetection = SobelEdgeDetection()

camera --> edgeDetection --> renderWindow
Expand Down
3 changes: 2 additions & 1 deletion examples/Linux-RPi/SimpleVideoFilter/Source/main.swift
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
import GPUImage

let camera = V4LCamera(size:Size(width:1280.0, height:720.0))
// For now, rendering requires the window to be created first
let renderWindow = RPiRenderWindow(width:1280, height:720)
let camera = V4LCamera(size:Size(width:1280.0, height:720.0))
let edgeDetection = SobelEdgeDetection()

camera --> edgeDetection --> renderWindow
Expand Down
10 changes: 8 additions & 2 deletions framework/Source/CameraConversion.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,19 @@ let colorConversionMatrix709Default = Matrix3x3(rowMajorValues:[
1.793, -0.533, 0.0,
])

func convertYUVToRGB(shader shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) {
let textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)]
// Converts Y/UV (or Y/U/V planar, when a second chrominance plane is supplied)
// input framebuffers into an RGB result framebuffer using the given conversion
// shader and 3x3 color conversion matrix. All input framebuffers are unlocked
// once the render pass has consumed them.
func convertYUVToRGB(shader shader:ShaderProgram, luminanceFramebuffer:Framebuffer, chrominanceFramebuffer:Framebuffer, secondChrominanceFramebuffer:Framebuffer? = nil, resultFramebuffer:Framebuffer, colorConversionMatrix:Matrix3x3) {
    // Orient every input plane to match the output framebuffer's orientation.
    var textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation),
                             chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)]
    // Planar YUV sources (e.g. V4L YUV420) carry U and V in separate planes.
    if let secondChrominanceFramebuffer = secondChrominanceFramebuffer {
        textureProperties.append(secondChrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation))
    }
    resultFramebuffer.activateFramebufferForRendering()
    clearFramebufferWithColor(Color.Black)
    var uniformSettings = ShaderUniformSettings()
    uniformSettings["colorConversionMatrix"] = colorConversionMatrix
    renderQuadWithShader(shader, uniformSettings:uniformSettings, vertices:standardImageVertices, inputTextures:textureProperties)
    // The render pass has consumed the inputs; release their locks.
    luminanceFramebuffer.unlock()
    chrominanceFramebuffer.unlock()
    secondChrominanceFramebuffer?.unlock()
}
2 changes: 1 addition & 1 deletion framework/Source/Linux/GLUTRenderWindow.swift
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ public class GLUTRenderWindow: ImageConsumer {
public init(width:UInt32, height:UInt32, title:String) {
var localArgc = Process.argc
glutInit(&localArgc, Process.unsafeArgv)
glutInitDisplayMode(UInt32(GLUT_SINGLE))
glutInitDisplayMode(UInt32(GLUT_DOUBLE))
glutInitWindowSize(Int32(width), Int32(height))
glutInitWindowPosition(100,100)
glutCreateWindow(title)
Expand Down
2 changes: 2 additions & 0 deletions framework/Source/Linux/OpenGLContext-RPi.swift
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import COpenGLES.gles2
import CVideoCore

class OpenGLContext {
lazy var framebufferCache:FramebufferCache = {
Expand All @@ -14,6 +15,7 @@ class OpenGLContext {
// MARK: Initialization and teardown

init() {
bcm_host_init()

glDisable(GLenum(GL_DEPTH_TEST))
glEnable(GLenum(GL_TEXTURE_2D))
Expand Down
2 changes: 1 addition & 1 deletion framework/Source/Linux/RPiRenderWindow.swift
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class RPiRenderWindow: ImageConsumer {
let windowHeight:UInt32

public init(width:UInt32, height:UInt32) {
bcm_host_init()
sharedImageProcessingContext.makeCurrentContext()
display = eglGetDisplay(nil /* EGL_DEFAULT_DISPLAY */)
// guard (display != EGL_NO_DISPLAY) else {throw renderingError(errorString:"Could not obtain display")}
// guard (eglInitialize(display, nil, nil) != EGL_FALSE) else {throw renderingError(errorString:"Could not initialize display")}
Expand Down
70 changes: 44 additions & 26 deletions framework/Source/Linux/V4LCamera.swift
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#if GLES
import COpenGLES.gles2
import COpenGLES.gles2
#else
import COpenGL
import COpenGL
#endif
import CVideo4Linux
import Glibc
Expand All @@ -12,33 +12,30 @@ public class V4LCamera:ImageSource {

let devicePath:String
let device:Int32
let size:Size
let size:Size
var cameraOutputTexture:GLuint = 0
var buffers = [buffer]()
var currentBuffer:Int32 = 0

public var runBenchmark:Bool = false
var numberOfFramesCaptured = 0
var totalFrameTimeDuringCapture:Double = 0.0

let yuvConversionShader:ShaderProgram?

public init(devicePath:String = "/dev/video0", size:Size) {
self.devicePath = devicePath
self.size = size
device = v4l2_open_swift(devicePath, O_RDWR, 0) // Maybe switch to O_RDWR | O_NONBLOCK with the ability to kick out if there's no new frame
print("Device: \(device)")

var capabilities:v4l2_capability = v4l2_capability()

var format:v4l2_format = v4l2_generate_YUV420_format(Int32(round(Double(size.width))), Int32(round(Double(size.height))))

print("Device resolution: \(format.fmt.pix.width) x \(format.fmt.pix.height)")

let result = v4l2_ioctl_S_FMT(device, &format)
let result2 = v4l2_ioctl_QUERYCAP(device, &capabilities)
print("Format: \(format), result: \(result)")
v4l2_ioctl_S_FMT(device, &format)
v4l2_ioctl_QUERYCAP(device, &capabilities)

print("Capabilities: \(capabilities), result: \(result2)")
yuvConversionShader = crashOnShaderCompileFailure("V4LCamera"){try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(3), fragmentShader:YUVConversionFullRangeUVPlanarFragmentShader)}
}

deinit {
Expand All @@ -47,8 +44,7 @@ public class V4LCamera:ImageSource {

public func startCapture() {
let numberOfBuffers:Int32 = 2
let requestBuffers = v4l2_request_buffer_size(device, numberOfBuffers)
print("Request buffers: \(requestBuffers)")
v4l2_request_buffer_size(device, numberOfBuffers)

for index in 0..<numberOfBuffers {
buffers.append(v4l2_generate_buffer(device, index))
Expand All @@ -64,30 +60,52 @@ public class V4LCamera:ImageSource {

public func grabFrame() {
v4l2_dequeue_buffer(device, currentBuffer)

let startTime = NSDate()

let startTime = NSDate()
let luminanceFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(size), textureOnly:true)

luminanceFramebuffer.lock()

glActiveTexture(GLenum(GL_TEXTURE0))
glBindTexture(GLenum(GL_TEXTURE_2D), luminanceFramebuffer.texture)
glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(round(Double(size.width))), GLsizei(round(Double(size.height))), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), buffers[Int(currentBuffer)].start)

// YUV 420 chrominance is split into two planes in V4L
let chrominanceFramebuffer1 = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(round(Double(size.width) / 2.0)), height:GLint(round(Double(size.height) / 2.0))), textureOnly:true)
chrominanceFramebuffer1.lock()

glActiveTexture(GLenum(GL_TEXTURE1))
glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer1.texture)
glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(round(Double(size.width) / 2.0)), GLsizei(round(Double(size.height) / 2.0)), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), buffers[Int(currentBuffer)].start + (Int(round(Double(size.width))) * Int(round(Double(size.height)))))

let chrominanceFramebuffer2 = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:GLSize(width:GLint(round(Double(size.width) / 2.0)), height:GLint(round(Double(size.height) / 2.0))), textureOnly:true)
chrominanceFramebuffer2.lock()

glActiveTexture(GLenum(GL_TEXTURE2))
glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceFramebuffer2.texture)
glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_LUMINANCE, GLsizei(round(Double(size.width) / 2.0)), GLsizei(round(Double(size.height) / 2.0)), 0, GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), buffers[Int(currentBuffer)].start + (Int(round(Double(size.width * size.height + size.width * size.height / 4.0)))))

v4l2_enqueue_buffer(device, currentBuffer)
if (currentBuffer == 0) {
currentBuffer = 1
} else {
currentBuffer = 0
}

updateTargetsWithFramebuffer(luminanceFramebuffer)
if runBenchmark {
let elapsedTime = -startTime.timeIntervalSinceNow
print("Current: \(elapsedTime * 1000.0) ms")
numberOfFramesCaptured += 1
totalFrameTimeDuringCapture += elapsedTime
print("Average: \(1000.0 * totalFrameTimeDuringCapture / Double(numberOfFramesCaptured))")
}
let cameraFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.Portrait, size:luminanceFramebuffer.sizeForTargetOrientation(.Portrait), textureOnly:false)

let conversionMatrix = colorConversionMatrix601FullRangeDefault
convertYUVToRGB(shader:self.yuvConversionShader!, luminanceFramebuffer:luminanceFramebuffer, chrominanceFramebuffer:chrominanceFramebuffer1, secondChrominanceFramebuffer:chrominanceFramebuffer2, resultFramebuffer:cameraFramebuffer, colorConversionMatrix:conversionMatrix)

updateTargetsWithFramebuffer(cameraFramebuffer)

if runBenchmark {
let elapsedTime = -startTime.timeIntervalSinceNow
print("Current: \(elapsedTime * 1000.0) ms")
numberOfFramesCaptured += 1
totalFrameTimeDuringCapture += elapsedTime
print("Average: \(1000.0 * totalFrameTimeDuringCapture / Double(numberOfFramesCaptured))")
}
}

func stopCapture() {
Expand Down
16 changes: 14 additions & 2 deletions framework/Source/Linux/v4lfuncs.c
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,19 @@ struct v4l2_format v4l2_generate_YUV420_format(int width, int height)
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.field = V4L2_FIELD_SEQ_TB;

return fmt;
}

/*
 * Builds a v4l2_format describing a planar YUV 4:2:2 capture of the given
 * width and height, suitable for passing to VIDIOC_S_FMT.
 *
 * The struct is zero-initialized first: v4l2_format contains a large union
 * plus padding, and handing indeterminate bytes to the VIDIOC_S_FMT ioctl
 * is undefined behavior and can yield driver-dependent, flaky results.
 */
struct v4l2_format v4l2_generate_YUV422_format(int width, int height)
{
    struct v4l2_format fmt = {0};   /* zero all fields/padding before filling */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV422P;
    /* Fields stored sequentially, top field first — matches the YUV420 variant above. */
    fmt.fmt.pix.field = V4L2_FIELD_SEQ_TB;

    return fmt;
}
Expand Down Expand Up @@ -127,4 +139,4 @@ void v4l2_enqueue_buffer(int fd, int index)
buf.memory = V4L2_MEMORY_MMAP;
buf.index = index;
v4l2_ioctl(fd, VIDIOC_QBUF, &buf);
}
}
3 changes: 2 additions & 1 deletion framework/Source/Linux/v4lfuncs.h
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,9 @@ int v4l2_streamon(int fd);
int v4l2_streamoff(int fd);
struct v4l2_format v4l2_generate_RGB24_format(int width, int height);
struct v4l2_format v4l2_generate_YUV420_format(int width, int height);
struct v4l2_format v4l2_generate_YUV422_format(int width, int height);
struct buffer v4l2_generate_buffer(int fd, int index);
struct v4l2_requestbuffers v4l2_request_buffer_size(int fd, int buffers);
// void v4l2_enqueue_initial_buffers(int fd, int buffers);
struct v4l2_buffer v4l2_dequeue_buffer(int fd, int index);
void v4l2_enqueue_buffer(int fd, int index);
void v4l2_enqueue_buffer(int fd, int index);
7 changes: 5 additions & 2 deletions framework/Source/OpenGLRendering.swift
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,11 @@ func renderQuadWithShader(shader:ShaderProgram, uniformSettings:ShaderUniformSet
glVertexAttribPointer(positionAttribute, 2, GLenum(GL_FLOAT), 0, 0, vertices)

for (index, inputTexture) in inputTextures.enumerate() {
guard let textureCoordinateAttribute = shader.attributeIndex("inputTextureCoordinate".withNonZeroSuffix(index)) else { fatalError("An attribute named \("inputTextureCoordinate".withNonZeroSuffix(index)) was missing from the shader program during rendering.") }
glVertexAttribPointer(textureCoordinateAttribute, 2, GLenum(GL_FLOAT), 0, 0, inputTexture.textureCoordinates)
if let textureCoordinateAttribute = shader.attributeIndex("inputTextureCoordinate".withNonZeroSuffix(index)) {
glVertexAttribPointer(textureCoordinateAttribute, 2, GLenum(GL_FLOAT), 0, 0, inputTexture.textureCoordinates)
} else if (index == 0) {
fatalError("The required attribute named inputTextureCoordinate was missing from the shader program during rendering.")
}

glActiveTexture(textureUnitForIndex(index))
glBindTexture(GLenum(GL_TEXTURE_2D), inputTexture.texture)
Expand Down
Loading

0 comments on commit 38e5078

Please sign in to comment.