Skip to content

Commit

Permalink
Add OpenGL camera render views for YUV and RGB pixel buffers, plus a LUT filter view
Browse files Browse the repository at this point in the history
  • Loading branch information
FMYang committed Sep 20, 2021
1 parent 587e89a commit bde9c4e
Show file tree
Hide file tree
Showing 62 changed files with 1,431 additions and 447 deletions.
5 changes: 2 additions & 3 deletions GPUImage/GPUImage/Source/GPUImageFramebuffer.m
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@ @interface GPUImageFramebuffer() {
// 帧缓存的唯一ID
GLuint framebuffer;

// 原始图像
// 渲染的目标
CVPixelBufferRef renderTarget;
// 生成的纹理
// 渲染的纹理
CVOpenGLESTextureRef renderTexture;
NSUInteger readLockCount;

Expand Down Expand Up @@ -77,7 +77,6 @@ - (void)generateFramebuffer {
NSAssert(NO, @"Error at CVPixelBufferCreate %d", err);
}

// 通过原始图像renderTarget生成纹理对象renderTexture
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
coreVideoTextureCache,
renderTarget,
Expand Down
66 changes: 57 additions & 9 deletions LearnOpenGL/LearnOpenGL.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,15 @@
archiveVersion = 1;
classes = {
};
objectVersion = 50;
objectVersion = 51;
objects = {

/* Begin PBXBuildFile section */
BE586CAE016A1B2B49F8B952 /* libPods-LearnOpenGL.a in Frameworks */ = {isa = PBXBuildFile; fileRef = DB170A91AB84CF3EACA3C1B5 /* libPods-LearnOpenGL.a */; };
260892096C6FCF96118B0BFF /* Pods_LearnOpenGL.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0BE84A36989D8BD717CCF62B /* Pods_LearnOpenGL.framework */; };
880C26AB26F62B95009140CC /* FMCameraContext.m in Sources */ = {isa = PBXBuildFile; fileRef = 880C26AA26F62B95009140CC /* FMCameraContext.m */; };
880C26AF26F632F5009140CC /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 880C26AE26F632F5009140CC /* CoreVideo.framework */; };
880C26BD26F71696009140CC /* FMCameraOpenGLRGBView.m in Sources */ = {isa = PBXBuildFile; fileRef = 880C26BC26F71696009140CC /* FMCameraOpenGLRGBView.m */; };
880C26C126F7454A009140CC /* FMCameraLutView.m in Sources */ = {isa = PBXBuildFile; fileRef = 880C26C026F7454A009140CC /* FMCameraLutView.m */; };
E406E72526E2188300C35047 /* FMOpenGLLutView.m in Sources */ = {isa = PBXBuildFile; fileRef = E406E72426E2188300C35047 /* FMOpenGLLutView.m */; };
E406E72726E218B800C35047 /* lookup.png in Resources */ = {isa = PBXBuildFile; fileRef = E406E72626E218B800C35047 /* lookup.png */; };
E406E74C26E2210A00C35047 /* F01.png in Resources */ = {isa = PBXBuildFile; fileRef = E406E72A26E2210A00C35047 /* F01.png */; };
Expand Down Expand Up @@ -121,9 +125,16 @@
/* End PBXBuildFile section */

/* Begin PBXFileReference section */
0BE84A36989D8BD717CCF62B /* Pods_LearnOpenGL.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_LearnOpenGL.framework; sourceTree = BUILT_PRODUCTS_DIR; };
74D965FBC4E43AD6F4CE8E1C /* Pods-LearnOpenGL.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-LearnOpenGL.debug.xcconfig"; path = "Target Support Files/Pods-LearnOpenGL/Pods-LearnOpenGL.debug.xcconfig"; sourceTree = "<group>"; };
880C26A926F62B95009140CC /* FMCameraContext.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FMCameraContext.h; sourceTree = "<group>"; };
880C26AA26F62B95009140CC /* FMCameraContext.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FMCameraContext.m; sourceTree = "<group>"; };
880C26AE26F632F5009140CC /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
880C26BB26F71696009140CC /* FMCameraOpenGLRGBView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FMCameraOpenGLRGBView.h; sourceTree = "<group>"; };
880C26BC26F71696009140CC /* FMCameraOpenGLRGBView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FMCameraOpenGLRGBView.m; sourceTree = "<group>"; };
880C26BF26F7454A009140CC /* FMCameraLutView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FMCameraLutView.h; sourceTree = "<group>"; };
880C26C026F7454A009140CC /* FMCameraLutView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FMCameraLutView.m; sourceTree = "<group>"; };
ABF1E9628FD39D92374D8DB6 /* Pods-LearnOpenGL.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-LearnOpenGL.release.xcconfig"; path = "Target Support Files/Pods-LearnOpenGL/Pods-LearnOpenGL.release.xcconfig"; sourceTree = "<group>"; };
DB170A91AB84CF3EACA3C1B5 /* libPods-LearnOpenGL.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-LearnOpenGL.a"; sourceTree = BUILT_PRODUCTS_DIR; };
E406E72326E2188300C35047 /* FMOpenGLLutView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FMOpenGLLutView.h; sourceTree = "<group>"; };
E406E72426E2188300C35047 /* FMOpenGLLutView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FMOpenGLLutView.m; sourceTree = "<group>"; };
E406E72626E218B800C35047 /* lookup.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = lookup.png; sourceTree = "<group>"; };
Expand Down Expand Up @@ -259,14 +270,32 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
880C26AF26F632F5009140CC /* CoreVideo.framework in Frameworks */,
E4BED5FF26D33CC20033F2EB /* OpenGLES.framework in Frameworks */,
BE586CAE016A1B2B49F8B952 /* libPods-LearnOpenGL.a in Frameworks */,
260892096C6FCF96118B0BFF /* Pods_LearnOpenGL.framework in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */

/* Begin PBXGroup section */
880C26BA26F71664009140CC /* Camera */ = {
isa = PBXGroup;
children = (
E49275AE26F58B2500834D03 /* FMCameraFilterVC.h */,
E49275AF26F58B2500834D03 /* FMCameraFilterVC.m */,
E49275B126F592B000834D03 /* FMCameraOpenGLView.h */,
E49275B226F592B000834D03 /* FMCameraOpenGLView.m */,
880C26A926F62B95009140CC /* FMCameraContext.h */,
880C26AA26F62B95009140CC /* FMCameraContext.m */,
880C26BB26F71696009140CC /* FMCameraOpenGLRGBView.h */,
880C26BC26F71696009140CC /* FMCameraOpenGLRGBView.m */,
880C26BF26F7454A009140CC /* FMCameraLutView.h */,
880C26C026F7454A009140CC /* FMCameraLutView.m */,
);
path = Camera;
sourceTree = "<group>";
};
CF57A69DD5413014FD49607A /* Pods */ = {
isa = PBXGroup;
children = (
Expand Down Expand Up @@ -497,10 +526,6 @@
E4BED5E726D334390033F2EB /* ViewController.m */,
E44A4D9726D79EED00BDB0A3 /* FMOpenGLVC.h */,
E44A4D9826D79EED00BDB0A3 /* FMOpenGLVC.m */,
E49275AE26F58B2500834D03 /* FMCameraFilterVC.h */,
E49275AF26F58B2500834D03 /* FMCameraFilterVC.m */,
E49275B126F592B000834D03 /* FMCameraOpenGLView.h */,
E49275B226F592B000834D03 /* FMCameraOpenGLView.m */,
E4BED5F926D3346D0033F2EB /* Source */,
E44A4D9126D7894F00BDB0A3 /* Resources */,
E44A4D9626D79E0C00BDB0A3 /* SupportingFiles */,
Expand All @@ -525,6 +550,7 @@
E44A4D8B26D779B200BDB0A3 /* FMOpenGLTexture.m */,
E406E72326E2188300C35047 /* FMOpenGLLutView.h */,
E406E72426E2188300C35047 /* FMOpenGLLutView.m */,
880C26BA26F71664009140CC /* Camera */,
E44A4D8326D5F56B00BDB0A3 /* remain */,
E44A4D9A26D891AA00BDB0A3 /* Matrix */,
);
Expand All @@ -534,8 +560,9 @@
E4BED5FD26D33CC20033F2EB /* Frameworks */ = {
isa = PBXGroup;
children = (
880C26AE26F632F5009140CC /* CoreVideo.framework */,
E4BED5FE26D33CC20033F2EB /* OpenGLES.framework */,
DB170A91AB84CF3EACA3C1B5 /* libPods-LearnOpenGL.a */,
0BE84A36989D8BD717CCF62B /* Pods_LearnOpenGL.framework */,
);
name = Frameworks;
sourceTree = "<group>";
Expand All @@ -551,6 +578,7 @@
E4BED5D926D334390033F2EB /* Sources */,
E4BED5DA26D334390033F2EB /* Frameworks */,
E4BED5DB26D334390033F2EB /* Resources */,
8809C2CC31759392C4D2D2D6 /* [CP] Embed Pods Frameworks */,
);
buildRules = (
);
Expand Down Expand Up @@ -696,6 +724,23 @@
/* End PBXResourcesBuildPhase section */

/* Begin PBXShellScriptBuildPhase section */
8809C2CC31759392C4D2D2D6 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-LearnOpenGL/Pods-LearnOpenGL-frameworks-${CONFIGURATION}-input-files.xcfilelist",
);
name = "[CP] Embed Pods Frameworks";
outputFileListPaths = (
"${PODS_ROOT}/Target Support Files/Pods-LearnOpenGL/Pods-LearnOpenGL-frameworks-${CONFIGURATION}-output-files.xcfilelist",
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-LearnOpenGL/Pods-LearnOpenGL-frameworks.sh\"\n";
showEnvVarsInLog = 0;
};
CD9C9A4B6662C7CEE7DAD21E /* [CP] Check Pods Manifest.lock */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
Expand Down Expand Up @@ -728,6 +773,7 @@
E44A4D9026D77AAC00BDB0A3 /* FMOpenGLView.m in Sources */,
E4BED6AD26D3AFB10033F2EB /* FMOpenGLTriangleView.m in Sources */,
E406E72526E2188300C35047 /* FMOpenGLLutView.m in Sources */,
880C26C126F7454A009140CC /* FMCameraLutView.m in Sources */,
E44A4D8C26D779B200BDB0A3 /* FMOpenGLTexture.m in Sources */,
E49275B026F58B2500834D03 /* FMCameraFilterVC.m in Sources */,
E4BED5E826D334390033F2EB /* ViewController.m in Sources */,
Expand All @@ -739,8 +785,10 @@
E49275B326F592B000834D03 /* FMCameraOpenGLView.m in Sources */,
E44A4D7F26D5D8B700BDB0A3 /* FMOpenGLWindow.m in Sources */,
E4BED5FC26D33B400033F2EB /* FMTriangleView.m in Sources */,
880C26AB26F62B95009140CC /* FMCameraContext.m in Sources */,
E4BED5F326D334400033F2EB /* main.m in Sources */,
E44A4D9D26D891B200BDB0A3 /* matrix.c in Sources */,
880C26BD26F71696009140CC /* FMCameraOpenGLRGBView.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
Expand Down
34 changes: 34 additions & 0 deletions LearnOpenGL/LearnOpenGL/Source/Camera/FMCameraContext.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
//
// FMCameraContext.h
// LearnOpenGL
//
// Created by yfm on 2021/9/18.
//

#import <Foundation/Foundation.h>

#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#import <OpenGLES/EAGLDrawable.h>

#import <CoreMedia/CoreMedia.h>

NS_ASSUME_NONNULL_BEGIN

/// Shared OpenGL ES context manager for the camera rendering pipeline,
/// modeled on GPUImage's context handling. All GL work for camera frames
/// must run with this context current on the calling thread — a mismatched
/// context draws nothing.
@interface FMCameraContext : NSObject

/// The EAGL context used by the camera pipeline. Important: in the camera
/// output callback, the context active on the current thread must be this
/// one, otherwise nothing is rendered.
/// (`strong` replaces the pre-ARC `retain`; identical semantics under ARC.)
@property(readonly, strong, nonatomic) EAGLContext *context;

/// Serial queue on which context work is intended to run.
@property(readonly, nonatomic) dispatch_queue_t contextQueue;

/// Core Video texture cache used to wrap CVPixelBuffers as GL textures.
@property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache;

/// Returns the process-wide shared instance.
+ (FMCameraContext *)shared;

/// Makes the shared instance's context current on the calling thread.
+ (void)useImageProcessingContext;

/// Presents the currently bound renderbuffer to the screen.
- (void)presentBufferForDisplay;

@end

NS_ASSUME_NONNULL_END
69 changes: 69 additions & 0 deletions LearnOpenGL/LearnOpenGL/Source/Camera/FMCameraContext.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
//
// FMCameraContext.m
// LearnOpenGL
//
// Created by yfm on 2021/9/18.
//

#import "FMCameraContext.h"

@implementation FMCameraContext

@synthesize context = _context;
@synthesize coreVideoTextureCache = _coreVideoTextureCache;

/// Designated initializer. Creates the serial queue intended for context
/// work. Normally reached only once, via +shared.
- (instancetype)init {
    if (self = [super init]) {
        // Queue label intentionally matches GPUImage's so traces/docs line up.
        _contextQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.openGLESContextQueue", DISPATCH_QUEUE_SERIAL);
    }
    return self;
}

/// Process-wide singleton accessor (dispatch_once guarded).
+ (FMCameraContext *)shared {
    static FMCameraContext *instance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // `self` is already the Class object here; no need for [self class].
        instance = [[self alloc] init];
    });
    return instance;
}

/// Convenience: makes the shared context current on the calling thread.
+ (void)useImageProcessingContext {
    [[FMCameraContext shared] useAsCurrentContext];
}

/// Activates this object's EAGL context on the calling thread, skipping
/// the setCurrentContext: call when it is already current.
- (void)useAsCurrentContext {
    EAGLContext *currentContext = [self context];
    if ([EAGLContext currentContext] != currentContext) {
        [EAGLContext setCurrentContext:currentContext];
    }
}

/// Lazily creates the OpenGL ES 2.0 context on first access.
/// Side effect on first call: makes the new context current and disables
/// depth testing (2D camera rendering does not need it).
- (EAGLContext *)context {
    if (_context == nil) {
        EAGLContext *eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        // Fail loudly if ES2 is unavailable: continuing with a nil context
        // would turn every later GL call into a silent no-op (consistent
        // with the assert in -coreVideoTextureCache below).
        NSAssert(eaglContext != nil, @"Unable to create an OpenGL ES 2.0 context");
        _context = eaglContext;
        [EAGLContext setCurrentContext:_context];
        glDisable(GL_DEPTH_TEST);
    }

    return _context;
}

/// Lazily creates the Core Video texture cache bound to this context.
/// The cache is what allows CVPixelBuffers to be used as GL textures
/// without a CPU copy.
- (CVOpenGLESTextureCacheRef)coreVideoTextureCache {
    if (_coreVideoTextureCache == NULL) {
        CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [self context], NULL, &_coreVideoTextureCache);
        if (err) {
            NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
        }
    }
    return _coreVideoTextureCache;
}

/// Presents the renderbuffer currently bound to GL_RENDERBUFFER.
/// Caller is responsible for having this context current first.
- (void)presentBufferForDisplay {
    [self.context presentRenderbuffer:GL_RENDERBUFFER];
}

@end
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,22 @@

#import "FMCameraFilterVC.h"
#import "FMCameraOpenGLView.h"
#import "FMCameraOpenGLRGBView.h"
#import "FMCameraLutView.h"

@interface FMCameraFilterVC() <AVCaptureVideoDataOutputSampleBufferDelegate> {
dispatch_semaphore_t frameRenderingSemaphore;

BOOL _captureAsYUV;
}
@property (nonatomic) AVCaptureSession *captureSession;
@property (nonatomic) AVCaptureDeviceInput *videoInput;
@property (nonatomic) AVCaptureVideoDataOutput *videoOutput;
@property (nonatomic) dispatch_queue_t cameraOutputQueue;
@property (nonatomic) dispatch_queue_t videoProcessQueue;
@property (nonatomic) FMCameraOpenGLView *glView;
@property (nonatomic) FMCameraOpenGLRGBView *rgbGlView;
@property (nonatomic) FMCameraLutView *lutView;

@end

Expand All @@ -28,6 +34,7 @@ - (void)dealloc {

- (instancetype)init {
if(self = [super init]) {
_captureAsYUV = NO;
frameRenderingSemaphore = dispatch_semaphore_create(1);

_cameraOutputQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0);
Expand All @@ -46,7 +53,11 @@ - (instancetype)init {

_videoOutput = [[AVCaptureVideoDataOutput alloc] init];
_videoOutput.alwaysDiscardsLateVideoFrames = NO;
[_videoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}]; // yuv 420f fullRange
if(_captureAsYUV) {
[_videoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}]; // yuv 420f fullRange
} else {
[_videoOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}]; // 32RGBA不支持,
}
[_videoOutput setSampleBufferDelegate:self queue:_cameraOutputQueue];

if([_captureSession canAddOutput:_videoOutput]) {
Expand All @@ -61,8 +72,17 @@ - (instancetype)init {
/// Creates the GL preview view and starts the capture session.
- (void)viewDidLoad {
[super viewDidLoad];

// Alternative preview paths kept for reference: the YUV view renders the
// camera's biplanar YUV output directly; the RGB view expects BGRA frames.
// if(_captureAsYUV) {
// _glView = [[FMCameraOpenGLView alloc] initWithFrame:self.view.bounds];
// [self.view addSubview:_glView];
// } else {
// _rgbGlView = [[FMCameraOpenGLRGBView alloc] initWithFrame:self.view.bounds];
// [self.view addSubview:_rgbGlView];
// }

// LUT filter view — consumes RGB (BGRA) pixel buffers.
_lutView = [[FMCameraLutView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:_lutView];

[self startRunning];
}
Expand Down Expand Up @@ -101,7 +121,13 @@ - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleB
#pragma mark - process sampleBuffer
/// Extracts the pixel buffer from a captured sample buffer and forwards it
/// to the active preview view for rendering.
/// NOTE(review): reached from the capture output callback, whose delegate
/// queue is _cameraOutputQueue — presumably not the main thread; confirm.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer {
CVPixelBufferRef videoFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
// Alternative render paths kept for reference (YUV vs plain RGB views):
// if(_captureAsYUV) {
// [self.glView renderPixelBuffer:videoFrame];
// } else {
// [self.rgbGlView renderPixelBuffer:videoFrame];
// }

[_lutView renderPixelBuffer:videoFrame];
}

#pragma mark -
Expand Down
18 changes: 18 additions & 0 deletions LearnOpenGL/LearnOpenGL/Source/Camera/FMCameraLutView.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
//
// FMCameraLutView.h
// LearnOpenGL
//
// Created by yfm on 2021/9/19.
//

#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// View that renders camera frames through an OpenGL LUT (color lookup
/// table) filter.
@interface FMCameraLutView : UIView

/// Renders the given camera frame into the view.
/// @param pixelBuffer The frame to render — presumably 32BGRA, matching the
///        capture configuration in FMCameraFilterVC; TODO confirm against
///        the implementation (not visible here).
- (void)renderPixelBuffer:(CVPixelBufferRef)pixelBuffer;

@end

NS_ASSUME_NONNULL_END
Loading

0 comments on commit bde9c4e

Please sign in to comment.