@qidiandasheng · 2020-07-13

GPUImage Analysis (Part 1): GPUImageOutput

Audio/Video


GPUImageOutput

Every GPUImage response chain (filter chain) starts with a source that feeds image data into it, and these sources all inherit from GPUImageOutput. The ones used most often are:

- GPUImageVideoCamera: live video frames from the camera
- GPUImageStillCamera: still photos from the camera
- GPUImagePicture: a UIImage or CGImage
- GPUImageMovie: frames decoded from a movie file

This article takes GPUImageVideoCamera as the example and walks through how source data at the head of the chain is processed.
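
Before digging into the internals, it helps to see how these pieces are wired together in client code. A minimal sketch (the filter choice is arbitrary, and filteredView is assumed to be a GPUImageView already in the view hierarchy):

    // Minimal capture chain: camera -> sepia filter -> on-screen view.
    GPUImageVideoCamera *videoCamera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480
                                            cameraPosition:AVCaptureDevicePositionBack];
    videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

    GPUImageSepiaFilter *sepiaFilter = [[GPUImageSepiaFilter alloc] init];
    GPUImageView *filteredView = self.filteredView; // assumed: a GPUImageView in the view hierarchy

    [videoCamera addTarget:sepiaFilter];   // the camera's target is the filter
    [sepiaFilter addTarget:filteredView];  // the filter's target is the view

    [videoCamera startCameraCapture];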

GPUImageVideoCamera

Obtaining image data in the capture callback

GPUImageVideoCamera uses AVCaptureSession to capture data from an AV input device and deliver it to a specified output. The setup of AVCaptureSession itself is not covered here; we focus on what happens after the capture output callback fires.

The method below is AVCaptureVideoDataOutput's sample-buffer callback (parts omitted); the interesting piece is the image-handling path:

    - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {
        // Frame-rendering semaphore: wait until the previous frame has been fully
        // processed before accepting the next one. Its initial value is 1; a
        // successful wait drops it to 0 and the work below is enqueued.
        if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
        {
            return;
        }

        CFRetain(sampleBuffer);
        // Hand each captured frame to the asynchronous video-processing queue
        runAsynchronouslyOnVideoProcessingQueue(^{
            // Delegate callback (the caller can inspect the raw CMSampleBufferRef here)
            if (self.delegate)
            {
                [self.delegate willOutputSampleBuffer:sampleBuffer];
            }

            // Core image-processing entry point
            [self processVideoSampleBuffer:sampleBuffer];

            CFRelease(sampleBuffer);
            dispatch_semaphore_signal(frameRenderingSemaphore);
        });
    }
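
The DISPATCH_TIME_NOW wait is what makes this a frame-dropping gate rather than a queue: if the previous frame has not finished, the semaphore is still at 0, the wait fails immediately, and the new frame is simply discarded, so no backlog builds up. The same pattern in isolation (hypothetical names, not GPUImage code):

    // Standalone illustration of the frame-dropping semaphore pattern.
    dispatch_queue_t processingQueue =
        dispatch_queue_create("com.example.frameprocessing", DISPATCH_QUEUE_SERIAL);
    dispatch_semaphore_t gate = dispatch_semaphore_create(1);

    void (^handleFrame)(CMSampleBufferRef) = ^(CMSampleBufferRef frame) {
        if (dispatch_semaphore_wait(gate, DISPATCH_TIME_NOW) != 0)
        {
            return; // previous frame still processing: drop this one
        }
        CFRetain(frame);
        dispatch_async(processingQueue, ^{
            // ... expensive per-frame work goes here ...
            CFRelease(frame);
            dispatch_semaphore_signal(gate); // allow the next frame in
        });
    };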

Processing the video frame

    - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
    {
        if (capturePaused)
        {
            return;
        }

        // Record the start time (used by the benchmark below)
        CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
        // Get the current video frame
        CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
        // Get the frame's width and height
        int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
        int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);

        // Read the frame's color-space attachment
        CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
        // Pick the matching YUV -> RGB color-conversion matrix
        if (colorAttachments != NULL)
        {
            if (CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
            {
                if (isFullYUVRange)
                {
                    _preferredConversion = kColorConversion601FullRange;
                }
                else
                {
                    _preferredConversion = kColorConversion601;
                }
            }
            else
            {
                _preferredConversion = kColorConversion709;
            }
        }
        else
        {
            if (isFullYUVRange)
            {
                _preferredConversion = kColorConversion601FullRange;
            }
            else
            {
                _preferredConversion = kColorConversion601;
            }
        }

        // Presentation timestamp of this sample
        CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

        // Make GPUImage's EAGLContext the current OpenGL ES context
        [GPUImageContext useImageProcessingContext];

        /* Fast texture upload: a CVOpenGLESTextureCacheRef mapping between a
           CVImageBufferRef and an OpenGL ES texture, available on devices since
           iOS 5 but not in the simulator. Combined with captureAsYUV, this decides
           whether we can read the pixel buffer directly as YUV planes. */
        if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
        {
            // Texture-cache references for the luminance (Y) and chrominance (UV) planes
            CVOpenGLESTextureRef luminanceTextureRef = NULL;
            CVOpenGLESTextureRef chrominanceTextureRef = NULL;

            if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
            {
                // Lock the pixel buffer's base address
                CVPixelBufferLockBaseAddress(cameraFrame, 0);

                // Cache the dimensions taken from the CVImageBufferRef
                if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
                {
                    imageBufferWidth = bufferWidth;
                    imageBufferHeight = bufferHeight;
                }

                CVReturn err;
                // Y-plane
                // Select the texture unit the shader will sample luminance from
                glActiveTexture(GL_TEXTURE4);
                // Create the luminance texture from the Y plane. Note that both
                // branches are identical here: the dedicated red-texture path is
                // not used in this version, so GL_LUMINANCE is used either way.
                if ([GPUImageContext deviceSupportsRedTextures])
                {
                    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
                }
                else
                {
                    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
                }

                if (err)
                {
                    NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
                }

                // Get the luminance texture name and bind it
                luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
                glBindTexture(GL_TEXTURE_2D, luminanceTexture);
                // Clamp sampling at the texture edges
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

                // UV-plane
                glActiveTexture(GL_TEXTURE5);
                // Create the chrominance texture from the interleaved CbCr plane,
                // which is half the luma resolution in each dimension
                if ([GPUImageContext deviceSupportsRedTextures])
                {
                    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
                }
                else
                {
                    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
                }

                if (err)
                {
                    NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
                }

                // Get the chrominance texture name and bind it
                chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
                glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

                // Convert the YUV planes to RGB in the output framebuffer
                [self convertYUVToRGBOutput];

                // Width and height after applying the internal rotation
                int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
                if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
                {
                    rotatedImageBufferWidth = bufferHeight;
                    rotatedImageBufferHeight = bufferWidth;
                }

                // Update the targets with the cached texture
                [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];

                CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
                CFRelease(luminanceTextureRef);
                CFRelease(chrominanceTextureRef);
            }
            else
            {
                // (non-planar path omitted in this excerpt)
            }

            if (_runBenchmark)
            {
                numberOfFramesCaptured++;
                if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
                {
                    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                    totalFrameTimeDuringCapture += currentFrameTime;
                    NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
                    NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
                }
            }
        }
        else
        {
            CVPixelBufferLockBaseAddress(cameraFrame, 0);

            int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
            outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
            [outputFramebuffer activateFramebuffer];

            glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

            // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));

            // Using BGRA extension to pull in video frame data directly
            // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));

            [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];

            CVPixelBufferUnlockBaseAddress(cameraFrame, 0);

            if (_runBenchmark)
            {
                numberOfFramesCaptured++;
                if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
                {
                    CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
                    totalFrameTimeDuringCapture += currentFrameTime;
                }
            }
        }
    }
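
The kColorConversion* matrices selected above differ only in their coefficients. For reference, the textbook BT.601 full-range equations (R = Y + 1.402 Cr', G = Y - 0.344 Cb' - 0.714 Cr', B = Y + 1.772 Cb', with chroma re-centered on zero) pack into a column-major 3x3 matrix like the following; these are illustrative standard values, and GPUImage's own constant uses slightly rounded coefficients:

    // Illustrative BT.601 full-range YUV -> RGB matrix (column-major, as
    // glUniformMatrix3fv expects). GPUImage's kColorConversion601FullRange
    // has the same shape with slightly rounded coefficients.
    static const GLfloat kColorConversion601FullRangeReference[] = {
        1.0f,    1.0f,    1.0f,   // column 0: weights applied to Y
        0.0f,   -0.344f,  1.772f, // column 1: weights applied to (Cb - 0.5)
        1.402f, -0.714f,  0.0f,   // column 2: weights applied to (Cr - 0.5)
    };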
    // Convert the YUV planes to an RGB output framebuffer
    - (void)convertYUVToRGBOutput;
    {
        // Use the YUV -> RGB conversion shader program
        [GPUImageContext setActiveShaderProgram:yuvConversionProgram];

        int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
        if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
        {
            rotatedImageBufferWidth = imageBufferHeight;
            rotatedImageBufferHeight = imageBufferWidth;
        }

        // Fetch an output framebuffer of the rotated size from the shared cache
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
        [outputFramebuffer activateFramebuffer];

        glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        static const GLfloat squareVertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
        };

        // Bind the luminance texture to texture unit 4
        glActiveTexture(GL_TEXTURE4);
        glBindTexture(GL_TEXTURE_2D, luminanceTexture);
        // Point the luminance sampler at texture unit 4
        glUniform1i(yuvConversionLuminanceTextureUniform, 4);

        // Bind the chrominance texture to texture unit 5
        glActiveTexture(GL_TEXTURE5);
        glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
        // Point the chrominance sampler at texture unit 5
        glUniform1i(yuvConversionChrominanceTextureUniform, 5);

        // Upload the color-conversion matrix chosen in processVideoSampleBuffer:
        glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);

        // Feed the vertex positions
        glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
        // Feed the texture coordinates
        glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);

        // Draw the quad
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    }
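
On the GPU side, the yuvConversionProgram's fragment shader recombines the two planes with that matrix. A sketch modeled on GPUImage's full-range YUV conversion shader; the uniform names match the ones bound above, but the exact shader text in the library may differ slightly:

    // Fragment shader sketch: rebuild RGB from the Y and CbCr planes.
    NSString *const kYUVConversionFragmentShaderSketch = SHADER_STRING
    (
     varying highp vec2 textureCoordinate;

     uniform sampler2D luminanceTexture;    // bound to texture unit 4 above
     uniform sampler2D chrominanceTexture;  // bound to texture unit 5 above
     uniform mediump mat3 colorConversionMatrix;

     void main()
     {
         mediump vec3 yuv;
         // Y from the one-channel luminance texture; Cb/Cr from the
         // two-channel chrominance texture, re-centered around zero.
         yuv.x  = texture2D(luminanceTexture, textureCoordinate).r;
         yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
         gl_FragColor = vec4(colorConversionMatrix * yuv, 1.0);
     }
    );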

Updating the targets' input framebuffers and triggering rendering

The code below iterates over all current targets twice.

The first pass configures each target's input parameters: rotation, input size, and the input framebuffer (whose color attachment is the camera texture).

The second pass notifies every target to redraw with the new texture.

    - (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
    {
        // First, update all the framebuffers in the targets
        for (id<GPUImageInput> currentTarget in targets)
        {
            if ([currentTarget enabled])
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

                if (currentTarget != self.targetToIgnoreForUpdates)
                {
                    [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                    [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];

                    if ([currentTarget wantsMonochromeInput] && captureAsYUV)
                    {
                        [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                        // TODO: Replace optimization for monochrome output
                        [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                    }
                    else
                    {
                        [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                        [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                    }
                }
                else
                {
                    [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
        }

        // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
        [outputFramebuffer unlock];
        outputFramebuffer = nil;

        // Finally, trigger rendering as needed
        for (id<GPUImageInput> currentTarget in targets)
        {
            if ([currentTarget enabled])
            {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

                if (currentTarget != self.targetToIgnoreForUpdates)
                {
                    [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
                }
            }
        }
    }
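
Note the [outputFramebuffer unlock] between the two passes: GPUImageFramebuffer objects are reference counted, and dropping the count to zero returns the buffer to the shared cache for reuse. A simplified sketch of that mechanism (the real class also manages texture caches and a mode where reference counting is disabled):

    // Simplified sketch of GPUImageFramebuffer's lock/unlock reference counting.
    - (void)lock
    {
        framebufferReferenceCount++;
    }

    - (void)unlock
    {
        NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer");
        framebufferReferenceCount--;
        if (framebufferReferenceCount < 1)
        {
            // Hand the framebuffer back so a later fetchFramebufferForSize: can reuse it
            [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
        }
    }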

GPUImageFilter

The previous part covered the source at the head of the chain; this part covers the processing stage in the middle.

GPUImageFilter receives a source image, renders a new image through its own vertex and fragment shaders, and notifies the next object in the response chain once drawing is finished.

GPUImageFilter, like the other elements of the response chain, implements the GPUImageInput protocol: every element can supply textures to the chain and/or receive and process textures from the element before it. The downstream objects are called targets, and the chain can branch by adding multiple targets to one output.
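
The methods of GPUImageInput exercised in the code above, abridged from its declaration in GPUImageContext.h:

    @protocol GPUImageInput <NSObject>
    - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
    - (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
    - (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
    - (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
    - (BOOL)enabled;
    - (BOOL)wantsMonochromeInput;
    - (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
    @end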

How the upstream element triggers rendering

As shown above, once the source's framebuffer is ready it calls - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex on each target to kick off rendering.

    - (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex
    {
        // Vertex coordinates (normalized device coordinates)
        static const GLfloat imageVertices[] = {
            -1.0f, -1.0f,
             1.0f, -1.0f,
            -1.0f,  1.0f,
             1.0f,  1.0f,
        };

        // Render the result into this filter's output framebuffer
        [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];

        // Notify the targets
        [self informTargetsAboutNewFrameAtTime:frameTime];
    }

Vertex and texture coordinates

The vertex coordinates are fixed:

    static const GLfloat imageVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

The texture coordinates come from this method, which picks a set based on the requested rotation mode:

    + (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode
    {
        // No rotation
        static const GLfloat noRotationTextureCoordinates[] = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f,
        };

        // Rotate left (the image appears rotated to the right)
        static const GLfloat rotateLeftTextureCoordinates[] = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 0.0f,
            0.0f, 1.0f,
        };

        // ... (remaining rotation modes omitted)
    }
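
Reading the vertex and texture-coordinate arrays side by side shows the trick: the quad's geometry never changes, only which corner of the texture each vertex samples:

    // Triangle-strip order: vertex i pairs with texture coordinate i.
    //   vertex (-1,-1) bottom-left   <- noRotation (0,0)   <- rotateLeft (1,0)
    //   vertex ( 1,-1) bottom-right  <- noRotation (1,0)   <- rotateLeft (1,1)
    //   vertex (-1, 1) top-left      <- noRotation (0,1)   <- rotateLeft (0,0)
    //   vertex ( 1, 1) top-right     <- noRotation (1,1)   <- rotateLeft (0,1)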

Rendering the output

    // Render into the output framebuffer
    - (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
    {
        // Bail out if rendering is disabled
        if (self.preventRendering)
        {
            // Still release the input framebuffer
            [firstInputFramebuffer unlock];
            return;
        }

        [GPUImageContext setActiveShaderProgram:filterProgram];

        // Fetch this filter's own output framebuffer from the shared cache
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
        [outputFramebuffer activateFramebuffer];
        if (usingNextFrameForImageCapture)
        {
            [outputFramebuffer lock];
        }

        [self setUniformsForProgramAtIndex:0];

        glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
        glClear(GL_COLOR_BUFFER_BIT);

        // Bind the input texture to texture unit 2
        glActiveTexture(GL_TEXTURE2);
        glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
        // Point the sampler uniform at texture unit 2 (tells GLSL which unit to read)
        glUniform1i(filterInputTextureUniform, 2);

        // Feed the vertex and texture coordinates, then draw the quad
        glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
        glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

        // Release the input framebuffer
        [firstInputFramebuffer unlock];

        // When a caller wants the next frame as an image (default NO),
        // signal that drawing has completed
        if (usingNextFrameForImageCapture)
        {
            dispatch_semaphore_signal(imageCaptureSemaphore);
        }
    }
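
The usingNextFrameForImageCapture flag and imageCaptureSemaphore exist so a caller can synchronously read a filter's output back as a UIImage. The usual calling pattern (inputImage is assumed to be an existing UIImage):

    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageSepiaFilter *filter = [[GPUImageSepiaFilter alloc] init];
    [source addTarget:filter];

    [filter useNextFrameForImageCapture];  // sets usingNextFrameForImageCapture
    [source processImage];                 // drives one frame through the chain
    // Blocks on imageCaptureSemaphore until the glDrawArrays above has finished
    UIImage *result = [filter imageFromCurrentFramebuffer];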

Notifying downstream targets to render

This step mirrors what the camera source did above: update the input parameters of all targets, then notify them to render:

    - (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
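
Its body is essentially the camera's two-pass loop again. A condensed sketch of the method (details such as the targetToIgnoreForUpdates check are omitted):

    // Condensed sketch of -informTargetsAboutNewFrameAtTime:
    - (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime
    {
        // Pass 1: hand every target this filter's output framebuffer and size
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
            [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
        }

        // Release our hold so the framebuffer can return to the cache
        // once the targets are done with it
        [[self framebufferForOutput] unlock];
        if (!usingNextFrameForImageCapture)
        {
            [self removeOutputFramebuffer];
        }

        // Pass 2: tell every target that a new frame is ready to render
        for (id<GPUImageInput> currentTarget in targets)
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
            [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
        }
    }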

References

GPUImage
GPUImage源码阅读 (GPUImage source-code reading notes)
