【Notes】Rendering YUV Data (NV12/I420) on the Android Platform
Posted by i-am-normal
【Scenario】
Add H.264 decoding and YUV display to an Android application. Decoding is done with AMediaCodec and is not covered here.
Rendering uses OpenGL ES 2.0. Most solutions found online, including WebRTC's renderer, target I420 (three-plane YUV420); far fewer cover NV12 (perhaps my searching skills are to blame).
It took two days to find a working approach, so I am recording it here.
【Approach】
Rendering NV12 with OpenGL ES:
create two textures, one for the Y plane and one for the interleaved UV plane;
convert NV12 to RGB in the fragment shader, render on the GPU, and present the result on an Android GLSurfaceView.
Rendering I420 with OpenGL ES:
create three textures, for the Y, U and V planes respectively;
convert I420 to RGB in the fragment shader, render on the GPU, and present the result on an Android GLSurfaceView. (The key differences between the two paths are sketched below, before the full code.)
* How to use GLSurfaceView itself is not covered here; this post only records how the JNI-layer renderer is written.
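Before the full code, the essential difference between the two paths is how the chroma data is uploaded and sampled. A minimal sketch, assuming each texture has already been generated and bound with its filter/wrap parameters set (the sampler names in the comments are placeholders, not the ones used in the renderer below):

// NV12 (semi-planar): two textures.
//   Y plane              -> GL_LUMINANCE       (1 byte per texel, read as .r)
//   interleaved UV plane -> GL_LUMINANCE_ALPHA (2 bytes per texel: U in .r, V in .a)
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);                 // Y
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width / 2, height / 2, 0,
             GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, NULL);           // UV

// I420 (planar): three textures, all GL_LUMINANCE.
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);                 // Y
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);                 // U
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);                 // V

// In the fragment shader, only the chroma lookup differs:
//   NV12:  u = texture2D(UVtex, st).r;  v = texture2D(UVtex, st).a;
//   I420:  u = texture2D(Utex,  st).r;  v = texture2D(Vtex,  st).r;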
【Code】
The code is based on the YUV rendering code from WebRTC in the Android source tree, with an added check of the YUV format and the NV12-related logic.
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>

extern "C" {
#include "w_log.h"
}
#include "render_init.h"
#include "render_opengles20.h"

int32_t localColorFMT;

const char RenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };

const char RenderOpenGles20::g_vertextShader[] = {
  "attribute vec4 aPosition;\n"
  "attribute vec2 aTextureCoord;\n"
  "varying vec2 vTextureCoord;\n"
  "void main() {\n"
  "  gl_Position = aPosition;\n"
  "  vTextureCoord = aTextureCoord;\n"
  "}\n" };

// The fragment shader.
// Do YUV to RGB565 conversion.
const char RenderOpenGles20::g_fragmentShader[] = {
  "precision mediump float;\n"
  "uniform sampler2D Ytex;\n"
  "uniform sampler2D Utex,Vtex;\n"
  "varying vec2 vTextureCoord;\n"
  "uniform int colorFMT;\n"
  "void main(void) {\n"
  "  float nx,ny,r,g,b,y,u,v;\n"
  "  mediump vec4 txl,ux,vx;\n"
  "  nx=vTextureCoord[0];\n"
  "  ny=vTextureCoord[1];\n"
  // NV12 (COLOR_FormatYUV420SemiPlanar == 21): U and V are read from the
  // interleaved UV texture (.r and .a of the GL_LUMINANCE_ALPHA texture).
  "  if (colorFMT == 21){\n"
  "    y=texture2D(Ytex,vec2(nx,ny)).r;\n"
  "    u=texture2D(Utex,vec2(nx,ny)).r;\n"
  "    v=texture2D(Utex,vec2(nx,ny)).a;\n"
  "  }\n"
  // I420: U and V come from two separate GL_LUMINANCE textures.
  "  else {\n"
  "    y=texture2D(Ytex,vec2(nx,ny)).r;\n"
  "    u=texture2D(Utex,vec2(nx,ny)).r;\n"
  "    v=texture2D(Vtex,vec2(nx,ny)).r;\n"
  "  }\n"
  //"  y = v;\n"
  // BT.601 limited-range YUV -> RGB conversion.
  "  y=1.1643*(y-0.0625);\n"
  "  u=u-0.5;\n"
  "  v=v-0.5;\n"
  "  r=y+1.5958*v;\n"
  "  g=y-0.39173*u-0.81290*v;\n"
  "  b=y+2.017*u;\n"
  "  gl_FragColor=vec4(r,g,b,1.0);\n"
  "}\n" };

RenderOpenGles20::RenderOpenGles20() :
    _id(0),
    _textureWidth(-1),
    _textureHeight(-1),
    _colorFMT(-1) {
  LOGI("%s: id %d", __FUNCTION__, (int) _id);

  const GLfloat vertices[20] = {
      // X, Y, Z, U, V
       1, -1, 0, 1, 0,   // Bottom Left
      -1, -1, 0, 0, 0,   // Bottom Right
      -1,  1, 0, 0, 1,   // Top Right
       1,  1, 0, 1, 1 }; // Top Left

  memcpy(_vertices, vertices, sizeof(_vertices));
}

RenderOpenGles20::~RenderOpenGles20() {
  glDeleteTextures(3, _textureIds);
}

int32_t RenderOpenGles20::SetRotateMode() {
  int32_t zOrder = 0;
  GLfloat left = 1;
  GLfloat right = 0;
  GLfloat top = 0;
  GLfloat bottom = 1;

  // rotate
  LOGI("Should rotate");
  SetCoordinates(zOrder, left, top, right, bottom);

  // set the vertices array in the shader
  // _vertices contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(_positionHandle, 3, GL_FLOAT, false,
                        5 * sizeof(GLfloat), _vertices);
  checkGlError("glVertexAttribPointer aPosition");

  glEnableVertexAttribArray(_positionHandle);
  checkGlError("glEnableVertexAttribArray positionHandle");

  // set the texture coordinate array in the shader
  // _vertices contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(_textureHandle, 2, GL_FLOAT, false,
                        5 * sizeof(GLfloat), &_vertices[3]);
  checkGlError("glVertexAttribPointer maTextureHandle");

  glEnableVertexAttribArray(_textureHandle);
  checkGlError("glEnableVertexAttribArray textureHandle");

  LOGI("Rotate Done");
  return 0;
}

int32_t RenderOpenGles20::SetFlags(uint32_t flags) {
  LOGI("Flags: %d", flags);
  if (0 == (flags & FLAG_ROTATE)) {
    SetRotateMode();
  }
  return 0;
}

int32_t RenderOpenGles20::Setup(int32_t width, int32_t height) {
  LOGE("%s: width %d, height %d", __FUNCTION__, (int) width, (int) height);

  printGLString("Version", GL_VERSION);
  printGLString("Vendor", GL_VENDOR);
  printGLString("Renderer", GL_RENDERER);
  printGLString("Extensions", GL_EXTENSIONS);

  int maxTextureImageUnits[2];
  int maxTextureSize[2];
  glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
  glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);

  LOGE("%s: number of textures %d, size %d", __FUNCTION__,
       (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);

  _program = createProgram(g_vertextShader, g_fragmentShader);
  if (!_program) {
    LOGE("%s: Could not create program", __FUNCTION__);
    return -1;
  }

  int positionHandle = glGetAttribLocation(_program, "aPosition");
  checkGlError("glGetAttribLocation aPosition");
  if (positionHandle == -1) {
    LOGE("%s: Could not get aPosition handle", __FUNCTION__);
    return -1;
  }
  _positionHandle = positionHandle;

  int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
  checkGlError("glGetAttribLocation aTextureCoord");
  if (textureHandle == -1) {
    LOGE("%s: Could not get aTextureCoord handle", __FUNCTION__);
    return -1;
  }
  _textureHandle = textureHandle;

  // set the vertices array in the shader
  // _vertices contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
                        5 * sizeof(GLfloat), _vertices);
  checkGlError("glVertexAttribPointer aPosition");

  glEnableVertexAttribArray(positionHandle);
  checkGlError("glEnableVertexAttribArray positionHandle");

  // set the texture coordinate array in the shader
  // _vertices contains 4 vertices with 5 coordinates.
  // 3 for (xyz) for the vertices and 2 for the texture
  glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false,
                        5 * sizeof(GLfloat), &_vertices[3]);
  checkGlError("glVertexAttribPointer maTextureHandle");

  glEnableVertexAttribArray(textureHandle);
  checkGlError("glEnableVertexAttribArray textureHandle");

  glUseProgram(_program);

  int i = glGetUniformLocation(_program, "Ytex");
  checkGlError("glGetUniformLocation");
  glUniform1i(i, 0);  // Bind Ytex to texture unit 0 (sets the Ytex sampler in the shader)
  checkGlError("glUniform1i Ytex");

  i = glGetUniformLocation(_program, "Utex");
  checkGlError("glGetUniformLocation Utex");
  glUniform1i(i, 1);  // Bind Utex to texture unit 1 (sets the Utex sampler in the shader)
  checkGlError("glUniform1i Utex");

  i = glGetUniformLocation(_program, "Vtex");
  checkGlError("glGetUniformLocation");
  glUniform1i(i, 2);  // Bind Vtex to texture unit 2 (sets the Vtex sampler in the shader)
  checkGlError("glUniform1i Vtex");

  glViewport(0, 0, width, height);  // viewport size; executed only once, in Setup()
  LOGE("ViewPort:%d %d", width, height);
  checkGlError("glViewport");
  return 0;
}

// SetCoordinates
// Sets the coordinates where the stream shall be rendered.
// Values must be between 0 and 1.
int32_t RenderOpenGles20::SetCoordinates(int32_t zOrder, const float left,
                                         const float top, const float right,
                                         const float bottom) {
  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
    LOGE("%s: Wrong coordinates", __FUNCTION__);
    return -1;
  }

  //  X, Y, Z, U, V
  // -1, -1, 0, 0, 1,  // Bottom Left
  //  1, -1, 0, 1, 1,  // Bottom Right
  //  1,  1, 0, 1, 0,  // Top Right
  // -1,  1, 0, 0, 0   // Top Left

  // Bottom Left
  _vertices[0] = (left * 2) - 1;
  _vertices[1] = -1 * (2 * bottom) + 1;
  _vertices[2] = zOrder;

  // Bottom Right
  _vertices[5] = (right * 2) - 1;
  _vertices[6] = -1 * (2 * bottom) + 1;
  _vertices[7] = zOrder;

  // Top Right
  _vertices[10] = (right * 2) - 1;
  _vertices[11] = -1 * (2 * top) + 1;
  _vertices[12] = zOrder;

  // Top Left
  _vertices[15] = (left * 2) - 1;
  _vertices[16] = -1 * (2 * top) + 1;
  _vertices[17] = zOrder;

  return 0;
}

GLuint RenderOpenGles20::loadShader(GLenum shaderType, const char* pSource) {
  GLuint shader = glCreateShader(shaderType);
  if (shader) {
    glShaderSource(shader, 1, &pSource, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
      GLint infoLen = 0;
      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
      if (infoLen) {
        char* buf = (char*) malloc(infoLen);
        if (buf) {
          glGetShaderInfoLog(shader, infoLen, NULL, buf);
          LOGE("%s: Could not compile shader %d: %s", __FUNCTION__, shaderType, buf);
          free(buf);
        }
        glDeleteShader(shader);
        shader = 0;
      }
    }
  }
  return shader;
}

GLuint RenderOpenGles20::createProgram(const char* pVertexSource,
                                       const char* pFragmentSource) {
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
  if (!vertexShader) {
    return 0;
  }

  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
  if (!pixelShader) {
    return 0;
  }

  GLuint program = glCreateProgram();
  if (program) {
    glAttachShader(program, vertexShader);
    checkGlError("glAttachShader");
    glAttachShader(program, pixelShader);
    checkGlError("glAttachShader");
    glLinkProgram(program);
    GLint linkStatus = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
    if (linkStatus != GL_TRUE) {
      GLint bufLength = 0;
      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
      if (bufLength) {
        char* buf = (char*) malloc(bufLength);
        if (buf) {
          glGetProgramInfoLog(program, bufLength, NULL, buf);
          LOGE("%s: Could not link program: %s", __FUNCTION__, buf);
          free(buf);
        }
      }
      glDeleteProgram(program);
      program = 0;
    }
  }
  return program;
}

void RenderOpenGles20::printGLString(const char *name, GLenum s) {
  const char *v = (const char *) glGetString(s);
  LOGI("GL %s = %s ", name, v);
}

void RenderOpenGles20::checkGlError(const char* op) {
#ifdef android_LOG
  for (GLint error = glGetError(); error; error = glGetError()) {
    LOGE("after %s() glError (0x%x) ", op, error);
  }
#else
  return;
#endif
}

static void InitializeTexture(int name, int id, int width, int height,
                              uint32_t format) {
  glActiveTexture(name);
  glBindTexture(GL_TEXTURE_2D, id);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format,
               GL_UNSIGNED_BYTE, NULL);
}

// Uploads a plane of pixel data, accounting for stride != width*bpp.
// (Unused helper.)
static void GlTexSubImage2D(GLsizei width, GLsizei height, int stride,
                            const uint8_t* plane) {
  if (stride == width) {
    // Yay! We can upload the entire plane in a single GL call.
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
                    GL_UNSIGNED_BYTE, static_cast<const GLvoid*>(plane));
  } else {
    // Boo! Since GLES2 doesn't have GL_UNPACK_ROW_LENGTH and Android doesn't
    // have GL_EXT_unpack_subimage we have to upload a row at a time. Ick.
    for (int row = 0; row < height; ++row) {
      glTexSubImage2D(GL_TEXTURE_2D, 0, 0, row, width, 1, GL_LUMINANCE,
                      GL_UNSIGNED_BYTE,
                      static_cast<const GLvoid*>(plane + (row * stride)));
    }
  }
}

int32_t RenderOpenGles20::Render(void* data, int32_t width, int32_t height) {
  LOGI("%s: id %d", __FUNCTION__, (int) _id);

  glUseProgram(_program);
  checkGlError("glUseProgram");

  if (_colorFMT != localColorFMT || _textureWidth != (GLsizei) width ||
      _textureHeight != (GLsizei) height) {
    SetupTextures(width, height);
  }
  UpdateTextures(data, width, height);

  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
  checkGlError("glDrawArrays");

  return 0;
}

void RenderOpenGles20::SetupTextures(int32_t width, int32_t height) {
  glDeleteTextures(3, _textureIds);
  glGenTextures(3, _textureIds);  // Generate the Y, U and V texture

  InitializeTexture(GL_TEXTURE0, _textureIds[0], width, height, GL_LUMINANCE);

  GLint i = glGetUniformLocation(_program, "colorFMT");
  checkGlError("glGetUniformLocation colorFMT");
  glUniform1i(i, localColorFMT);  // set the colorFMT uniform in the shader
  checkGlError("glUniform1i colorFMT");
  _colorFMT = localColorFMT;
  LOGI("localColorFMT:%d", localColorFMT);

  if (localColorFMT == COLOR_FormatYUV420Planar) {
    // I420: three planes, so two extra GL_LUMINANCE chroma textures.
    InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2, GL_LUMINANCE);
    InitializeTexture(GL_TEXTURE2, _textureIds[2], width / 2, height / 2, GL_LUMINANCE);
  } else if (localColorFMT == COLOR_FormatYUV420SemiPlanar) {
    // NV12: two planes, so one GL_LUMINANCE_ALPHA texture for the interleaved UV plane.
    InitializeTexture(GL_TEXTURE1, _textureIds[1], width / 2, height / 2, GL_LUMINANCE_ALPHA);
  }

  /* Unlike uploading RGBA data directly, the formats used here are GL_LUMINANCE and
     GL_LUMINANCE_ALPHA. GL_RGBA keeps R, G, B and A as four separate values, whereas
     GL_LUMINANCE packs a single value per texel, so one Y sample maps to one texel.
     GL_LUMINANCE_ALPHA stores luminance first and then alpha, which lets us read U and V
     separately out of the interleaved UV plane. See [2]. */
  checkGlError("SetupTextures");

  _textureWidth = width;
  _textureHeight = height;
}

void RenderOpenGles20::UpdateTextures(void* data, int32_t width, int32_t height) {
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, _textureIds[0]);
  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
                  GL_UNSIGNED_BYTE, data);

  LOGI("localColorFMT:%d", localColorFMT);
  if (localColorFMT == COLOR_FormatYUV420Planar) {
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE,
                    GL_UNSIGNED_BYTE, (char *) data + width * height);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, _textureIds[2]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE,
                    GL_UNSIGNED_BYTE, (char *) data + width * height * 5 / 4);
  } else if (localColorFMT == COLOR_FormatYUV420SemiPlanar) {
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, _textureIds[1]);
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
                    GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE,
                    (char *) data + width * height);
  }
  checkGlError("UpdateTextures");
}
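For context, this is the frame layout that Render() above expects, plus a rough sketch of how the class could be driven from the JNI side. The function names nativeOnSurfaceChanged / nativeOnDrawFrame and the variables are placeholders for illustration, not part of the original code:

#include <stdint.h>
#include "render_opengles20.h"

// Expected layout of the buffer passed to Render(): tightly packed, no extra row stride.
//   I420 (COLOR_FormatYUV420Planar):     [ Y: w*h ][ U: w*h/4 ][ V: w*h/4 ]
//   NV12 (COLOR_FormatYUV420SemiPlanar): [ Y: w*h ][ interleaved UV: w*h/2 ]

extern int32_t localColorFMT;       // defined in the renderer source above
static RenderOpenGles20 g_render;   // hypothetical global instance

// Called once via JNI from GLSurfaceView.Renderer.onSurfaceChanged(),
// after the GL context exists.
void nativeOnSurfaceChanged(int32_t viewWidth, int32_t viewHeight, int32_t colorFormat) {
  localColorFMT = colorFormat;      // e.g. taken from the decoder's output format
  g_render.Setup(viewWidth, viewHeight);
}

// Called via JNI from GLSurfaceView.Renderer.onDrawFrame() for every decoded frame.
void nativeOnDrawFrame(uint8_t* yuvFrame, int32_t frameWidth, int32_t frameHeight) {
  g_render.Render(yuvFrame, frameWidth, frameHeight);
}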
【References】
[1] On writing the shader, see: https://www.jianshu.com/p/39cde80d60e2 (its glTexImage2D usage seems incorrect, so that part was not used as a reference).
[2] The glTexImage2D calls differ between NV12 and I420; this article on rendering NV12 explains it clearly: https://www.cnblogs.com/jukan/p/6994048.html (its shader part was not used as a reference).