作者:位元組流動
來源:
https://blog.csdn.net/Kennethdroid/article/details/97153407

YUV 渲染原理
前面文章
一文掌握 YUV 圖像的基本處理介紹了 YUV 常用的基本格式,本文以實作 NV21/NV12 的渲染為例。
前文提到,YUV 圖不能直接用于顯示,需要轉換為 RGB 格式,而 YUV 轉 RGB 是一個逐像素處理的耗時操作,在 CPU 端進行轉換效率過低,這時正好可以利用 GPU 強大的并行處理能力來實作 YUV 到 RGB 的轉換。
YUV 與 RGB 之間的轉換公式。

需要注意的是 OpenGLES 的内置矩陣實際上是一列一列地建構的,比如 YUV 和 RGB 的轉換矩陣的建構是:
mat3 convertMat = mat3(1.0, 1.0, 1.0, //第一列
0.0,-0.338,1.732, //第二列
1.371,-0.698, 0.0);//第三列
OpenGLES 實作 YUV 渲染需要用到 GL_LUMINANCE 和 GL_LUMINANCE_ALPHA 格式的紋理,其中 GL_LUMINANCE 紋理用來加載 NV21 Y Plane 的資料,GL_LUMINANCE_ALPHA 紋理用來加載 UV Plane 的資料。
OpenGLES 常用紋理的格式類型
GL_LUMINANCE 紋理在着色器中采樣的紋理像素格式是(L,L,L,1),L 表示亮度。GL_LUMINANCE_ALPHA 紋理在着色器中采樣的紋理像素格式是(L,L,L,A),A 表示透明度。
YUV 渲染實作
YUV 渲染步驟:
- 生成 2 個紋理,編譯連結着色器程式;
- 确定紋理坐标及對應的頂點坐标;
- 分别加載 NV21 的兩個 Plane 資料到 2 個紋理,加載紋理坐标和頂點坐标資料到着色器程式;
- 繪制。
片段着色器腳本
#version 300 es
precision mediump float;
in vec2 v_texCoord;
layout(location = 0) out vec4 outColor;
// GL_LUMINANCE texture holding the Y plane; samples as (L,L,L,1).
uniform sampler2D y_texture;
// GL_LUMINANCE_ALPHA texture holding the interleaved NV21 VU plane;
// samples as (L,L,L,A): .r = first byte (V), .a = second byte (U).
uniform sampler2D uv_texture;
void main()
{
vec3 yuv;
// Y straight from the luminance texture.
yuv.x = texture(y_texture, v_texCoord).r;
// U/V are stored biased by 0.5 (128/255), so re-center around 0.
yuv.y = texture(uv_texture, v_texCoord).a-0.5;
yuv.z = texture(uv_texture, v_texCoord).r-0.5;
// YUV -> RGB conversion matrix; GLSL mat3 is built column by column.
vec3 rgb =mat3( 1.0, 1.0, 1.0,
0.0, -0.344, 1.770,
1.403, -0.714, 0.0) * yuv;
outColor = vec4(rgb, 1);
}
y_texture 和 uv_texture 分别是 NV21 Y Plane 和 UV Plane 紋理的采樣器,對兩個紋理采樣之後組成一個(y,u,v)三維向量,之後左乘變換矩陣轉換為(r,g,b)三維向量。
Java 層 Load NV21 資料
/**
 * Loads the raw NV21 test image from the app assets and hands the pixel
 * buffer to the native renderer.
 *
 * The asset is a packed NV21 frame of 840x1074: a full-size Y plane
 * followed by an interleaved VU plane (half resolution in each dimension).
 *
 * Fixes over the original: the stream is opened inside the same try so a
 * failed open can no longer cause an NPE on {@code is.available()} /
 * {@code is.close()}; the read loops until the buffer is full because
 * {@code InputStream.read(byte[])} may return fewer bytes than requested;
 * and the {@code lenght} typo is corrected.
 */
private void LoadNV21Image() {
    InputStream is = null;
    try {
        is = getAssets().open("YUV_Image_840x1074.NV21");
        int length = is.available();
        byte[] buffer = new byte[length];
        // read() is allowed to return a partial read; loop until the whole
        // frame is in memory (or the stream ends early).
        int offset = 0;
        while (offset < length) {
            int bytesRead = is.read(buffer, offset, length - offset);
            if (bytesRead == -1) {
                break;
            }
            offset += bytesRead;
        }
        mGLSurfaceView.getNativeRender().native_SetImageData(IMAGE_FORMAT_NV21, 840, 1074, buffer);
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // Guard against the open itself having failed.
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
Native 層轉換為 NativeImage
// Wraps a caller-owned packed YUV buffer in a NativeImage descriptor
// (no copy is made here) and forwards it to the current sample.
// The plane pointers are derived from the standard packed layouts:
// NV12/NV21 put one interleaved chroma plane after the Y plane; I420 puts
// separate U and V planes (each a quarter of the luma size) after it.
void MyGLRenderContext::SetImageData(int format, int width, int height, uint8_t *pData)
{
	LOGCATE("MyGLRenderContext::SetImageData format=%d, width=%d, height=%d, pData=%p", format, width, height, pData);
	NativeImage nativeImage;
	nativeImage.format = format;
	nativeImage.width = width;
	nativeImage.height = height;
	nativeImage.ppPlane[0] = pData;

	const int lumaSize = width * height;
	if (format == IMAGE_FORMAT_NV12 || format == IMAGE_FORMAT_NV21)
	{
		// Semi-planar: single interleaved UV/VU plane right after Y.
		nativeImage.ppPlane[1] = pData + lumaSize;
	}
	else if (format == IMAGE_FORMAT_I420)
	{
		// Fully planar: U then V, each lumaSize/4 bytes.
		nativeImage.ppPlane[1] = pData + lumaSize;
		nativeImage.ppPlane[2] = nativeImage.ppPlane[1] + lumaSize / 4;
	}
	// Other formats keep only plane 0.

	if (m_Sample)
	{
		m_Sample->LoadImage(&nativeImage);
	}
}
// Copy the incoming image into the sample's own storage.
// Fix: the original dereferenced pImage->ppPlane[0] in the LOGCATE call
// BEFORE the null check, defeating the guard; log only after validating.
void NV21TextureMapSample::LoadImage(NativeImage *pImage)
{
	if (pImage)
	{
		LOGCATE("NV21TextureMapSample::LoadImage pImage = %p", pImage->ppPlane[0]);
		m_RenderImage.width = pImage->width;
		m_RenderImage.height = pImage->height;
		m_RenderImage.format = pImage->format;
		// Deep-copies the plane data so the caller's buffer may be released.
		NativeImageUtil::CopyNativeImage(pImage, &m_RenderImage);
	}
}
加載 NV21 的 2 個 Plane 資料到紋理,ppPlane[0] 表示 Y Plane 的指針,ppPlane[1] 表示 UV Plane 的指針,注意 2 個紋理的格式和寬高。
// Upload the Y plane (full resolution) as a single-channel GL_LUMINANCE
// texture; in the shader each texel samples as (L,L,L,1).
glBindTexture(GL_TEXTURE_2D, m_yTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_RenderImage.width, m_RenderImage.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
// Upload the interleaved VU plane as a two-channel GL_LUMINANCE_ALPHA
// texture; note it is half the width and half the height of the Y plane
// (4:2:0 subsampling), and samples as (L,L,L,A) in the shader.
glBindTexture(GL_TEXTURE_2D, m_uvTextureId);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, m_RenderImage.width >> 1, m_RenderImage.height >> 1, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[1]);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glBindTexture(GL_TEXTURE_2D, GL_NONE);
簡單代碼實作
// Compile and link the shader program, and generate the 2 textures
// (one for the Y plane, one for the UV plane).
void NV21TextureMapSample::Init()
{
	// Pass-through vertex shader: forwards position and texture coordinate.
	char vShaderStr[] =
			"#version 300 es \n"
			"layout(location = 0) in vec4 a_position; \n"
			"layout(location = 1) in vec2 a_texCoord; \n"
			"out vec2 v_texCoord; \n"
			"void main() \n"
			"{ \n"
			" gl_Position = a_position; \n"
			" v_texCoord = a_texCoord; \n"
			"} \n";
	// Fragment shader: samples Y from y_texture (.r) and V/U from
	// uv_texture (.r/.a), re-centers chroma by 0.5, then converts YUV to
	// RGB with a column-major mat3.
	char fShaderStr[] =
			"#version 300 es \n"
			"precision mediump float; \n"
			"in vec2 v_texCoord; \n"
			"layout(location = 0) out vec4 outColor; \n"
			"uniform sampler2D y_texture; \n"
			"uniform sampler2D uv_texture; \n"
			"void main() \n"
			"{ \n"
			" vec3 yuv; \n"
			" yuv.x = texture(y_texture, v_texCoord).r; \n"
			" yuv.y = texture(uv_texture, v_texCoord).a-0.5; \n"
			" yuv.z = texture(uv_texture, v_texCoord).r-0.5; \n"
			" highp vec3 rgb = mat3( 1, 1, 1, \n"
			" 0, -0.344, 1.770, \n"
			" 1.403, -0.714, 0) * yuv; \n"
			" outColor = vec4(rgb, 1); \n"
			"} \n";
	// Load the shaders and get a linked program object
	m_ProgramObj= GLUtils::CreateProgram(vShaderStr, fShaderStr, m_VertexShader, m_FragmentShader);
	// Get the sampler location
	m_ySamplerLoc = glGetUniformLocation (m_ProgramObj, "y_texture" );
	m_uvSamplerLoc = glGetUniformLocation(m_ProgramObj, "uv_texture");
	// Create the two textures; parameters and data are supplied at draw time.
	GLuint textureIds[2] = {0};
	glGenTextures(2, textureIds);
	m_yTextureId = textureIds[0];
	m_uvTextureId = textureIds[1];
}
// Upload the NV21 image planes to the textures, feed texture and vertex
// coordinates to the shader program, and draw a textured quad to render
// the YUV image.
// NOTE(review): the planes are re-uploaded on every Draw() call even though
// the image is static — fine for a demo, wasteful in production.
void NV21TextureMapSample::Draw(int screenW, int screenH)
{
	LOGCATE("NV21TextureMapSample::Draw()");
	// Bail out if Init() has not produced a valid program and textures.
	if(m_ProgramObj == GL_NONE || m_yTextureId == GL_NONE || m_uvTextureId == GL_NONE) return;
	//upload Y plane data (full resolution, one channel)
	glBindTexture(GL_TEXTURE_2D, m_yTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, m_RenderImage.width, m_RenderImage.height, 0, GL_LUMINANCE, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glBindTexture(GL_TEXTURE_2D, GL_NONE);
	//update UV plane data (half width, half height, two channels: 4:2:0)
	glBindTexture(GL_TEXTURE_2D, m_uvTextureId);
	glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, m_RenderImage.width >> 1, m_RenderImage.height >> 1, 0, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[1]);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
	glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
	glBindTexture(GL_TEXTURE_2D, GL_NONE);
	//glViewport(0, 0, m_RenderImage.width, m_RenderImage.height);
	// Quad vertices in NDC; the 0.78 y-extent letterboxes the image
	// (presumably to keep its aspect ratio on the demo screen — verify).
	GLfloat verticesCoords[] = {
			-1.0f, 0.78f, 0.0f, // Position 0
			-1.0f, -0.78f, 0.0f, // Position 1
			1.0f, -0.78f, 0.0f, // Position 2
			1.0f, 0.78f, 0.0f, // Position 3
	};
	// Texture coordinates flipped vertically relative to the vertices so the
	// image is drawn upright (GL's t axis runs bottom-up).
	GLfloat textureCoords[] = {
			0.0f, 0.0f, // TexCoord 0
			0.0f, 1.0f, // TexCoord 1
			1.0f, 1.0f, // TexCoord 2
			1.0f, 0.0f // TexCoord 3
	};
	// Two triangles covering the quad.
	GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
	// Use the program object
	glUseProgram (m_ProgramObj);
	// Load the vertex position (attribute location 0 in the vertex shader)
	glVertexAttribPointer (0, 3, GL_FLOAT,
						   GL_FALSE, 3 * sizeof (GLfloat), verticesCoords);
	// Load the texture coordinate (attribute location 1)
	glVertexAttribPointer (1, 2, GL_FLOAT,
						   GL_FALSE, 2 * sizeof (GLfloat), textureCoords);
	glEnableVertexAttribArray (0);
	glEnableVertexAttribArray (1);
	// Bind the Y plane map
	glActiveTexture(GL_TEXTURE0);
	glBindTexture(GL_TEXTURE_2D, m_yTextureId);
	// Set the Y plane sampler to texture unit to 0
	glUniform1i(m_ySamplerLoc, 0);
	// Bind the UV plane map
	glActiveTexture(GL_TEXTURE1);
	glBindTexture(GL_TEXTURE_2D, m_uvTextureId);
	// Set the UV plane sampler to texture unit to 1
	glUniform1i(m_uvSamplerLoc, 1);
	glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
}
YUV 渲染結果 聯系與交流
技術交流、擷取源碼可以掃碼添加我的微信:Byte-Flow ,領取視訊教程
「視訊雲技術」你最值得關注的音視訊技術公衆号,每周推送來自阿裡雲一線的實踐技術文章,在這裡與音視訊領域一流工程師交流切磋。