Author: 位元組流動 (ByteFlow)
Source: https://blog.csdn.net/Kennethdroid/article/details/108135636

FFmpeg development series:
FFmpeg Development (01): Compiling and Integrating FFmpeg
FFmpeg Development (02): Video Decoding and Playback with FFmpeg + ANativeWindow
FFmpeg Development (03): Audio Decoding and Playback with FFmpeg + OpenSLES
FFmpeg Development (04): Audio Visualization with FFmpeg + OpenGLES

In the previous installments of this Android FFmpeg series, we used FFmpeg's decoder together with ANativeWindow's rendering to implement video decoding and playback. However, when you want to add video filters to the player, such as watermarks or rotation and scaling effects, OpenGL ES makes the implementation far more convenient.

Rendering Decoded Frames with OpenGL ES
After the previous installments we are already quite familiar with the audio and video decoding process. This article uses OpenGL to render the video, so let's first review the video decoding flow:

As the flowchart shows, after a frame is decoded, the image is first converted to RGBA format, which both OpenGL and ANativeWindow can render directly.
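For reference, here is a minimal sketch of that CPU-side conversion using FFmpeg's libswscale; names such as m_AVCodecContext, yuvFrame, dstWidth and dstHeight are our own assumptions, not from the original post:

extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
}

// Create a conversion context once: source size/format -> target size, RGBA.
SwsContext *swsCtx = sws_getContext(m_AVCodecContext->width, m_AVCodecContext->height,
                                    m_AVCodecContext->pix_fmt,
                                    dstWidth, dstHeight, AV_PIX_FMT_RGBA,
                                    SWS_FAST_BILINEAR, nullptr, nullptr, nullptr);

// Allocate a destination frame backed by a single contiguous RGBA buffer.
AVFrame *rgbaFrame = av_frame_alloc();
int bufferSize = av_image_get_buffer_size(AV_PIX_FMT_RGBA, dstWidth, dstHeight, 1);
uint8_t *frameBuffer = (uint8_t *) av_malloc(bufferSize);
av_image_fill_arrays(rgbaFrame->data, rgbaFrame->linesize, frameBuffer,
                     AV_PIX_FMT_RGBA, dstWidth, dstHeight, 1);

// Convert one decoded frame (yuvFrame) into the RGBA frame.
sws_scale(swsCtx, yuvFrame->data, yuvFrame->linesize, 0,
          m_AVCodecContext->height, rgbaFrame->data, rgbaFrame->linesize);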
Of course, when rendering with OpenGL you can move the format conversion onto the GPU for better performance (i.e., implement the YUV-to-RGB conversion in a shader), or use an OES texture to receive the YUV image data directly; we won't expand on that here.
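As a pointer, here is a minimal sketch of such a fragment shader, written as a string literal in the same style as the shaders below. It assumes the Y, U and V planes of an I420 frame have been uploaded as three single-channel textures (the sampler names are assumptions) and applies the BT.601 full-range conversion:

static char yuvFShaderStr[] =
        "#version 300 es\n"
        "precision highp float;\n"
        "in vec2 v_texCoord;\n"
        "layout(location = 0) out vec4 outColor;\n"
        "uniform sampler2D s_textureY;\n"
        "uniform sampler2D s_textureU;\n"
        "uniform sampler2D s_textureV;\n"
        "void main()\n"
        "{\n"
        "    float y = texture(s_textureY, v_texCoord).r;\n"
        "    float u = texture(s_textureU, v_texCoord).r - 0.5;\n"
        "    float v = texture(s_textureV, v_texCoord).r - 0.5;\n"
        "    // BT.601 full-range YUV -> RGB\n"
        "    vec3 rgb = vec3(y + 1.402 * v,\n"
        "                    y - 0.344 * u - 0.714 * v,\n"
        "                    y + 1.772 * u);\n"
        "    outColor = vec4(rgb, 1.0);\n"
        "}";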
Once we understand the flow from decoding to rendering, we can set up the OpenGL rendering environment. From the earlier article on EGL, we know that before using any OpenGL API we must first create an OpenGL rendering context with EGL. For how to use EGL, see the article The Relationship Between OpenGL ES and EGL.
Since this article is meant to get beginners up to speed with FFmpeg development quickly, we build the OpenGL rendering environment directly with Android's GLSurfaceView class: GLSurfaceView already encapsulates the EGL context-creation steps and runs a dedicated render thread, which fits our need to render decoded video frames.
In fact, GLSurfaceView covers the vast majority of on-screen rendering scenarios in production development; only when you need multi-threaded rendering do you have to drive the EGL interfaces yourself.
At this point you may well ask: GLSurfaceView is a Java class, so do we have to pass the video frames decoded in the native layer up to Java for rendering? Not at all. We simply extend the Java call stack down into the native layer via JNI.
The GLSurfaceView.Renderer interface defines the three key rendering callbacks, which we forward to the native layer through JNI:
@Override
public void onSurfaceCreated(GL10 gl10, EGLConfig eglConfig) {
    FFMediaPlayer.native_OnSurfaceCreated();
}

@Override
public void onSurfaceChanged(GL10 gl10, int w, int h) {
    FFMediaPlayer.native_OnSurfaceChanged(w, h);
}

@Override
public void onDrawFrame(GL10 gl10) {
    FFMediaPlayer.native_OnDrawFrame();
}
// for the video OpenGL render callbacks
public static native void native_OnSurfaceCreated();
public static native void native_OnSurfaceChanged(int width, int height);
public static native void native_OnDrawFrame();
Next, we create an OpenGLRender class in the native layer to manage the OpenGL rendering.
// Interface
class VideoRender {
public:
    virtual ~VideoRender() {}
    virtual void Init(int videoWidth, int videoHeight, int *dstSize) = 0;
    virtual void RenderVideoFrame(NativeImage *pImage) = 0;
    virtual void UnInit() = 0;
};
// OpenGLRender class definition
class OpenGLRender : public VideoRender {
public:
    virtual void Init(int videoWidth, int videoHeight, int *dstSize);
    virtual void RenderVideoFrame(NativeImage *pImage);
    virtual void UnInit();

    // Counterparts of the three GLSurfaceView.Renderer callbacks in the Java layer
    void OnSurfaceCreated();
    void OnSurfaceChanged(int w, int h);
    void OnDrawFrame();

    // Singleton instance management
    static OpenGLRender *GetInstance();
    static void ReleaseInstance();

    // Set the transform matrix to control image rotation and scaling
    void UpdateMVPMatrix(int angleX, int angleY, float scaleX, float scaleY);

private:
    OpenGLRender();
    virtual ~OpenGLRender();

    static std::mutex m_Mutex;
    static OpenGLRender *s_Instance;
    GLuint m_ProgramObj = GL_NONE;
    GLuint m_TextureId;
    GLuint m_VaoId;
    GLuint m_VboIds[3];
    NativeImage m_RenderImage;
    glm::mat4 m_MVPMatrix;   // transform (MVP) matrix
    int m_FrameIndex = 0;    // frame counter used to drive animated filters
    glm::vec2 m_ScreenSize;  // viewport size in pixels
};
The complete implementation of the OpenGLRender class:
#include "OpenGLRender.h"
#include <GLUtils.h>
#include <gtc/matrix_transform.hpp>
OpenGLRender* OpenGLRender::s_Instance = nullptr;
std::mutex OpenGLRender::m_Mutex;
static char vShaderStr[] =
        "#version 300 es\n"
        "layout(location = 0) in vec4 a_position;\n"
        "layout(location = 1) in vec2 a_texCoord;\n"
        "uniform mat4 u_MVPMatrix;\n"
        "out vec2 v_texCoord;\n"
        "void main()\n"
        "{\n"
        "    gl_Position = u_MVPMatrix * a_position;\n"
        "    v_texCoord = a_texCoord;\n"
        "}";

static char fShaderStr[] =
        "#version 300 es\n"
        "precision highp float;\n"
        "in vec2 v_texCoord;\n"
        "layout(location = 0) out vec4 outColor;\n"
        "uniform sampler2D s_TextureMap; // sampler\n"
        "void main()\n"
        "{\n"
        "    outColor = texture(s_TextureMap, v_texCoord);\n"
        "}";
GLfloat verticesCoords[] = {
        -1.0f,  1.0f, 0.0f,  // Position 0: top left
        -1.0f, -1.0f, 0.0f,  // Position 1: bottom left
         1.0f, -1.0f, 0.0f,  // Position 2: bottom right
         1.0f,  1.0f, 0.0f,  // Position 3: top right
};

// t = 0 maps to the top of the quad because the RGBA buffer is uploaded top row first
GLfloat textureCoords[] = {
        0.0f, 0.0f,  // TexCoord 0
        0.0f, 1.0f,  // TexCoord 1
        1.0f, 1.0f,  // TexCoord 2
        1.0f, 0.0f   // TexCoord 3
};

GLushort indices[] = {0, 1, 2, 0, 2, 3};
OpenGLRender::OpenGLRender() {
}

OpenGLRender::~OpenGLRender() {
    // Free the cached image
    NativeImageUtil::FreeNativeImage(&m_RenderImage);
}
// Initialize with the video frame width and height
void OpenGLRender::Init(int videoWidth, int videoHeight, int *dstSize) {
    LOGCATE("OpenGLRender::InitRender video[w, h]=[%d, %d]", videoWidth, videoHeight);
    std::unique_lock<std::mutex> lock(m_Mutex);
    m_RenderImage.format = IMAGE_FORMAT_RGBA;
    m_RenderImage.width = videoWidth;
    m_RenderImage.height = videoHeight;
    // Report the target size back to the caller (the decoder converts to this resolution)
    dstSize[0] = videoWidth;
    dstSize[1] = videoHeight;
    m_FrameIndex = 0;
}
// Receive a decoded video frame
void OpenGLRender::RenderVideoFrame(NativeImage *pImage) {
    LOGCATE("OpenGLRender::RenderVideoFrame pImage=%p", pImage);
    if (pImage == nullptr || pImage->ppPlane[0] == nullptr)
        return;
    // Lock the mutex: decoding and rendering run on two different threads, so guard against data races
    std::unique_lock<std::mutex> lock(m_Mutex);
    if (m_RenderImage.ppPlane[0] == nullptr) {
        NativeImageUtil::AllocNativeImage(&m_RenderImage);
    }
    NativeImageUtil::CopyNativeImage(pImage, &m_RenderImage);
}
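On the decode-thread side, the hand-off is simply a matter of wrapping the converted RGBA buffer in a NativeImage and passing it in. A sketch, continuing the libswscale example above (only the NativeImage fields used in this article are shown):

// Decode thread: package the RGBA frame and hand it to the renderer.
NativeImage image;
image.format = IMAGE_FORMAT_RGBA;
image.width = dstWidth;
image.height = dstHeight;
image.ppPlane[0] = rgbaFrame->data[0];
OpenGLRender::GetInstance()->RenderVideoFrame(&image);
// With GLSurfaceView in RENDERMODE_CONTINUOUSLY the copy is picked up on the next
// onDrawFrame; with RENDERMODE_WHEN_DIRTY, call requestRender() here instead.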
void OpenGLRender::UnInit() {
}
// Set the transform matrix to control image rotation and scaling
void OpenGLRender::UpdateMVPMatrix(int angleX, int angleY, float scaleX, float scaleY) {
    angleX = angleX % 360;
    angleY = angleY % 360;
    // Convert degrees to radians
    float radiansX = static_cast<float>(MATH_PI / 180.0f * angleX);
    float radiansY = static_cast<float>(MATH_PI / 180.0f * angleY);
    // Projection matrix
    glm::mat4 Projection = glm::ortho(-1.0f, 1.0f, -1.0f, 1.0f, 0.1f, 100.0f);
    //glm::mat4 Projection = glm::frustum(-ratio, ratio, -1.0f, 1.0f, 4.0f, 100.0f);
    //glm::mat4 Projection = glm::perspective(45.0f, ratio, 0.1f, 100.f);
    // View matrix
    glm::mat4 View = glm::lookAt(
            glm::vec3(0, 0, 4), // Camera is at (0, 0, 4), in world space
            glm::vec3(0, 0, 0), // and looks at the origin
            glm::vec3(0, 1, 0)  // Head is up (set to (0, -1, 0) to look upside-down)
    );
    // Model matrix: scale, then rotate around the X and Y axes
    glm::mat4 Model = glm::mat4(1.0f);
    Model = glm::scale(Model, glm::vec3(scaleX, scaleY, 1.0f));
    Model = glm::rotate(Model, radiansX, glm::vec3(1.0f, 0.0f, 0.0f));
    Model = glm::rotate(Model, radiansY, glm::vec3(0.0f, 1.0f, 0.0f));
    Model = glm::translate(Model, glm::vec3(0.0f, 0.0f, 0.0f));
    m_MVPMatrix = Projection * View * Model;
}
void OpenGLRender::OnSurfaceCreated() {
    LOGCATE("OpenGLRender::OnSurfaceCreated");
    m_ProgramObj = GLUtils::CreateProgram(vShaderStr, fShaderStr);
    if (!m_ProgramObj) {
        LOGCATE("OpenGLRender::OnSurfaceCreated create program fail");
        return;
    }

    // Create the texture that will hold the RGBA video frame
    glGenTextures(1, &m_TextureId);
    glBindTexture(GL_TEXTURE_2D, m_TextureId);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, GL_NONE);

    // Generate VBO ids and load the VBOs with data
    glGenBuffers(3, m_VboIds);
    glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[0]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(verticesCoords), verticesCoords, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[1]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(textureCoords), textureCoords, GL_STATIC_DRAW);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[2]);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);

    // Generate the VAO and record the vertex attribute bindings
    glGenVertexArrays(1, &m_VaoId);
    glBindVertexArray(m_VaoId);

    glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[0]);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (const void *) 0);
    glBindBuffer(GL_ARRAY_BUFFER, GL_NONE);

    glBindBuffer(GL_ARRAY_BUFFER, m_VboIds[1]);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), (const void *) 0);
    glBindBuffer(GL_ARRAY_BUFFER, GL_NONE);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, m_VboIds[2]);
    glBindVertexArray(GL_NONE);

    UpdateMVPMatrix(0, 0, 1.0f, 1.0f);
}
void OpenGLRender::OnSurfaceChanged(int w, int h) {
    LOGCATE("OpenGLRender::OnSurfaceChanged [w, h]=[%d, %d]", w, h);
    m_ScreenSize.x = w;
    m_ScreenSize.y = h;
    glViewport(0, 0, w, h);
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
}
void OpenGLRender::OnDrawFrame() {
    glClear(GL_COLOR_BUFFER_BIT);
    if (m_ProgramObj == GL_NONE || m_TextureId == GL_NONE || m_RenderImage.ppPlane[0] == nullptr)
        return;
    LOGCATE("OpenGLRender::OnDrawFrame [w, h]=[%d, %d]", m_RenderImage.width, m_RenderImage.height);
    m_FrameIndex++;

    // Upload the RGBA image data
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_TextureId);
    // Lock the mutex: decoding and rendering run on two different threads, so guard against data races
    std::unique_lock<std::mutex> lock(m_Mutex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_RenderImage.width, m_RenderImage.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
    lock.unlock();
    glBindTexture(GL_TEXTURE_2D, GL_NONE);

    // Use the program object
    glUseProgram(m_ProgramObj);
    glBindVertexArray(m_VaoId);
    GLUtils::setMat4(m_ProgramObj, "u_MVPMatrix", m_MVPMatrix);

    // Bind the RGBA texture to unit 0 and point the sampler at it
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, m_TextureId);
    GLUtils::setFloat(m_ProgramObj, "s_TextureMap", 0);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (const void *) 0);
}
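Since glTexImage2D reallocates the texture storage on every call, one optional optimization (not in the original post; m_TextureAllocated is a hypothetical member flag) is to allocate the storage once and then only update the pixels per frame with glTexSubImage2D:

// Allocate the texture storage once, then only update the pixels per frame.
if (!m_TextureAllocated) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_RenderImage.width, m_RenderImage.height,
                 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
    m_TextureAllocated = true; // hypothetical member flag, not in the original class
}
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, m_RenderImage.width, m_RenderImage.height,
                GL_RGBA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);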
// Singleton: there is only one OpenGLRender globally
OpenGLRender *OpenGLRender::GetInstance() {
    if (s_Instance == nullptr) {
        std::lock_guard<std::mutex> lock(m_Mutex);
        if (s_Instance == nullptr) {
            s_Instance = new OpenGLRender();
        }
    }
    return s_Instance;
}
// Release the singleton instance
void OpenGLRender::ReleaseInstance() {
    if (s_Instance != nullptr) {
        std::lock_guard<std::mutex> lock(m_Mutex);
        if (s_Instance != nullptr) {
            delete s_Instance;
            s_Instance = nullptr;
        }
    }
}
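Note that this classic double-checked locking is not strictly safe in C++: the first read of s_Instance happens outside the lock, which is formally a data race. If you can live without the explicit ReleaseInstance, a minimal alternative sketch is a function-local static, which C++11 guarantees is initialized exactly once:

// Thread-safe lazy initialization without double-checked locking (C++11 magic static).
OpenGLRender *OpenGLRender::GetInstance() {
    static OpenGLRender s_render; // constructed once, on first call, thread-safely
    return &s_render;
}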
Calling OpenGLRender from the JNI layer:
JNIEXPORT void JNICALL
Java_com_byteflow_learnffmpeg_media_FFMediaPlayer_native_1OnSurfaceCreated(JNIEnv *env, jclass clazz) {
    OpenGLRender::GetInstance()->OnSurfaceCreated();
}

JNIEXPORT void JNICALL
Java_com_byteflow_learnffmpeg_media_FFMediaPlayer_native_1OnSurfaceChanged(JNIEnv *env, jclass clazz,
                                                                           jint width, jint height) {
    OpenGLRender::GetInstance()->OnSurfaceChanged(width, height);
}

JNIEXPORT void JNICALL
Java_com_byteflow_learnffmpeg_media_FFMediaPlayer_native_1OnDrawFrame(JNIEnv *env, jclass clazz) {
    OpenGLRender::GetInstance()->OnDrawFrame();
}
(Demo GIF: https://upload-images.jianshu.io/upload_images/3239933-4b80db0466dc8aa8.gif)
Adding Simple Video Filters
This brings us back to the realm of OpenGL ES development; if you want to dig deeper into this area, see the systematic tutorial series Android OpenGL ES 從入門到精通. Once video rendering with OpenGL is in place, it is easy to add whatever video filter you want with a shader, and we can borrow directly from the camera filter implementations.
Black-and-White Filter
We render one half of each output video frame as a classic black-and-white image; the luma weights below are the BT.601 coefficients. The shader:
// black-and-white filter
#version 300 es
precision highp float;
in vec2 v_texCoord;
layout(location = 0) out vec4 outColor;
uniform sampler2D s_TextureMap; // sampler
void main()
{
    outColor = texture(s_TextureMap, v_texCoord);
    if(v_texCoord.x > 0.5) // render half of the frame as classic black-and-white
        outColor = vec4(vec3(outColor.r*0.299 + outColor.g*0.587 + outColor.b*0.114), outColor.a);
}
Dynamic Mesh
The dynamic mesh filter divides the video image into a regular grid and animates the width of each cell's border. The shader:
// dynamic mesh
#version 300 es
precision highp float;
in vec2 v_texCoord;
layout(location = 0) out vec4 outColor;
uniform sampler2D s_TextureMap; // sampler
uniform float u_Offset;  // border-width animation factor in [0, 1]
uniform vec2 u_TexSize;  // texture size in pixels
void main()
{
    vec2 imgTexCoord = v_texCoord * u_TexSize; // map to pixel coordinates
    float sideLength = u_TexSize.y / 6.0;      // square cells, 6 rows
    float maxOffset = 0.15 * sideLength;
    float x = mod(imgTexCoord.x, floor(sideLength));
    float y = mod(imgTexCoord.y, floor(sideLength));
    float offset = u_Offset * maxOffset;
    if(offset <= x
       && x <= sideLength - offset
       && offset <= y
       && y <= sideLength - offset)
    {
        outColor = texture(s_TextureMap, v_texCoord);
    }
    else
    {
        outColor = vec4(1.0, 1.0, 1.0, 1.0); // white border
    }
}
The render pass of the dynamic mesh filter; u_Offset is driven by the frame counter, so the border width oscillates between 0 and its maximum with a period of 50 frames:
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_TextureId);
std::unique_lock<std::mutex> lock(m_Mutex);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, m_RenderImage.width, m_RenderImage.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, m_RenderImage.ppPlane[0]);
lock.unlock();
glBindTexture(GL_TEXTURE_2D, GL_NONE);

// Use the shader program
glUseProgram(m_ProgramObj);
// Bind the VAO
glBindVertexArray(m_VaoId);
// Upload the transform matrix
GLUtils::setMat4(m_ProgramObj, "u_MVPMatrix", m_MVPMatrix);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, m_TextureId);
GLUtils::setFloat(m_ProgramObj, "s_TextureMap", 0);
// Set the animated border offset
float offset = (sin(m_FrameIndex * MATH_PI / 25) + 1.0f) / 2.0f;
GLUtils::setFloat(m_ProgramObj, "u_Offset", offset);
// Set the image size
GLUtils::setVec2(m_ProgramObj, "u_TexSize", vec2(m_RenderImage.width, m_RenderImage.height));
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, (const void *) 0);
What the dynamic mesh filter looks like:
Scaling and Rotation
Finally, we listen for the user's drag and pinch gestures on the GLSurfaceView and feed them into OpenGLRender's transform matrix, thereby rotating and scaling the video image.
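The bridge is analogous to the render callbacks above; a minimal sketch (the native_SetGesture name and its parameters are our assumptions, not from the original post):

JNIEXPORT void JNICALL
Java_com_byteflow_learnffmpeg_media_FFMediaPlayer_native_1SetGesture(JNIEnv *env, jclass clazz,
        jint angleX, jint angleY, jfloat scaleX, jfloat scaleY) {
    // Forward the accumulated rotation angles and pinch scale to the renderer.
    OpenGLRender::GetInstance()->UpdateMVPMatrix(angleX, angleY, scaleX, scaleY);
}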
Contact and Discussion
For technical discussion or to get the source code, you can add my WeChat: Byte-Flow