opengl es 2.0 視訊播放
demo: https://github.com/wangzuxing/MyFFmpegH264H265YUVOpenGL
以YUV紋理貼圖實作視訊播放:
1、把jni端解碼的YUV資料/攝像頭預覽的YUV資料傳入glTexImage2D()函數,并以GL_LUMINANCE格式進行紋理加載、綁定
2、把YUV資料産生的相關紋理單元賦給片元着色器的各2D采樣器,然後進行紋理采樣、數值計算、轉換成RGB資料
如下:
GLES20.glTexImage2D
(
GLES20.GL_TEXTURE_2D, //紋理單元的類型
0, //紋理單元的層次,非mipmap紋理level設定為0
GLES20.GL_LUMINANCE,
//紋理單元的資料格式:
// GL_RGB、GL_RGBA、GL_LUMINANCE、GL_LUMINANCE_ALPHA、GL_ALPHA
_video_width, //紋理單元的寬度
_video_height, //紋理單元的高度
0, //紋理單元的邊框,GLES 2.0中必須為0
GLES20.GL_LUMINANCE, //data所指向的資料的格式
GLES20.GL_UNSIGNED_BYTE, //data所指向的資料的類型
y //指向的資料
);
GLES20Support.java:
public class GLES20Support {
public static boolean detectOpenGLES20(Context context) {
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo(); // 擷取配置資訊, 取得GLES version
return (info.reqGlEsVersion >= );
}
...
}
MainActivity0.java:
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
...
if (GLES20Support.detectOpenGLES20(this) == false) { // 判斷android系統是否支援opengl es 2.0
GLES20Support.getNoSupportGLES20Dialog(this);
}
GLFrameSurface glSurfaceView = new GLFrameSurface(this);
glSurfaceView.setEGLContextClientVersion(); //設定OpenGL ES 2.0版本支援
glRenderer = new GLFrameRenderer(null, glSurfaceView, getDM(this));
glSurfaceView.setRenderer(glRenderer);
...
}
//1、主線程調用GLFrameRenderer glRenderer
//2、
boolean yuv_update = true;
Handler myHandler = new Handler() {
public void handleMessage(Message msg) {
switch (msg.what) {
...
case :
if(yuv_update){
glRenderer.update(width, height);
yuv_update = false;
}
byte[] y = new byte[yuvPlanes[].remaining()];
yuvPlanes[].get(y, , y.length);
byte[] u = new byte[yuvPlanes[].remaining()];
yuvPlanes[].get(u, , u.length);
byte[] v = new byte[yuvPlanes[].remaining()];
yuvPlanes[].get(v, , v.length);
glRenderer.update(y, u, v); //GLFrameRenderer glRenderer;
break;
...
}
super.handleMessage(msg);
}
};
// jni端需調用函數,傳入解碼的yuv資料到java端,由opengl es2.0進行紋理貼圖顯示
public void updateYUV(byte[] yuvData, int width0, int height0) {
synchronized (this) {
copyFrom(yuvData,width0,height0);
Message message = new Message();
message.what = ;
myHandler.sendMessage(message);
}
}
public ByteBuffer[] yuvPlanes;
int planeSize;// = width * height;
int planeSize_l;
ByteBuffer[] planes = new ByteBuffer[];
//析取出 Y、U、V 分量,送opengl es 2.0 的glTexImage2D()函數産生紋理資料
//GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width, _video_height, 0,GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y); // y u v
public void copyFrom(byte[] yuvData, int width, int height) {
if (yuvPlanes == null) {
int[] yuvStrides = { width, width / , width / };
planeSize = width * height;
planeSize_l = planeSize*/;
yuvPlanes = new ByteBuffer[];
yuvPlanes[] = ByteBuffer.allocateDirect(yuvStrides[] * height);
yuvPlanes[] = ByteBuffer.allocateDirect(yuvStrides[] * height / );
yuvPlanes[] = ByteBuffer.allocateDirect(yuvStrides[] * height / );
}
if (yuvData.length < planeSize_l) {
throw new RuntimeException("Wrong arrays size: " + yuvData.length);
}
planes[] = ByteBuffer.wrap(yuvData, , planeSize);
planes[] = ByteBuffer.wrap(yuvData, planeSize, planeSize / );
planes[] = ByteBuffer.wrap(yuvData, planeSize + planeSize / , planeSize / );
for (int i = ; i < ; i++) {
yuvPlanes[i].position();
yuvPlanes[i].put(planes[i]);
yuvPlanes[i].position();
yuvPlanes[i].limit(yuvPlanes[i].capacity());
}
}
/*
packed formats:将Y、U、V值儲存成Macro Pixels陣列,和RGB的存放方式類似。
planar formats:将Y、U、V的三個份量分别存放在不同的矩陣中。
COLOR_FormatYUV420Planar: YUV420P I420
COLOR_FormatYUV420SemiPlanar: YUV420SP NV12
YUV420P,Y,U,V三個分量都是平面格式,分為I420和YV12。I420格式和YV12格式的不同處在U平面和V平面的位置不同。
在I420格式中,U平面緊跟在Y平面之後,然後才是V平面(即:YUV);但YV12則是相反(即:YVU)。
YUV420SP, Y分量平面格式,UV打包格式, 即NV12。 NV12與NV21類似,U 和 V 交錯排列,不同在于UV順序。
I420: YYYYYYYY UU VV =>YUV420P
YV12: YYYYYYYY VV UU =>YUV420P
NV12: YYYYYYYY UVUV =>YUV420SP
NV21: YYYYYYYY VUVU =>YUV420SP
*/
//yv12 =》 yuv420p : yvu -> yuv
private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)
{
System.arraycopy(yv12bytes, , i420bytes, ,width*height);
System.arraycopy(yv12bytes, width*height+width*height/, i420bytes, width*height,width*height/);
System.arraycopy(yv12bytes, width*height, i420bytes, width*height+width*height/,width*height/);
}
// camera preview data --> onFrame() mediacodec編碼
public void onFrame(byte[] buf, int offset, int length, int flag) {
swapYV12toI420(buf, h264, width, height); // I420 視訊格式
if(isPlaying){
copyFrom(h264, width ,height); // h264 存放的就是I420格式的yuv資料
Message message = new Message(); // ui線程重新整理顯示
message.what = ;
myHandler.sendMessage(message);
}
...
}
}
GLFrameRenderer.java:
public class GLFrameRenderer implements Renderer {
private ISimplePlayer mParentAct;
private GLSurfaceView mTargetSurface;
private GLProgram prog = new GLProgram(); // 頂點坐标、紋理坐标定義,vertex shader、fragment shader定義、讀取,參數的傳遞、紋理的建立等
private int mScreenWidth, mScreenHeight;
private int mVideoWidth, mVideoHeight;
private ByteBuffer y;
private ByteBuffer u;
private ByteBuffer v;
public GLFrameRenderer(ISimplePlayer callback, GLSurfaceView surface, DisplayMetrics dm) {
mParentAct = callback;
mTargetSurface = surface;
mScreenWidth = ;//dm.widthPixels;
mScreenHeight = ;//dm.heightPixels;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
if (!prog.isProgramBuilt()) {
prog.buildProgram(); // 2、讀取vertex shader、fragment shader, createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
// get handle for "vPosition" and "a_texCoord"
// get uniform location for y/u/v
}
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(, , width, height);
}
@Override
public void onDrawFrame(GL10 gl) {
synchronized (this) {
if (y != null) {
// reset position, have to be done
y.position();
u.position();
v.position();
prog.buildTextures(y, u, v, mVideoWidth, mVideoHeight); //3、根據I420的y、u、v資料,産生對應的紋理貼圖
GLES20.glClearColor(f, f, f, f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
prog.drawFrame(); // 4、GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 渲染\顯示紋理貼圖
}
}
}
public void update(int w, int h) {
Utils.LOGD("update INIT E");
if (w > && h > ) {
if (mScreenWidth > && mScreenHeight > ) {
float f1 = f * mScreenHeight / mScreenWidth;
float f2 = f * h / w;
//1、根據視訊高、寬比與螢幕高、寬比的比較,調用createBuffers(), 産生vertices buffer,四頂點
if (f1 == f2) {
prog.createBuffers(GLProgram.squareVertices);
} else if (f1 < f2) {
float widScale = f1 / f2;
prog.createBuffers(new float[] { -widScale, -f, widScale, -f, -widScale, f, widScale, f,});
} else {
float heightScale = f2 / f1;
prog.createBuffers(new float[] { -f, -heightScale, f, -heightScale, -f, heightScale, f, heightScale, });
}
}
if (w != mVideoWidth && h != mVideoHeight) {
this.mVideoWidth = w;
this.mVideoHeight = h;
int yarraySize = w * h;
int uvarraySize = yarraySize / ;
synchronized (this) {
y = ByteBuffer.allocate(yarraySize);
u = ByteBuffer.allocate(uvarraySize);
v = ByteBuffer.allocate(uvarraySize);
}
}
}
}
public void update(byte[] ydata, byte[] udata, byte[] vdata) {
synchronized (this) {
y.clear();
u.clear();
v.clear();
y.put(ydata, , ydata.length);
u.put(udata, , udata.length);
v.put(vdata, , vdata.length);
// request to render
mTargetSurface.requestRender(); //5、GLSurfaceView請求渲染
}
}
}
GLProgram.java
public class GLProgram {
// program id
private int _program;
// window position
public final int mWinPosition;
// texture id
private int _textureI;
private int _textureII;
private int _textureIII;
// texture index in gles
private int _tIindex;
private int _tIIindex;
private int _tIIIindex;
// vertices on screen
private float[] _vertices;
// handles
private int _positionHandle = -, _coordHandle = -;
private int _yhandle = -, _uhandle = -, _vhandle = -;
private int _ytid = -, _utid = -, _vtid = -;
// vertices buffer
private ByteBuffer _vertice_buffer;
private ByteBuffer _coord_buffer;
// video width and height
private int _video_width = -;
private int _video_height = -;
// flow control
private boolean isProgBuilt = false;
public GLProgram(int position) {
if (position < || position > ) {
throw new RuntimeException("Index can only be 0 to 4");
}
mWinPosition = position;
setup(mWinPosition);
}
public void setup(int position) {
switch (mWinPosition) {
case :
_vertices = squareVertices1; //
_textureI = GLES20.GL_TEXTURE0;
_textureII = GLES20.GL_TEXTURE1;
_textureIII = GLES20.GL_TEXTURE2;
_tIindex = ; // GLES20.GL_TEXTURE0 紋理單元在系統中的索引為0,即opengl es 2.0函數中對參數為GLES20.GL_TEXTURE0的,也可以用0代替
_tIIindex = ;
_tIIIindex = ;
break;
case :
_vertices = squareVertices2;
_textureI = GLES20.GL_TEXTURE3;
_textureII = GLES20.GL_TEXTURE4;
_textureIII = GLES20.GL_TEXTURE5;
_tIindex = ;
_tIIindex = ;
_tIIIindex = ;
break;
case :
_vertices = squareVertices3;
_textureI = GLES20.GL_TEXTURE6;
_textureII = GLES20.GL_TEXTURE7;
_textureIII = GLES20.GL_TEXTURE8;
_tIindex = ;
_tIIindex = ;
_tIIIindex = ;
break;
case :
_vertices = squareVertices4;
_textureI = GLES20.GL_TEXTURE9;
_textureII = GLES20.GL_TEXTURE10;
_textureIII = GLES20.GL_TEXTURE11;
_tIindex = ;
_tIIindex = ;
_tIIIindex = ;
break;
case :
default:
_vertices = squareVertices;
_textureI = GLES20.GL_TEXTURE0;
_textureII = GLES20.GL_TEXTURE1;
_textureIII = GLES20.GL_TEXTURE2;
_tIindex = ;
_tIIindex = ;
_tIIIindex = ;
break;
}
}
public boolean isProgramBuilt() {
return isProgBuilt;
}
public void buildProgram() {
// TODO createBuffers(_vertices, coordVertices);
if (_program <= ) {
_program = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
}
Utils.LOGD("_program = " + _program);
//get handle for "vPosition" and "a_texCoord"
//擷取程式中頂點坐标屬性引用(句柄)
_positionHandle = GLES20.glGetAttribLocation(_program, "vPosition");
Utils.LOGD("_positionHandle = " + _positionHandle);
checkGlError("glGetAttribLocation vPosition");
if (_positionHandle == -) {
throw new RuntimeException("Could not get attribute location for vPosition");
}
//擷取程式中頂點紋理坐标屬性引用(句柄)
_coordHandle = GLES20.glGetAttribLocation(_program, "a_texCoord");
Utils.LOGD("_coordHandle = " + _coordHandle);
checkGlError("glGetAttribLocation a_texCoord");
if (_coordHandle == -) {
throw new RuntimeException("Could not get attribute location for a_texCoord");
}
// get uniform location for y/u/v, we pass data through these uniforms
//擷取程式中2D采樣器引用(句柄),紋理貼圖
_yhandle = GLES20.glGetUniformLocation(_program, "tex_y"); //
Utils.LOGD("_yhandle = " + _yhandle);
checkGlError("glGetUniformLocation tex_y");
if (_yhandle == -) {
throw new RuntimeException("Could not get uniform location for tex_y");
}
_uhandle = GLES20.glGetUniformLocation(_program, "tex_u");
Utils.LOGD("_uhandle = " + _uhandle);
checkGlError("glGetUniformLocation tex_u");
if (_uhandle == -) {
throw new RuntimeException("Could not get uniform location for tex_u");
}
_vhandle = GLES20.glGetUniformLocation(_program, "tex_v");
Utils.LOGD("_vhandle = " + _vhandle);
checkGlError("glGetUniformLocation tex_v");
if (_vhandle == -) {
throw new RuntimeException("Could not get uniform location for tex_v");
}
isProgBuilt = true;
}
//build a set of textures, one for R, one for G, and one for B.
public void buildTextures(Buffer y, Buffer u, Buffer v, int width, int height) {
boolean videoSizeChanged = (width != _video_width || height != _video_height);
if (videoSizeChanged) {
_video_width = width;
_video_height = height;
Utils.LOGD("buildTextures videoSizeChanged: w=" + _video_width + " h=" + _video_height);
}
// building texture for Y data
if (_ytid < || videoSizeChanged) {
if (_ytid >= ) {
Utils.LOGD("glDeleteTextures Y");
GLES20.glDeleteTextures(, new int[] { _ytid }, );
checkGlError("glDeleteTextures");
}
// GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
//生成紋理ID
int[] textures = new int[];
GLES20.glGenTextures
(
, //産生的紋理id的數量
textures, //紋理id的數組
//偏移量
);
checkGlError("glGenTextures");
_ytid = textures[];
Utils.LOGD("glGenTextures Y = " + _ytid);
}
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid);
checkGlError("glBindTexture");
//實際加載紋理
GLES20.glTexImage2D
(
GLES20.GL_TEXTURE_2D, //紋理單元的類型
, //紋理單元的層次,非mipmap紋理level設定為0
GLES20.GL_LUMINANCE, //紋理單元的資料格式 GL_RGB、GL_RGBA、GL_LUMINANCE、GL_LUMINANCE_ALPHA、GL_ALPHA
_video_width, //紋理單元的寬度
_video_height, //紋理單元的高度
, //紋理單元的邊框,如果包含邊框取值為1,不包含邊框取值為0
GLES20.GL_LUMINANCE, //data所指向的資料的格式
GLES20.GL_UNSIGNED_BYTE, //data所指向的資料的類型
y //指向的資料
);
checkGlError("glTexImage2D");
//非Mipmap紋理采樣過濾參數
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
//ST方向紋理拉伸方式
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// building texture for U data
if (_utid < || videoSizeChanged) {
if (_utid >= ) {
Utils.LOGD("glDeleteTextures U");
GLES20.glDeleteTextures(, new int[] { _utid }, );
checkGlError("glDeleteTextures");
}
int[] textures = new int[];
GLES20.glGenTextures(, textures, );
checkGlError("glGenTextures");
_utid = textures[];
Utils.LOGD("glGenTextures U = " + _utid);
}
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, , GLES20.GL_LUMINANCE, _video_width / , _video_height / , ,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
// building texture for V data
if (_vtid < || videoSizeChanged) {
if (_vtid >= ) {
Utils.LOGD("glDeleteTextures V");
GLES20.glDeleteTextures(, new int[] { _vtid }, );
checkGlError("glDeleteTextures");
}
int[] textures = new int[];
GLES20.glGenTextures(, textures, );
checkGlError("glGenTextures");
_vtid = textures[];
Utils.LOGD("glGenTextures V = " + _vtid);
}
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, , GLES20.GL_LUMINANCE, _video_width / , _video_height / , ,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
//render the frame the YUV data will be converted to RGB by shader.
public void drawFrame() {
GLES20.glUseProgram(_program);
checkGlError("glUseProgram");
//指定頂點屬性數組
GLES20.glVertexAttribPointer(_positionHandle, , GLES20.GL_FLOAT, false, , _vertice_buffer);
//指定渲染時索引值為_positionHandle的頂點屬性數組的資料格式和位置, 頂點資料和shader程式中的變量進行關聯
checkGlError("glVertexAttribPointer mPositionHandle");
GLES20.glEnableVertexAttribArray(_positionHandle); //使能索引值為_positionHandle的頂點屬性數組
GLES20.glVertexAttribPointer(_coordHandle, , GLES20.GL_FLOAT, false, , _coord_buffer);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(_coordHandle);
// bind textures
GLES20.glActiveTexture(_textureI); //激活紋理單元
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid); //綁定紋理對象到紋理單元
GLES20.glUniform1i(_yhandle, _tIindex); //通過_yhandle句柄,把該紋理單元指派給shader中sampler2D tex_y
//GLES20.GL_TEXTURE0~GLES20.GL_TEXTURE31 紋理單元在系統中的索引為0~31,即opengl es 2.0函數中對參數為GLES20.GL_TEXTUREi,也可以用i代替(i=0~31)
GLES20.glActiveTexture(_textureII);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
GLES20.glUniform1i(_uhandle, _tIIindex);
GLES20.glActiveTexture(_textureIII);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
GLES20.glUniform1i(_vhandle, _tIIIindex);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, , ); // GL_TRIANGLE_STRIP繪制方式,數組中頂點的數量4
GLES20.glFinish();
GLES20.glDisableVertexAttribArray(_positionHandle); //禁用索引值為_positionHandle的頂點屬性數組
GLES20.glDisableVertexAttribArray(_coordHandle);
}
/**
* create program and load shaders, fragment shader is very important.
*/
public int createProgram(String vertexSource, String fragmentSource) {
// create shaders
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); //加載頂點着色器
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); //加載片元着色器
// just check
Utils.LOGD("vertexShader = " + vertexShader);
Utils.LOGD("pixelShader = " + pixelShader);
int program = GLES20.glCreateProgram(); //建立程式
if (program != ) {
GLES20.glAttachShader(program, vertexShader); //向程式中加入頂點着色器
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader); //向程式中加入片元着色器
checkGlError("glAttachShader");
GLES20.glLinkProgram(program); //連結程式
int[] linkStatus = new int[]; //存放連結成功program數量的數組
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, ); //擷取program的連結情況
if (linkStatus[] != GLES20.GL_TRUE) {
Utils.LOGE("Could not link program: ");
Utils.LOGE(GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = ;
}
}
return program;
}
/**
* create shader with given source.
*/
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType); //根據類型建立shader
if (shader != ) {
GLES20.glShaderSource(shader, source); //加載shader的源代碼
GLES20.glCompileShader(shader); //編譯shader
int[] compiled = new int[]; //存放編譯成功shader數量的數組
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, ); //擷取Shader的編譯情況
if (compiled[] == ) {
Utils.LOGE("Could not compile shader " + shaderType);
Utils.LOGE(GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = ;
}
}
return shader;
}
/**
* these two buffers are used for holding vertices, screen vertices and texture vertices.
*/
void createBuffers(float[] vert) {
_vertice_buffer = ByteBuffer.allocateDirect(vert.length * ); // 建立頂點坐标資料緩沖
_vertice_buffer.order(ByteOrder.nativeOrder());
_vertice_buffer.asFloatBuffer().put(vert);
_vertice_buffer.position();
if (_coord_buffer == null) {
_coord_buffer = ByteBuffer.allocateDirect(coordVertices.length * ); // 根據紋理坐标數組---建立頂點紋理坐标資料緩沖
_coord_buffer.order(ByteOrder.nativeOrder()); //設定本地位元組序
_coord_buffer.asFloatBuffer().put(coordVertices); //轉換為Float型緩沖,并向緩沖區中放入頂點紋理資料
_coord_buffer.position(); //設定緩沖區起始位置
}
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Utils.LOGE("***** " + op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
//預設定義的螢幕頂點坐标
static float[] squareVertices = { -f, -f, f, -f, -f, f, f, f, }; // fullscreen
static float[] squareVertices1 = { -f, f, f, f, -f, f, f, f, }; // left-top
static float[] squareVertices2 = { f, -f, f, -f, f, f, f, f, }; // right-bottom
static float[] squareVertices3 = { -f, -f, f, -f, -f, f, f, f, }; // left-bottom
static float[] squareVertices4 = { f, f, f, f, f, f, f, f, }; // right-top
//頂點紋理坐标
private static float[] coordVertices = { f, f, f, f, f, f, f, f, };// whole-texture
//頂點着色器代碼 vec4 vPosition齊次坐标(x,y,0,1)
private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + "attribute vec2 a_texCoord;\n"
+ "varying vec2 tc;\n" + "void main() {\n" + "gl_Position = vPosition;\n" + "tc = a_texCoord;\n" + "}\n";
//片元着色器代碼, sampler2D對應GL_TEXTURE_2D的紋理采樣器
private static final String FRAGMENT_SHADER = "precision mediump float;\n" + "uniform sampler2D tex_y;\n"
+ "uniform sampler2D tex_u;\n" + "uniform sampler2D tex_v;\n" + "varying vec2 tc;\n" + "void main() {\n"
+ "vec4 c = vec4((texture2D(tex_y, tc).r - 16./255.) * 1.164);\n"
+ "vec4 U = vec4(texture2D(tex_u, tc).r - 128./255.);\n"
+ "vec4 V = vec4(texture2D(tex_v, tc).r - 128./255.);\n" + "c += V * vec4(1.596, -0.813, 0, 0);\n"
+ "c += U * vec4(0, -0.392, 2.017, 0);\n" + "c.a = 1.0;\n" + "gl_FragColor = c;\n" + "}\n";
}