Chapter 3: OpenGL ES Basics - Rendering a Texture with GLSL

This series:
Chapter 1: OpenGL ES Basics - Screen, Texture and Vertex Coordinates
Chapter 2: OpenGL ES Basics - A Brief Summary of GLSL Syntax
Chapter 3: OpenGL ES Basics - Rendering a Texture with GLSL
Chapter 4: OpenGL ES Basics - Principles of Translation, Scaling and Rotation
Chapter 5: OpenGL ES Basics - Perspective and Orthographic Projection Matrices
Chapter 6: OpenGL ES Basics - Understanding and Using FBO and VBO
Chapter 7: OpenGL ES Basics - Thinking in Terms of an Input/Output Framework
Introduction to GLSL shaders:
vec4: a vector with four components.
mat4: a 4x4 floating-point matrix.
To render an image texture, we start by writing a simple pair of shaders.
Shaders come in two kinds: vertex shaders and fragment shaders.

Vertex shader:
A vertex shader processes the input vertex data. It is mainly used to transform, project and otherwise manipulate each vertex so that it moves from model space into clip space. Typical operations include vertex position transformation, normal transformation and color interpolation, all of which affect how the vertex is ultimately rendered.

Fragment shader:
A fragment shader (also called a pixel shader) computes the color of each fragment (pixel). It runs on the geometry produced after vertex processing and determines the final color of every pixel. A fragment shader can perform texture sampling, lighting calculations, shadowing and similar operations to arrive at the final color output.

// Vertex coordinates (origin at the center of the display area)
private final float[] vertexData = {
        -1.0f, -1.0f,   // bottom left
         1.0f, -1.0f,   // bottom right
        -1.0f,  1.0f,   // top left
         1.0f,  1.0f,   // top right
};

// Texture coordinates (origin at the bottom-left corner of the display area)
private final float[] textureData = {
        0.0f, 0.0f,     // bottom left
        1.0f, 0.0f,     // bottom right
        0.0f, 1.0f,     // top left
        1.0f, 1.0f,     // top right
};

String vertexSource =
        "attribute vec4 av_Position;   // vertex position attribute\n" +
        "attribute vec4 af_Position;   // texture coordinate attribute (S, T)\n" +
        "varying vec2 v_texPosition;   // texture coordinate passed to the fragment shader\n" +
        "uniform mat4 u_Matrix;        // projection matrix\n" +
        "void main() {\n" +
        "    // Flip the T coordinate so the image is not rendered upside down\n" +
        "    v_texPosition = vec2(af_Position.x, 1.0 - af_Position.y);\n" +
        // "    v_texPosition = af_Position.xy;\n" +   // alternative: pass the texture coordinate through unchanged
        "    gl_Position = u_Matrix * av_Position;   // final vertex position\n" +
        "}\n";

String fragmentSource =
        "precision mediump float;      // default float precision\n" +
        "varying vec2 v_texPosition;   // texture coordinate from the vertex shader\n" +
        "uniform sampler2D sTexture;   // texture sampler\n" +
        "\n" +
        "void main() {\n" +
        "    // Sample the texture and output the color to the screen\n" +
        "    gl_FragColor = texture2D(sTexture, v_texPosition);\n" +
        "}";

Load and compile the vertex and fragment shaders on the OpenGL thread:

public static int loadShader(int shaderType, String source) {
    int shader = GLES20.glCreateShader(shaderType);
    if (shader != 0) {
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compile = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
        if (compile[0] != GLES20.GL_TRUE) {
            Log.d("ywl5320", "shader compile error");
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}

public static int createProgram(String vertexSource, String fragmentSource) {
    int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    if (vertexShader == 0) {
        return 0;
    }
    int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    if (fragmentShader == 0) {
        return 0;
    }
    int program = GLES20.glCreateProgram();
    if (program != 0) {
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            Log.d("ywl5320", "link program error");
            GLES20.glDeleteProgram(program);
            program = 0;
        }
    }
    return program;
}

program = createProgram(vertexSource, fragmentSource);
avPosition = GLES20.glGetAttribLocation(program, "av_Position");
afPosition = GLES20.glGetAttribLocation(program, "af_Position");
samplerOES = GLES20.glGetUniformLocation(program, "sTexture");
umatrix = GLES20.glGetUniformLocation(program, "u_Matrix");
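The loadShader and createProgram helpers above only log a generic message when compilation or linking fails. As a debugging aid, a minimal sketch of a variant that also prints the driver's compile log is shown here; the class and method names are hypothetical, but GLES20.glGetShaderInfoLog is a standard Android API, and glGetProgramInfoLog can be used the same way after glLinkProgram.

import android.opengl.GLES20;
import android.util.Log;

public class ShaderDebugUtils {
    // Hypothetical helper: like loadShader above, but also prints the driver's
    // compile log so the failing GLSL line can be identified.
    public static int loadShaderWithLog(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader == 0) {
            return 0;
        }
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] != GLES20.GL_TRUE) {
            // glGetShaderInfoLog returns the compiler's error message.
            Log.e("ShaderDebugUtils", "shader compile error: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }
}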
From the glGetAttribLocation and glGetUniformLocation calls in this snippet we can draw the following conclusions:

For avPosition and afPosition:
glGetAttribLocation returns the location of a vertex attribute in the shader program. avPosition and afPosition correspond to attribute variables in the vertex shader, here the vertex position and the texture coordinate. A vec4 attribute is declared in the vertex shader with syntax such as attribute vec4 av_Position;.

For samplerOES:
glGetUniformLocation returns the location of a uniform variable in the shader program. samplerOES is used here as a uniform and represents a texture sampler. In the fragment shader a sampler is declared as sampler2D (or another sampler type) and is used for texture sampling.

For umatrix:
This location is also obtained with glGetUniformLocation and represents a matrix variable. A name such as u_Matrix is conventionally used for a transformation matrix passed to the shader, for example a model-view-projection matrix. In the GLSL code it is declared as uniform mat4 u_Matrix;.

Note that the origin (0, 0) of the bitmap data is its top-left corner, while the origin (0, 0) of OpenGL texture coordinates is the bottom-left corner.

/**
 * Creates a texture from a bitmap. The result is upside down, because the bitmap
 * origin (0, 0) is the top-left corner while the OpenGL texture coordinate
 * origin (0, 0) is the bottom-left corner.
 *
 * @param bmp bitmap data
 * @return Handle to the texture.
 */
public static int createImageTexture(Bitmap bmp) {
    int[] textureHandles = new int[1];
    int textureHandle;

    GLES20.glGenTextures(1, textureHandles, 0);
    textureHandle = textureHandles[0];

    // Bind the texture handle to the 2D texture target.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

    // Configure min/mag filtering, i.e. what scaling method we use if what we're rendering
    // is smaller or larger than the source image.
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

    // Load the data from the bitmap into the texture handle.
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, bmp, 0);

    return textureHandle;
}
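Instead of compensating in the vertex shader with 1.0 - af_Position.y, the bitmap itself can be flipped vertically before it is uploaded. This is only an alternative sketch, not part of the original renderer; the class and method names are hypothetical, but android.graphics.Matrix and Bitmap.createBitmap are standard Android APIs.

import android.graphics.Bitmap;
import android.graphics.Matrix;

public class TextureBitmapUtils {
    // Hypothetical helper: flip a bitmap vertically before uploading it as a texture,
    // so the vertex shader can use the texture coordinates unchanged.
    public static Bitmap flipVertically(Bitmap src) {
        Matrix m = new Matrix();
        m.preScale(1.0f, -1.0f);  // mirror around the horizontal axis
        return Bitmap.createBitmap(src, 0, 0, src.getWidth(), src.getHeight(), m, false);
    }
}

If the bitmap is flipped this way before createImageTexture is called, the vertex shader could simply assign v_texPosition = af_Position.xy;.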
The onDrawFrame code below does the following: it sets the clear color to black and clears the color buffer, activates the shader program, passes a 4x4 matrix to the uniform variable u_Matrix via glUniformMatrix4fv, configures the vertex attribute data and enables the vertex attribute arrays, then activates and binds the texture object and passes the texture unit index to the sampler uniform. Finally it draws the quad with glDrawArrays as a triangle strip. Together these steps render the image to the screen.
public void onDrawFrame(GL10 gl) {
    // Set the clear color to black and clear the color buffer
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    // Use the shader program
    GLES20.glUseProgram(program);

    // Pass the transformation matrix to the u_Matrix uniform
    GLES20.glUniformMatrix4fv(umatrix, 1, false, matrix, 0);

    // Enable the avPosition vertex attribute array
    GLES20.glEnableVertexAttribArray(avPosition);
    // Describe the format and data source of avPosition
    GLES20.glVertexAttribPointer(avPosition, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);

    // Enable the afPosition vertex attribute array
    GLES20.glEnableVertexAttribArray(afPosition);
    // Describe the format and data source of afPosition
    GLES20.glVertexAttribPointer(afPosition, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

    // Activate texture unit 0
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    // Bind the texture object to the currently active texture unit
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapId);
    // Pass the texture unit index to the samplerOES uniform
    GLES20.glUniform1i(samplerOES, 0);

    // Draw using the current shader program and the vertex data configured above
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
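onDrawFrame as written leaves the two vertex attribute arrays enabled and the texture bound after drawing. With a single program and a single texture this is harmless, but when several renderers share one context it is common to reset that state at the end of the frame. A possible cleanup helper for BitmapRender is sketched below; it is an assumption, not part of the original code.

    // Hypothetical helper: reset the GL state touched by onDrawFrame.
    private void resetDrawState() {
        GLES20.glDisableVertexAttribArray(avPosition);   // stop sourcing vertex positions from the array
        GLES20.glDisableVertexAttribArray(afPosition);   // stop sourcing texture coordinates from the array
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);   // unbind the texture from the active unit
        GLES20.glUseProgram(0);                          // detach the shader program
    }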
Summary: when calling GLSL from OpenGL, the interaction between threads and the GL context deserves some care so that operations run correctly and in the right order. A few points worth noting:

GLSL shader compilation:
Load, compile and link the GLSL shader program on a single thread. Make sure the program is created in the correct context and that every shader program is associated with the appropriate OpenGL context.

Passing uniform and attribute data:
Before issuing draw calls, make sure the uniform variables and vertex attribute data have been passed to the shader program. This data should be uploaded while the correct OpenGL context is bound, so that the shaders can access it.

Texture operations:
When working with textures, make sure each texture object is bound to the correct texture unit and manipulated in the correct context. Texture state and bindings must be handled carefully.

Complete code:

package com.mg.cc.opengldemo;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.GLUtils;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;

import com.mg.cc.opengldemo.utils.BitmapUtils;

import java.io.BufferedOutputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class BitmapRender implements GLSurfaceView.Renderer {

    String vertexSource =
            "attribute vec4 av_Position;\n" +
            "attribute vec4 af_Position; // S, T texture coordinate\n" +
            "varying vec2 v_texPosition;\n" +
            "uniform mat4 u_Matrix;\n" +
            "void main() {\n" +
            "    v_texPosition = vec2(af_Position.x, 1.0 - af_Position.y);\n" + // flip vertically
            // "    v_texPosition = af_Position.xy;\n" +
            "    gl_Position = u_Matrix * av_Position;\n" +
            "}\n";

    String fragmentSource =
            "precision mediump float;\n" +
            "varying vec2 v_texPosition;\n" +
            "uniform sampler2D sTexture;\n" +
            "\n" +
            "void main() {\n" +
            "    gl_FragColor = texture2D(sTexture, v_texPosition);\n" +
            "}";

    public static final int NO_TEXTURE = -1;

    private int screenWidth, screenHeight;
    private MediaPlayer mediaPlayer;
    private Context context;

    // vertex rotation / transformation matrix
    private int umatrix;
    private float[] matrix = new float[16];

    // private final float[] vertexData = {
    //          1f, -1f,   // bottom right
    //         -1f, -1f,   // bottom left
    //          1f,  1f,   // top right
    //         -1f,  1f    // top left
    // };
    // private final float[] textureData = {
    //         1f, 0f,     // bottom right
    //         0f, 0f,     // bottom left
    //         1f, 1f,     // top right
    //         0f, 1f      // top left
    // };

    // Vertex coordinates (origin at the center of the display area)
    private final float[] vertexData = {
            -1.0f, -1.0f,   // bottom left
             1.0f, -1.0f,   // bottom right
            -1.0f,  1.0f,   // top left
             1.0f,  1.0f,   // top right
    };

    // Texture coordinates (origin at the bottom-left corner of the display area)
    private final float[] textureData = {
            0.0f, 0.0f,     // bottom left
            1.0f, 0.0f,     // bottom right
            0.0f, 1.0f,     // top left
            1.0f, 1.0f,     // top right
    };

    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;

    // mediacodec
    private int program;
    private int avPosition;
    private int afPosition;
    private int samplerOES;
    private int bitmapId;

    private int videoWidth;
    private int videoHeight;

    public BitmapRender(Context context, GLSurfaceView surfaceView) {
        this.context = context;

        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureData);
        textureBuffer.position(0);

        surfaceView.setEGLContextClientVersion(2);
        surfaceView.setRenderer(this);  // set the renderer
        surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        program = createProgram(vertexSource, fragmentSource);
        avPosition = GLES20.glGetAttribLocation(program, "av_Position");
        afPosition = GLES20.glGetAttribLocation(program, "af_Position");
        samplerOES = GLES20.glGetUniformLocation(program, "sTexture");
        umatrix = GLES20.glGetUniformLocation(program, "u_Matrix");

        Bitmap bitmap = BitmapUtils.getImageFromAssetsFile(context, "bg.png");
        videoWidth = bitmap.getWidth();
        videoHeight = bitmap.getHeight();
        bitmapId = createImageTexture(bitmap);
        bitmap.recycle();
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        screenWidth = width;
        screenHeight = height;
        if (videoWidth != 0 && videoHeight != 0) {
            updateProjection(videoWidth, videoHeight);
        }
    }

    // FilterRender object3D;

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

        GLES20.glUseProgram(program);
        GLES20.glUniformMatrix4fv(umatrix, 1, false, matrix, 0);

        GLES20.glEnableVertexAttribArray(avPosition);
        GLES20.glVertexAttribPointer(avPosition, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);

        GLES20.glEnableVertexAttribArray(afPosition);
        GLES20.glVertexAttribPointer(afPosition, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, bitmapId);
        GLES20.glUniform1i(samplerOES, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    public void saveFrame() {
        ByteBuffer buf = ByteBuffer.allocateDirect(screenWidth * screenHeight * 4);
        buf.order(ByteOrder.LITTLE_ENDIAN);
        GLES20.glReadPixels(0, 0, screenWidth, screenHeight,
                GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
        buf.rewind();
        try {
            Bitmap bmp = Bitmap.createBitmap(screenWidth, screenHeight, Bitmap.Config.ARGB_8888);
            bmp.copyPixelsFromBuffer(buf);
            String filePath = "/storage/emulated/0/DCIM/111.jpg";
            saveBitmap(filePath, bmp);
            bmp.recycle();
        } finally {
        }
    }

    /**
     * Saves a bitmap to disk.
     *
     * @param filePath
     * @param bitmap
     */
    public static void saveBitmap(String filePath, Bitmap bitmap) {
        if (bitmap == null) {
            return;
        }
        BufferedOutputStream bos = null;
        try {
            bos = new BufferedOutputStream(new FileOutputStream(filePath));
            bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
            bitmap.recycle();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } finally {
            if (bos != null) {
                try {
                    bos.close();
                } catch (IOException e) {
                    // do nothing
                }
            }
        }
    }

    public void updateProjection(int videoWidth, int videoHeight) {
        float screenRatio = (float) screenWidth / screenHeight;
        this.videoWidth = videoWidth;
        this.videoHeight = videoHeight;
        float videoRatio = (float) videoWidth / videoHeight;
        if (videoRatio > screenRatio) {
            Matrix.orthoM(matrix, 0, -1f, 1f,
                    -videoRatio / screenRatio, videoRatio / screenRatio, -1f, 1f);
        } else {
            Matrix.orthoM(matrix, 0, -screenRatio / videoRatio, screenRatio / videoRatio,
                    -1f, 1f, -1f, 1f);
        }
    }

    public static int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compile = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compile, 0);
            if (compile[0] != GLES20.GL_TRUE) {
                Log.d("ywl5320", "shader compile error");
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (fragmentShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertexShader);
            GLES20.glAttachShader(program, fragmentShader);
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.d("ywl5320", "link program error");
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }
    /**
     * Creates a texture from a bitmap. The result is upside down, because the bitmap
     * origin (0, 0) is the top-left corner while the OpenGL texture coordinate
     * origin (0, 0) is the bottom-left corner.
     *
     * @param bmp bitmap data
     * @return Handle to the texture.
     */
    public static int createImageTexture(Bitmap bmp) {
        int[] textureHandles = new int[1];
        int textureHandle;

        GLES20.glGenTextures(1, textureHandles, 0);
        textureHandle = textureHandles[0];

        // Bind the texture handle to the 2D texture target.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

        // Configure min/mag filtering, i.e. what scaling method we use if what we're rendering
        // is smaller or larger than the source image.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

        // Load the data from the bitmap into the texture handle.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, bmp, 0);

        return textureHandle;
    }
}
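For completeness, here is a minimal usage sketch. The Activity name is an assumption, and BitmapUtils.getImageFromAssetsFile is the helper referenced in onSurfaceCreated but not shown in this chapter. Since the BitmapRender constructor already sets the EGL context version, the renderer and the render mode, the Activity only needs to create the GLSurfaceView and hand it over.

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

public class MainActivity extends Activity {

    private GLSurfaceView glSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        glSurfaceView = new GLSurfaceView(this);
        // The BitmapRender constructor configures the GLSurfaceView and registers itself as renderer.
        BitmapRender render = new BitmapRender(this, glSurfaceView);
        setContentView(glSurfaceView);
    }

    @Override
    protected void onResume() {
        super.onResume();
        glSurfaceView.onResume();   // resume the GL render thread
    }

    @Override
    protected void onPause() {
        super.onPause();
        glSurfaceView.onPause();    // pause the GL render thread
    }
}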