我一直在挣扎着从使用的OpenGL ES 2.0的Android JPG / PNG文件绘制2D图像。 我到处都看的教程是纹理3D图像所以它是粗糙搞清楚如何绘制普通的2D雪碧。我有一个正方形绘制和旋转,但一旦来到纹理我一定搞砸了某个地方,因为我不断收到一个错误说DrawElements未绑定到任何数据,但如果我注释掉任何code做纹理它工作得很好。
任何帮助将是很大的AP preciated。
下面是我的code我的Sprite类和渲染器类:
公共类雪碧
{
//对活动上下文
私人最终语境mActivityContext;
//添加了纹理
私人最终FloatBuffer mCubeTextureCoordinates;
私人诠释mTextureUniformHandle;
私人诠释mTextureCoordinateHandle;
私人最终诠释mTextureCoordinateDataSize = 2;
私人诠释mTextureDataHandle;
私人最终字符串vertexShader code =
//测试
属性VEC2 a_TexCoordinate; +
不同VEC2 v_TexCoordinate; +
//试验结束
统一mat4 uMVPMatrix; +
属性vec4 vPosition; +
无效的主要(){+
GL_POSITION = vPosition * uMVPMatrix; +
//测试
v_TexCoordinate = a_TexCoordinate+
//试验结束
};
私人最终字符串fragmentShader code =
precision mediump浮动; +
统一vec4 vColor; +
//测试
统一sampler2D u_Texture; +
不同VEC2 v_TexCoordinate; +
//试验结束
无效的主要(){+
//gl_FragColor = vColor; +
gl_FragColor =(v_Color *的Texture2D(u_Texture,v_TexCoordinate)); +
};
私人最终诠释shaderProgram;
私人最终FloatBuffer vertexBuffer;
私人最终ShortBuffer drawListBuffer;
私人诠释mPositionHandle;
私人诠释mColorHandle;
私人诠释mMVPMatrixHandle;
//此数组中每个顶点的坐标数量
静态最终诠释COORDS_PER_VERTEX = 2;
静浮spriteCoords [] = {-0.5f,0.5F,//左上
-0.5f,-0.5f,//左下
0.5F,-0.5f,//右下
0.5F,0.5F}; //右上
私人短DRAWORDER [] = {0,1,2,0,2,3}; //为了绘制顶点
私人最终诠释vertexStride = COORDS_PER_VERTEX * 4; //字节每个顶点
//设置颜色有红,绿,蓝和alpha(透明度)值
浮色[] = {0.63671875f,0.76953125f,0.22265625f,1.0F};
公共雪碧(最终上下文activityContext)
{
mActivityContext = activityContext;
//初始化顶点字节缓冲的图形坐标/#的坐标值* 4%的浮动字节
ByteBuffer的BB = ByteBuffer.allocateDirect(spriteCoords.length * 4);
//使用设备的本机字节顺序
bb.order(ByteOrder.nativeOrder());
//创建一个从ByteBuffer的浮点缓冲区
vertexBuffer = bb.asFloatBuffer();
//添加坐标的FloatBuffer
vertexBuffer.put(spriteCoords);
//设置缓冲区读取第一个坐标
vertexBuffer.position(0);
// S,T(或X,Y)的
//纹理坐标数据。
//由于图像有一个Y轴指向下方,(数值增加,因为你来的像移动),而
// OpenGL的具有Y轴指向上方,我们对在这里调整通过翻转Y轴。
//更重要的是,纹理坐标是相同的每一张脸。
最终浮动[] cubeTextureCoordinateData =
{
//前脸
/*0.0f,0.0,
0.0,1.0F,
1.0F,0.0,
0.0,1.0F,
1.0F,1.0F,
1.0F,0.0 * /
-0.5f,0.5F,
-0.5f,-0.5f,
0.5F,-0.5f,
0.5F,0.5F
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder())asFloatBuffer()。
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
//初始化字节的缓冲区的抽奖名单
ByteBuffer的DLB = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(DRAWORDER);
drawListBuffer.position(0);
INT vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER,vertexShader code);
INT fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER,fragmentShader code);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram,vertexShader);
GLES20.glAttachShader(shaderProgram,fragmentShader);
//纹理code
GLES20.glBindAttribLocation(shaderProgram,0,a_TexCoordinate);
GLES20.glLinkProgram(shaderProgram);
//载入纹理
mTextureDataHandle = loadTexture(mActivityContext,R.drawable.brick);
}
公共无效抽奖(浮动[] mvpMatrix)
{
//添加程序OpenGL ES的环境
GLES20.glUseProgram(shaderProgram);
//获取处理到顶点着色器的vPosition成员
mPositionHandle = GLES20.glGetAttribLocation(shaderProgramvPosition);
//启用句柄三角形顶点
GLES20.glEnableVertexAttribArray(mPositionHandle);
// prepare三角坐标数据
GLES20.glVertexAttribPointer(mPositionHandle,COORDS_PER_VERTEX,GLES20.GL_FLOAT,假的,vertexStride,vertexBuffer);
//获取处理到片段着色器的vColor成员
mColorHandle = GLES20.glGetUniformLocation(shaderProgramvColor);
//设置颜色绘制三角形
GLES20.glUniform4fv(mColorHandle,1,颜色,0);
//设置纹理把手和绑定纹理
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgramu_Texture);
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderPrograma_TexCoordinate);
//活动纹理单元设置为纹理单元0。
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//绑定纹理到本机。
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,mTextureDataHandle);
//告诉质地均匀采样通过结合纹理单元0使用这种纹理着色器。
GLES20.glUniform1i(mTextureUniformHandle,0);
//传入纹理坐标信息
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle,mTextureCoordinateDataSize,GLES20.GL_FLOAT,假,0,mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//获取手柄形状的变换矩阵
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgramuMVPMatrix);
//应用投影和视图变换
GLES20.glUniformMatrix4fv(mMVPMatrixHandle,1,假的,mvpMatrix,0);
//绘制三角形
GLES20.glDrawElements(GLES20.GL_TRIANGLES,drawOrder.length,GLES20.GL_UNSIGNED_SHORT,drawListBuffer);
//禁用顶点数组
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
公共静态INT loadTexture(最终上下文的背景下,最终诠释RESOURCEID)
{
最终诠释[] textureHandle =新INT [1];
GLES20.glGenTextures(1,textureHandle,0);
如果(textureHandle [0]!= 0)
{
最后BitmapFactory.Options选项=新BitmapFactory.Options();
options.inScaled = FALSE; //没有pre-缩放
//阅读资源
最后的位图位= BitmapFactory.de codeResource(context.getResources(),RESOURCEID,期权);
//绑定到在OpenGL纹理
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,textureHandle [0]);
//设置过滤
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_NEAREST);
//将位图到绑定的纹理。
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D,0,位图,0);
//回收位图中,由于它的数据已被加载到OpenGL的。
bitmap.recycle();
}
如果(textureHandle [0] == 0)
{
抛出新的RuntimeException(错误加载纹理。);
}
返回textureHandle [0];
}
}
我的渲染器类:
公共类MyGL20Renderer实现GLSurfaceView.Renderer
{
私人最终语境mActivityContext;
//矩阵初始化
私人最终浮动[] mMVPMatrix =新的浮动[16];
私人最终浮动[] mProjMatrix =新的浮动[16];
私人最终浮动[] mVMatrix =新的浮动[16];
私人浮法[] mRotationMatrix =新的浮动[16];
//声明挥发性,因为我们是从另一个线程更新它
公众持股量波动裂伤;
//私人三角三角;
私人雪碧雪碧;
公共MyGL20Renderer(最终上下文activityContext)
{
mActivityContext = activityContext;
}
公共无效onSurfaceCreated(GL10未使用的,EGLConfig配置)
{
//设置背景框的颜色
GLES20.glClearColor(0.0,0.0,0.0,1.0F);
//初始化图形
//三角形=新三角();
精灵=新的Sprite(mActivityContext);
}
公共无效onDrawFrame(GL10未使用)
{
//重绘背景颜色
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//设置相机位置(视图矩阵)
Matrix.setLookAtM(mVMatrix,0,0,0,-3,0F,0F,0F,0F,1.0F,0.0);
//计算投影和视图变换
Matrix.multiplyMM(mMVPMatrix,0,mProjMatrix,0,mVMatrix,0);
//创建为三角形旋转变换
Matrix.setRotateM(mRotationMatrix,0,裂伤,0,0,-1.0F);
//合并旋转矩阵与投影和相机视图
Matrix.multiplyMM(mMVPMatrix,0,mRotationMatrix,0,mMVPMatrix,0);
//画形
//triangle.Draw(mMVPMatrix);
sprite.Draw(mMVPMatrix);
}
公共无效onSurfaceChanged(GL10未使用的,诠释的宽度,高度INT)
{
GLES20.glViewport(0,0,宽度,高度);
浮动率=(浮点)宽/高;
//这个投影矩阵应用在onDrawFrame对象坐标()方法
Matrix.frustumM(mProjMatrix,0,-ratio,比率,-1,1,3,7);
}
公共静态INT loadShader(整型,字符串着色器code)
{
//创建一个顶点着色器型或片段着色器类型(GLES20.GL_VERTEX_SHADER或GLES20.GL_FRAGMENT_SHADER)
INT着色器= GLES20.glCreateShader(类型);
//添加源$ C $ c和编译
GLES20.glShaderSource(着色,着色器code);
GLES20.glCompileShader(着色);
返回着色器;
}
}
解决方案
v_TexCoordinate = a_TexCoordinate+
应该是
v_TexCoordinate = a_TexCoordinate; +
显然,我忘了一个分号,现在我才意识到我是多么依赖我的IDE的告诉我,当我乱了愚蠢的事情,哈哈。
I've been struggling to draw a 2D image from jpg/png files using openGL ES 2.0 for Android. Everywhere I look the tutorials are for texturing 3D images so its been rough figuring out how to draw a regular 2D Sprite. I got a square to draw and rotate but once it came to texturing I must have messed up somewhere because I keep getting an error saying DrawElements isn't bound to any data but if I comment out any code to do with texturing it works fine.
Any help would be greatly appreciated.
Here is my Code for my Sprite class and Renderer Class:
public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;
//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;
private final String vertexShaderCode =
//Test
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
" gl_Position = vPosition * uMVPMatrix;" +
//Test
"v_TexCoordinate = a_TexCoordinate" +
//End Test
"}";
private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
//Test
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"void main() {" +
//"gl_FragColor = vColor;" +
"gl_FragColor = (v_Color * texture2D(u_Texture, v_TexCoordinate));" +
"}";
private final int shaderProgram;
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;
// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = { -0.5f, 0.5f, // top left
-0.5f, -0.5f, // bottom left
0.5f, -0.5f, // bottom right
0.5f, 0.5f }; //top right
private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex
// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };
public Sprite(final Context activityContext)
{
mActivityContext = activityContext;
//Initialize Vertex Byte Buffer for Shape Coordinates / # of coordinate values * 4 bytes per float
ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4);
//Use the Device's Native Byte Order
bb.order(ByteOrder.nativeOrder());
//Create a floating point buffer from the ByteBuffer
vertexBuffer = bb.asFloatBuffer();
//Add the coordinates to the FloatBuffer
vertexBuffer.put(spriteCoords);
//Set the Buffer to Read the first coordinate
vertexBuffer.position(0);
// S, T (or X, Y)
// Texture coordinate data.
// Because images have a Y axis pointing downward (values increase as you move down the image) while
// OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
// What's more is that the texture coordinates are the same for every face.
final float[] cubeTextureCoordinateData =
{
//Front face
/*0.0f, 0.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f*/
-0.5f, 0.5f,
-0.5f, -0.5f,
0.5f, -0.5f,
0.5f, 0.5f
};
mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);
//Initialize byte buffer for the draw list
ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
dlb.order(ByteOrder.nativeOrder());
drawListBuffer = dlb.asShortBuffer();
drawListBuffer.put(drawOrder);
drawListBuffer.position(0);
int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
shaderProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(shaderProgram, vertexShader);
GLES20.glAttachShader(shaderProgram, fragmentShader);
//Texture Code
GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");
GLES20.glLinkProgram(shaderProgram);
//Load the texture
mTextureDataHandle = loadTexture(mActivityContext, R.drawable.brick);
}
public void Draw(float[] mvpMatrix)
{
//Add program to OpenGL ES Environment
GLES20.glUseProgram(shaderProgram);
//Get handle to vertex shader's vPosition member
mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");
//Enable a handle to the triangle vertices
GLES20.glEnableVertexAttribArray(mPositionHandle);
//Prepare the triangle coordinate data
GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);
//Get Handle to Fragment Shader's vColor member
mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");
//Set the Color for drawing the triangle
GLES20.glUniform4fv(mColorHandle, 1, color, 0);
//Set Texture Handles and bind Texture
mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
//Set the active texture unit to texture unit 0.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//Bind the texture to this unit.
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);
//Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
GLES20.glUniform1i(mTextureUniformHandle, 0);
//Pass in the texture coordinate information
mCubeTextureCoordinates.position(0);
GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
//Get Handle to Shape's Transformation Matrix
mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");
//Apply the projection and view transformation
GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
//Draw the triangle
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
//Disable Vertex Array
GLES20.glDisableVertexAttribArray(mPositionHandle);
}
public static int loadTexture(final Context context, final int resourceId)
{
final int[] textureHandle = new int[1];
GLES20.glGenTextures(1, textureHandle, 0);
if (textureHandle[0] != 0)
{
final BitmapFactory.Options options = new BitmapFactory.Options();
options.inScaled = false; // No pre-scaling
// Read in the resource
final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);
// Bind to the texture in OpenGL
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
// Recycle the bitmap, since its data has been loaded into OpenGL.
bitmap.recycle();
}
if (textureHandle[0] == 0)
{
throw new RuntimeException("Error loading texture.");
}
return textureHandle[0];
}
}
My Renderer Class:
public class MyGL20Renderer implements GLSurfaceView.Renderer
{
private final Context mActivityContext;
//Matrix Initializations
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private float[] mRotationMatrix = new float[16];
//Declare as volatile because we are updating it from another thread
public volatile float mAngle;
//private Triangle triangle;
private Sprite sprite;
public MyGL20Renderer(final Context activityContext)
{
mActivityContext = activityContext;
}
public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
//Set the background frame color
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
//Initialize Shapes
//triangle = new Triangle();
sprite = new Sprite(mActivityContext);
}
public void onDrawFrame(GL10 unused)
{
//Redraw background color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
//Set the camera position (View Matrix)
Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
//Calculate the projection and view transformation
Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);
//Create a rotation transformation for the triangle
Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);
//Combine the rotation matrix with the projection and camera view
Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);
//Draw Shape
//triangle.Draw(mMVPMatrix);
sprite.Draw(mMVPMatrix);
}
public void onSurfaceChanged(GL10 unused, int width, int height)
{
GLES20.glViewport(0, 0, width, height);
float ratio = (float) width / height;
//This Projection Matrix is applied to object coordinates in the onDrawFrame() method
Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}
public static int loadShader(int type, String shaderCode)
{
//Create a Vertex Shader Type Or a Fragment Shader Type (GLES20.GL_VERTEX_SHADER OR GLES20.GL_FRAGMENT_SHADER)
int shader = GLES20.glCreateShader(type);
//Add The Source Code and Compile it
GLES20.glShaderSource(shader, shaderCode);
GLES20.glCompileShader(shader);
return shader;
}
}
解决方案
"v_TexCoordinate = a_TexCoordinate" +
should have been
"v_TexCoordinate = a_TexCoordinate;" +
Apparently I forgot a semi-colon, now I realize just how much I rely on my IDE's to tell me when I mess up stupid things haha.