Android:OpenGL ES2纹理不起作用

时间:2011-11-04 23:56:32

标签: java android opengl-es textures opengl-es-2.0

更新:删除了行GLES20.glEnable(GLES20.GL_TEXTURE_2D);但是行GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels));给出了GL_INVALID_ENUM ...像素缓冲区长度是196608.

项目文件:http://godofgod.co.uk/my_files/NightCamPrj.zip

我正在尝试将相机数据传输到OpenGL ES2着色器,并且相机的东西似乎可以正常工作,但即使我尝试自己的值,也无法使纹理工作。我得到一个黑屏。这是代码:

package com.matthewmitchell.nightcam;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Scanner;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.content.res.AssetManager;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        Debug.out("VS LOG: " + GLES20.glGetShaderInfoLog(vertexShader)); 
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        Debug.out("FS LOG: " + GLES20.glGetShaderInfoLog(fragmentShader)); 
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        Debug.out("PROG LOG: " + GLES20.glGetProgramInfoLog(mProgram)); 
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        camera_texture = null;
        GLES20.glEnable(GLES20.GL_TEXTURE_2D);
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        }; //Even if wrong way around it should produce a texture with these coordinates on the screen.

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        //Load file from assets folder using context given by the activity class
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        //Bind texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        //Disable arrays
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context mcontext) {
        context = mcontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        //Takes pixel data from camera and makes texture
        byte[] pixels = new byte[256*256*3]; //Testing simple 256x256 texture. Will update for camera resolution
        for(int x = 0;x < 256;x++){
            for(int y = 0;y < 256;y++){
                //Ignore camera data, use test values.
                pixels[(x*256+y)*3] = 0;
                pixels[(x*256+y)*3+1] = 100;
                pixels[(x*256+y)*3+2] = 120;
            }
        }
        //Debug.out("PX:" + pixels[0] + " " + pixels[1] + " " + pixels[2]);
        //Make new texture for new data
        if (camera_texture == null){
            camera_texture = new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 256, 256, 0, GLES20.GL_RGB, GLES20.GL_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    }
}

这是顶点着色器代码:

attribute vec4 vPosition;
attribute vec2 a_texCoord;
varying vec2 v_texCoord;
void main(){ 
    gl_Position = vPosition;
    v_texCoord = a_texCoord;
}    

这是片段着色器代码:

precision mediump float;
varying vec2 v_texCoord;
uniform sampler2D s_texture;
void main(){
    gl_FragColor = texture2D(s_texture, v_texCoord);
}

我们可以忽略这里的相机内容,因为我正在使用测试值。我正在使用测试256x256纹理。我已经完成了我在示例中看到的所有内容。

为什么它是黑色的,我怎么才能让它显示出来?

3 个答案:

答案 0 :(得分:3)

从您的代码中看不到它,但在我看来,您并没有从有渲染上下文的地方调用bindCameraTexture(而您应该在onSurfaceCreated或onSurfaceChanged中执行此操作)。

答案 1 :(得分:3)

我发现您使用glGetAttribLocation()来检索s_texture的位置。这是一个统一变量,而不是属性。请尝试使用glGetUniformLocation()代替此。

我不知道这是否会解决你所有的问题,但这是必须要做的事情。

答案 2 :(得分:0)

我用相机预览作为纹理完成了示例。与您的代码的主要区别是:

  1. 我使用SurfaceTexture将相机预览连接到openGL ES中使用的纹理。
  2. 我使用SurfaceTexture生成的矩阵来调整相机预览的输出,否则会出现黑色闪烁区域。
  3. 我没有在用于相机预览的纹理上明确调用glBindTexture()。

    祝你好运。