- }
- }
- camera.release();
- camera.setPreviewCallbackWithBuffer(null);
- camera.stopPreview();
- public void destroy() {
- @Override
- }
- shader.end();
- mesh.render(shader, GL20.GL_TRIANGLES);
- //Render our mesh using the shader, which in turn will use our textures to render their content on the mesh
- shader.setUniformi("uv_texture", 1);
- //Set the uniform uv_texture object to the texture at slot 1
- shader.setUniformi("y_texture", 0);
- //Set the uniform y_texture object to the texture at slot 0
- shader.begin();
- */
- * Draw the textures onto a mesh using our shader
- /*
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_WRAP_T, GL20.GL_CLAMP_TO_EDGE);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_WRAP_S, GL20.GL_CLAMP_TO_EDGE);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_MAG_FILTER, GL20.GL_LINEAR);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR);
- //Use linear interpolation when magnifying/minifying the texture to areas larger/smaller than the texture size
- Gdx.gl.glTexImage2D(GL20.GL_TEXTURE_2D, 0, GL20.GL_LUMINANCE_ALPHA, 1280/2, 720/2, 0, GL20.GL_LUMINANCE_ALPHA, GL20.GL_UNSIGNED_BYTE, uvBuffer);
- //Note that we could have also found V at G or B as well.
- //and the second byte (U) into the A component of the texture. That's why we find U and V at A and R respectively in the fragment shader code.
- //and each pixel is two bytes. By setting GL_LUMINANCE_ALPHA, OpenGL puts first byte (V) into R,G and B components and of the texture
- //UV texture is (width/2*height/2) in size (downsampled by 2 in both dimensions, each pixel corresponds to 4 pixels of the Y channel)
- uvTexture.bind();
- Gdx.gl.glActiveTexture(GL20.GL_TEXTURE1);
- //Set texture slot 1 as active and bind our texture object to it
- */
- * Prepare the UV channel texture
- /*
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_WRAP_T, GL20.GL_CLAMP_TO_EDGE);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_WRAP_S, GL20.GL_CLAMP_TO_EDGE);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_MAG_FILTER, GL20.GL_LINEAR);
- Gdx.gl.glTexParameterf(GL20.GL_TEXTURE_2D, GL20.GL_TEXTURE_MIN_FILTER, GL20.GL_LINEAR);
- //Use linear interpolation when magnifying/minifying the texture to areas larger/smaller than the texture size
- Gdx.gl.glTexImage2D(GL20.GL_TEXTURE_2D, 0, GL20.GL_LUMINANCE, 1280, 720, 0, GL20.GL_LUMINANCE, GL20.GL_UNSIGNED_BYTE, yBuffer);
- //Y texture is (width*height) in size and each pixel is one byte; by setting GL_LUMINANCE, OpenGL puts this byte into R,G and B components of the texture
- yTexture.bind();
- Gdx.gl.glActiveTexture(GL20.GL_TEXTURE0);
- //Set texture slot 0 as active and bind our texture object to it
- */
- * Prepare the Y channel texture
- /*
- uvBuffer.position(0);
- uvBuffer.put(image, 1280*720, 1280*720/2);
- //Copy the UV channels of the image into their buffer, the following (width*height/2) bytes are the UV channel; the U and V bytes are interspread
- yBuffer.position(0);
- yBuffer.put(image, 0, 1280*720);
- //Copy the Y channel of the image into its buffer, the first (width*height) bytes are the Y channel
- */
- * we must copy the channels in our byte array into buffers before setting them to textures
- * Because of Java's limitations, we can't reference the middle of an array and
- /*
- public void renderBackground() {
- @Override
- }
- camera.addCallbackBuffer(image);
- //Send the buffer reference to the next preview so that a new buffer is not allocated and we use the same space
- public void onPreviewFrame(byte[] data, Camera camera) {
- @Override
- }
- camera.addCallbackBuffer(image);
- //Set the first buffer, the preview doesn't start unless we set the buffers
- camera.startPreview();
- //Start the preview
- camera.setParameters(params);
- params.setPreviewSize(1280,720);
- params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
- Camera.Parameters params = camera.getParameters();
- //Set the camera parameters
- camera.setPreviewCallbackWithBuffer(this);
- //We set the buffer ourselves that will be used to hold the preview image
- camera = Camera.open(0);
- */
- * Initialize the Android camera
- /*
- mesh.setIndices(indices);
- mesh.setVertices(vertices);
- //Set vertices and indices to our mesh
- short[] indices = {0, 1, 2, 0, 2, 3};
- //The indices come in trios of vertex indices that describe the triangles of our mesh
- };
- 1.0f, 0.0f // TexCoord 3
- 1.0f, 1.0f, // Position 3
- 1.0f, 1.0f, // TexCoord 2
- 1.0f, -1.0f, // Position 2
- 0.0f, 1.0f, // TexCoord 1
- -1.0f, -1.0f, // Position 1
- 0.0f, 0.0f, // TexCoord 0
- -1.0f, 1.0f, // Position 0
- float[] vertices = {
- //The vertices include the screen coordinates (between -1.0 and 1.0) and texture coordinates (between 0.0 and 1.0)
- new VertexAttribute(Usage.TextureCoordinates, 2, "a_texCoord"));
- new VertexAttribute(Usage.Position, 2, "a_position"),
- mesh = new Mesh(true, 4, 6,
- //Create our mesh that we will draw on, it has 4 vertices corresponding to the 4 corners of the screen
- shader = new ShaderProgram(vertexShader, fragmentShader);
- //Create and compile our shader
- "} \n";
- " gl_FragColor = vec4(r, g, b, 1.0); \n" +
- //We finally set the RGB color of our pixel
- " b = y + 2.03211*u; \n" +
- " g = y - 0.39465*u - 0.58060*v; \n" +
- " r = y + 1.13983*v; \n" +
- //The numbers are just YUV to RGB conversion constants
- " v = texture2D(uv_texture, v_texCoord).r - 0.5; \n" +
- " u = texture2D(uv_texture, v_texCoord).a - 0.5; \n" +
- //in the texture, this is probably the fastest way to use them in the shader
- //texture respectively using GL_LUMINANCE_ALPHA. Since U,V bytes are interspread
- //We had put the U and V values of each pixel to the A and R,G,B components of the
- " y = texture2D(y_texture, v_texCoord).r; \n" +
- //that's why we're pulling it from the R component, we could also use G or B
- //We had put the Y values of each pixel to the R,G,B components by GL_LUMINANCE,
- " float r, g, b, y, u, v; \n" +
- "void main (void){ \n" +
- "uniform sampler2D uv_texture; \n" +
- "uniform sampler2D y_texture; \n" +
- "varying vec2 v_texCoord; \n" +
- "#endif \n" +
- "precision highp float; \n" +
- "#ifdef GL_ES \n" +
- String fragmentShader =
- //Effectively making YUV to RGB conversion
- //Our fragment shader code; takes Y,U,V values for each pixel and calculates R,G,B colors,
- "} \n";
- " v_texCoord = a_texCoord; \n" +
- " gl_Position = a_position; \n" +
- "void main(){ \n" +
- "varying vec2 v_texCoord; \n" +
- "attribute vec2 a_texCoord; \n" +
- "attribute vec4 a_position; \n" +
- String vertexShader =
- //Our vertex shader code; nothing special
- uvBuffer.order(ByteOrder.nativeOrder());
- yBuffer.order(ByteOrder.nativeOrder());
- uvBuffer = ByteBuffer.allocateDirect(1280*720/2); //We have (width/2*height/2) pixels, each pixel is 2 bytes
- yBuffer = ByteBuffer.allocateDirect(1280*720);
- //Allocate buffers on the native memory space, not inside the JVM heap
- uvTexture = new Texture(1280/2,720/2,Format.LuminanceAlpha); //A 16-bit per pixel format
- yTexture = new Texture(1280,720,Format.Intensity); //A 8-bit per pixel format
- //Allocate textures
- Texture.setEnforcePotImages(false);
- //Do not enforce power of two texture sizes
- */
- * Initialize the OpenGL/libgdx stuff
- /*
- public void init(){
- @Override
- }
- image = new byte[1280*720/8*12];
- //Our YUV image is 12 bits per pixel
- public AndroidDependentCameraController(){
- Mesh mesh; //Our mesh that we will draw the texture on
- Texture uvTexture; //Our UV texture
- Texture yTexture; //Our Y texture
- ShaderProgram shader; //Our shader
- private ByteBuffer uvBuffer;
- private ByteBuffer yBuffer;
- //The Y and UV buffers that will pass our image channel data to the textures
- private Camera camera; //The camera object
- private static byte[] image; //The image buffer that will hold the camera image when preview callback arrives
- public class AndroidDependentCameraController implements PlatformDependentCameraController, Camera.PreviewCallback {
// Source: http://blog.csdn.net/jxw167/article/details/75501637