Commit b0bdf319 authored by zhanglin's avatar zhanglin

[merge] merge ImageReader class

parent e8f3fe64
precision mediump float;
varying vec2 vTextureCoord;
uniform sampler2D sTexture;
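// Presumably the asset FRAGMENT_SHADER_NV21ToRGBA.glsl loaded by ImageRenderer below:
// it reads NV21 data that the RGBA-to-NV21 pass packed into rows 0.625 - 1.0 of an RGBA
// texture and converts it back to RGBA for display.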
void main() {
float y, u, v;
vec4 color;
// Compute the actual texture coordinate position
vec2 texCoord = vTextureCoord;
// Source image region mapping:
// Y: 0.625 - 0.875 (0.25 of the total height)
// UV: 0.875 - 1.0 (0.125 of the total height)
// Compute the Y sample position (starting at 0.625)
float yTexCoord = texCoord.y * 0.25 + 0.625;
y = texture2D(sTexture, vec2(texCoord.x, yTexCoord)).r;
// Compute the UV sample position (starting at 0.875)
// UV data is sampled per 2x2 pixel block, so the x coordinate needs to be divided by 2
float uvTexCoord = texCoord.y * 0.125 + 0.875;
vec2 uvPos = vec2(texCoord.x, uvTexCoord);
vec4 uvColor = texture2D(sTexture, uvPos);
v = uvColor.r; // V is in the first component
u = uvColor.g; // U is in the second component
// YUV to RGB conversion
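// BT.601 limited-range coefficients: Y is expanded from [16, 235] and the chroma
// components re-centered before applying the conversion matrix.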
y = 1.164 * (y - 0.0625);
u = u - 0.5;
v = v - 0.5;
color.r = y + 1.596 * v;
color.g = y - 0.813 * v - 0.391 * u;
color.b = y + 2.018 * u;
color.a = 1.0;
// Clamp the color values to the valid range
color = clamp(color, 0.0, 1.0);
gl_FragColor = color;
}
\ No newline at end of file
@@ -4,10 +4,21 @@ import android.content.Context;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.view.Surface;
import java.io.File;
import java.io.FileOutputStream;
@@ -19,7 +30,6 @@ import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.Arrays;
import javax.microedition.khronos.opengles.GL10;
/**
@@ -29,6 +39,18 @@ import javax.microedition.khronos.opengles.GL10;
public class ImageRenderer implements GLSurfaceView.Renderer {
private Context mContext;
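// New state for the ImageReader path: an RGBA ImageReader, a dedicated EGL
// surface/context bound to its Surface, and a HandlerThread for its callbacks.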
private int mNV21ToRGBAProgram;
private boolean mNeedConvertBack = false;
private ImageReader mImageReader;
private EGLSurface mEGLSurface;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLConfig mEGLConfig;
private Surface mSurface;
private HandlerThread mHandlerThread;
private Handler mHandler;
public ImageRenderer(Context context) {
mContext = context;
@@ -57,15 +79,16 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
}
@Override
public void onSurfaceCreated(GL10 glUnused, javax.microedition.khronos.egl.EGLConfig config) {
Log.v(TAG, "onSurfaceCreated");
// Ignore the passed-in GL10 interface, and use the GLES20
// class's static methods instead.
/* Set up alpha blending and an Android background color */
GLES20.glEnable(GLES20.GL_BLEND);
// GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
GLES20.glBlendFunc(GLES20.GL_ONE, GLES20.GL_ZERO);
GLES20.glClearColor(0.643f, 0.776f, 0.223f, 1.0f);
/* Set up shaders and handles to their variables */
@@ -130,42 +153,178 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
mPixelBuffer = ByteBuffer.allocateDirect(RGBA_SIZE);
mPixelBuffer.order(ByteOrder.nativeOrder());
// Create the NV21-to-RGBA program
String nv21ToRgbaShader = loadShaderFromAssets("FRAGMENT_SHADER_NV21ToRGBA.glsl");
mNV21ToRGBAProgram = createProgram(mVertexShader, nv21ToRgbaShader);
// Create the HandlerThread
mHandlerThread = new HandlerThread("ImageReaderThread");
mHandlerThread.start();
mHandler = new Handler(mHandlerThread.getLooper());
// Create the ImageReader
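// RGBA_8888 matches the packed NV21-in-RGBA output of the offscreen pass; maxImages = 2
// lets one image be processed while the next frame is being rendered.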
mImageReader = ImageReader.newInstance(mWidth, mHeight,
PixelFormat.RGBA_8888, 2);
mImageReader.setOnImageAvailableListener(
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
if (image != null) {
processImage(image);
}
} finally {
if (image != null) {
image.close();
}
}
}
}, mHandler);
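// Images are delivered on mHandlerThread, so processImage() runs off the GL thread.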
// Get the Surface
mSurface = mImageReader.getSurface();
// Create the EGL environment
createEGLContext();
}
private void processImage(Image image) {
Image.Plane[] planes = image.getPlanes();
ByteBuffer buffer = planes[0].getBuffer();
int pixelStride = planes[0].getPixelStride();
int rowStride = planes[0].getRowStride();
int rowPadding = rowStride - pixelStride * mWidth;
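// The RGBA-to-NV21 pass packs Y into rows [0.625, 0.875) and interleaved VU into rows
// [0.875, 1.0) of the RGBA output (see the fragment shader above), so the NV21 bytes are
// the last 3/8 of the buffer: starting at RGBA_SIZE * 5 / 8, there are exactly
// width * height * 3 / 2 bytes. This assumes rowStride == width * pixelStride; the
// commented-out loops below would handle row padding.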
buffer.position(RGBA_SIZE * 5 / 8);
buffer.get(mImageBytes);
// Read the Y data
// int yOffset = (int)(mHeight * 0.625f);
// int yHeight = (int)(mHeight * 0.25f);
// buffer.position(yOffset * rowStride);
// for (int row = 0; row < yHeight; row++) {
// buffer.get(mImageBytes, row * mWidth, mWidth);
// buffer.position(buffer.position() + rowPadding);
// }
//
// // Read the UV data
// int uvOffset = (int)(mHeight * 0.875f);
// int uvHeight = (int)(mHeight * 0.125f);
// buffer.position(uvOffset * rowStride);
// for (int row = 0; row < uvHeight; row++) {
// buffer.get(mImageBytes, Y_SIZE + row * mWidth, mWidth);
// buffer.position(buffer.position() + rowPadding);
// }
saveImage(mImageBytes, "nv21");
}
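// Creates an EGL window surface on the ImageReader's Surface. The new context is created
// with the GLSurfaceView's current context as its share context, so textures and programs
// created by the renderer (mProgram, mTextureID, ...) stay usable when rendering into the
// ImageReader.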
private void createEGLContext() {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] version = new int[2];
EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1);
int[] attribList = {
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_NONE
};
android.opengl.EGLConfig[] configs = new android.opengl.EGLConfig[1];
int[] numConfigs = new int[1];
EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0,
configs.length, numConfigs, 0);
mEGLConfig = configs[0];
// Get the current EGL context to use as the share context
EGLContext sharedContext = EGL14.eglGetCurrentContext();
int[] contextAttribs = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
// Create a new context that shares with it
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig,
sharedContext, contextAttribs, 0);
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig,
mSurface, surfaceAttribs, 0);
}
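// Per frame: make the ImageReader-backed surface current, run the RGBA-to-NV21 pass into
// it (the resulting frames arrive in onImageAvailable/processImage on the handler thread),
// restore the previous EGL state, then draw the NV21-to-RGBA pass to the default framebuffer.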
@Override
public void onDrawFrame(GL10 glUnused) {
// Save the current EGL state
EGLDisplay currentDisplay = EGL14.eglGetCurrentDisplay();
EGLSurface currentDrawSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW);
EGLSurface currentReadSurface = EGL14.eglGetCurrentSurface(EGL14.EGL_READ);
EGLContext currentContext = EGL14.eglGetCurrentContext();
// Make our EGL context and surface current
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
// Render to the ImageReader's Surface
renderTexture(mProgram, mTextureID, true);
// Submit the rendered result
EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
EGL14.eglMakeCurrent(currentDisplay, currentDrawSurface, currentReadSurface, currentContext);
// First pass: RGBA to NV21
// GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer);
// checkGlError("glBindFramebuffer mFrameBuffer: " + mFrameBuffer);
//
// renderTexture(mProgram, mTextureID, true); // use the RGB2NV21 shader
//
// // Read back the NV21 data
// GLES20.glReadPixels(0, 0, mWidth, mHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuffer);
// checkGlError("glReadPixels");
//
// mPixelBuffer.clear();
// mPixelBuffer.position(RGBA_SIZE * 5 / 8);
//
// byte[] yData = new byte[16]; // 读取前16个Y值
// mPixelBuffer.get(yData);
//
// StringBuilder ySb = new StringBuilder("Y values: ");
// for (byte b : yData) {
// ySb.append(String.format("%02X ", b & 0xFF));
// }
// Log.d(TAG, "y value test: " + ySb.toString());
//
// mPixelBuffer.position(RGBA_SIZE * 5 / 8);
// mPixelBuffer.get(mImageBytes);
// saveImage(mImageBytes, "nv21");
// mPixelBuffer.rewind();
//// // Re-upload the NV21 data to the texture
//// GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureID);
//// mPixelBuffer.position(0);
//// GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mWidth, mHeight,
//// 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuffer);
//// checkGlError("glTexImage2D with NV21 data");
//
// // Second pass: convert NV21 back to RGBA and display it on screen
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
checkGlError("glBindFramebuffer 0");
renderTexture(mNV21ToRGBAProgram, mFrameTextureID, false); // use the NV21ToRGBA shader
}
private void renderTexture(int program, int textureId, boolean toNv21) {
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(program);
checkGlError("glUseProgram");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
// Set the vertex coordinates
mVertices.position(VERTICES_DATA_POS_OFFSET);
@@ -194,6 +353,7 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
checkGlError("glUniformMatrix4fv mSTMatrix");
// RGB2NV21 needs uWidth and uHeight to be set
if(toNv21){
if (muWidthHandle != -1) {
// Set uWidth
GLES20.glUniform1f(muWidthHandle, mWidth);
@@ -204,6 +364,7 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
GLES20.glUniform1f(muHeightHandle, mHeight);
checkGlError("glUniform1f muHeightHandle");
}
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
@@ -220,9 +381,9 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mWidth, mHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
checkGlError("texImage2D textures");
// Set the texture filtering mode
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
// Clamp to edge is the only option
@@ -260,6 +421,17 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
Log.e(TAG, "Framebuffer is not complete");
}
// Set texture parameters
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameTextureID);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
GLES20.GL_CLAMP_TO_EDGE);
// Unbind the framebuffer
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
checkGlError("glBindFramebuffer 0");
@@ -311,6 +483,23 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
return program;
}
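// Releases the ImageReader and the dedicated EGL surface/context created in
// createEGLContext(). A hypothetical host (not part of this commit) would use it roughly as:
//   glSurfaceView.setEGLContextClientVersion(2);
//   glSurfaceView.setRenderer(renderer);
//   ...
//   renderer.release(); // on teardown
// Note that mHandlerThread is left running here.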
public void release() {
if (mImageReader != null) {
mImageReader.close();
mImageReader = null;
}
if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE,
EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
if (mEGLSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
}
if (mEGLContext != EGL14.EGL_NO_CONTEXT) {
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
}
EGL14.eglTerminate(mEGLDisplay);
}
}
private void checkGlError(String op) {
int error;
@@ -330,7 +519,7 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
-1.0f, -1.0f, 0, 0.f, 0.f, // bottom left
1.0f, -1.0f, 0, 1.f, 0.f, // bottom right
-1.0f, 1.0f, 0, 0.f, 1.f, // top left
1.0f, 1.0f, 0, 1.f, 1.f // top right
};
// private final float[] mVerticesData = {
@@ -454,8 +643,12 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
}
}
private int index = 0;
private static boolean saved = false;
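// Dumps the NV21 bytes to disk once (guarded by "saved") so the output of the
// RGBA-to-NV21 pass can be inspected offline.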
private void saveImage(byte[] buffer, String suffix) {
if(saved) return;
Log.v(TAG, "saveImage buffer length: " + buffer.length); Log.v(TAG, "saveImage buffer length: " + buffer.length);
FileOutputStream fos = null; FileOutputStream fos = null;
try { try {
...@@ -471,11 +664,11 @@ public class ImageRenderer implements GLSurfaceView.Renderer { ...@@ -471,11 +664,11 @@ public class ImageRenderer implements GLSurfaceView.Renderer {
} else { } else {
file.createNewFile(); file.createNewFile();
} }
// Write to the file
fos = new FileOutputStream(file);
fos.write(buffer);
fos.flush();
fos.close();
saved = true;
} catch (Exception e) {
e.printStackTrace();
} finally {
...