<uses-permission android:name="android.permission.CAMERA" />
<trolleg.CameraView
    android:id="@+id/fd_fase_surface_view"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:alpha="0"/>
public class CameraView extends SurfaceView implements SurfaceHolder.Callback, Camera.PreviewCallback {
    public FrameCamera frameCamera = new FrameCamera(); // latest frame, shared with the GL renderer
    private static final int MAGIC_TEXTURE_ID = 10;
    boolean cameraFacing;
    private byte mBuffer[];
    private static final String TAG = "CameraView";
    private Camera mCamera;
    private SurfaceTexture mSurfaceTexture;
    int numberOfCameras;
    int cameraIndex;
    int previewWidth;
    int previewHeight;
    int cameraWidth;
    int cameraHeight;

    public CameraView(Context context, AttributeSet attrs) {
        super(context, attrs);
        cameraIndex = 0;
        numberOfCameras = android.hardware.Camera.getNumberOfCameras();
        android.hardware.Camera.CameraInfo cameraInfo = new android.hardware.Camera.CameraInfo();
        // prefer the front-facing camera if there is one
        for (int i = 0; i < numberOfCameras; i++) {
            android.hardware.Camera.getCameraInfo(i, cameraInfo);
            if (cameraInfo.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
                cameraIndex = i;
            }
        }
        getHolder().addCallback(this);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        previewHeight = h;
        previewWidth = w;
        startCameraPreview(w, h);
    }

    private void startCameraPreview(int previewWidthLocal, int previewHeightLocal) {
        releaseCamera(); // stops the preview and releases mCamera (not shown in this excerpt)
        mCamera = Camera.open(cameraIndex);
        Camera.Parameters params = mCamera.getParameters();
        params.setPreviewFormat(ImageFormat.NV21); // NV21 is the default preview format
        // ... (choosing the preview size is omitted; cameraWidth and cameraHeight hold the chosen size)
        mCamera.setParameters(params);
        // allocate a callback buffer large enough for one NV21 frame
        int size = cameraWidth * cameraHeight;
        size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
        mBuffer = new byte[size];
        try {
            // receive frames into our own buffer instead of letting the camera allocate one per frame
            mCamera.addCallbackBuffer(mBuffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            // the preview is not drawn anywhere; frames are consumed only in onPreviewFrame
            mCamera.setPreviewDisplay(null);
            mCamera.startPreview();
        } catch (Exception e) {
            Log.d(TAG, "Error starting camera preview: " + e.getMessage());
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
        releaseCamera();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        synchronized (frameCamera) {
            // copy the frame into the shared holder for the GL renderer
            frameCamera.cameraWidth = cameraWidth;
            frameCamera.cameraHeight = cameraHeight;
            frameCamera.facing = cameraFacing;
            if (frameCamera.bufferFromCamera == null || frameCamera.bufferFromCamera.length != data.length) {
                frameCamera.bufferFromCamera = new byte[data.length];
            }
            System.arraycopy(data, 0, frameCamera.bufferFromCamera, 0, data.length);
            frameCamera.wereProcessed = false;
        }
        // hand the buffer back to the camera for the next frame
        mCamera.addCallbackBuffer(mBuffer);
    }

    public void disableView() {
        releaseCamera();
    }

    public void enableView() {
        startCameraPreview(previewWidth, previewHeight);
    }
}
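The FrameCamera holder that CameraView fills and the renderer reads is not shown in this excerpt. Judging only by the fields the code touches, a minimal sketch of it could look like this (the field types are assumptions):

// Assumed minimal shape of FrameCamera, inferred from how CameraView and OurRenderer use it.
public class FrameCamera {
    public byte[] bufferFromCamera;  // latest NV21 frame copied in onPreviewFrame
    public int cameraWidth;          // width of the frame in the buffer
    public int cameraHeight;         // height of the frame in the buffer
    public boolean facing;           // true when the frame comes from the front camera
    public boolean wereProcessed;    // reset to false whenever a fresh frame arrives
}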
<android.opengl.GLSurfaceView
    android:id="@+id/fd_glsurface"
    android:layout_width="match_parent"
    android:layout_height="match_parent" />
GLSurfaceView gLSurfaceView = findViewById(R.id.fd_glsurface);
gLSurfaceView.setEGLContextClientVersion(2);
gLSurfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
gLSurfaceView.getHolder().setFormat(PixelFormat.TRANSPARENT);
gLSurfaceView.setRenderer(new OurRenderer());
// redraw continuously instead of waiting for requestRender()
gLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
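CameraView exposes enableView() and disableView(), and GLSurfaceView has its own onResume()/onPause(), so the natural place to call them is the Activity lifecycle. A possible wiring, assuming cameraView and gLSurfaceView are Activity fields obtained via findViewById (these names are illustrative, not from the article):

// Possible lifecycle wiring; cameraView and gLSurfaceView are assumed to be Activity fields.
@Override
protected void onResume() {
    super.onResume();
    gLSurfaceView.onResume();  // restart the GL thread
    cameraView.enableView();   // reopen the camera and restart the preview
}

@Override
protected void onPause() {
    cameraView.disableView();  // release the camera so other apps can use it
    gLSurfaceView.onPause();   // stop the GL thread
    super.onPause();
}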
public class OurRenderer implements GLSurfaceView.Renderer {
    int programNv21ToRgba; // shader program that converts NV21 to RGBA
    int texNV21FromCamera[] = new int[2]; // texture ids for the Y plane and the interleaved UV plane

    // direct buffers holding the Y and UV planes of the current frame
    ByteBuffer bufferY;
    ByteBuffer bufferUV;

    private void initShaders() {
        int vertexShaderId = ShaderUtils.createShader(GLES20.GL_VERTEX_SHADER,
                FileUtils.getStringFromAsset(context.getAssets(), "shaders/vss_2d.glsl"));
        int fragmentShaderId = ShaderUtils.createShader(GLES20.GL_FRAGMENT_SHADER,
                FileUtils.getStringFromAsset(context.getAssets(), "shaders/fss_n21_to_rgba.glsl"));
        programNv21ToRgba = ShaderUtils.createProgram(vertexShaderId, fragmentShaderId);
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        initShaders();
        // create the two textures that will receive the camera planes
        GLES20.glGenTextures(2, texNV21FromCamera, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texNV21FromCamera[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texNV21FromCamera[1]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
    }

    // called on every frame because of GLSurfaceView.RENDERMODE_CONTINUOUSLY
    public void onDrawFrame(GL10 gl) {
        // copy the latest camera frame into the GL textures
        synchronized (frameCamera) {
            mCameraWidth = frameCamera.cameraWidth;
            mCameraHeight = frameCamera.cameraHeight;
            int cameraSize = mCameraWidth * mCameraHeight;
            if (bufferY == null) {
                bufferY = ByteBuffer.allocateDirect(cameraSize);
                bufferUV = ByteBuffer.allocateDirect(cameraSize / 2);
            }
            // NV21 layout: the Y plane first, then the interleaved chroma plane
            bufferY.put(frameCamera.bufferFromCamera, 0, cameraSize);
            bufferY.position(0);
            bufferUV.put(frameCamera.bufferFromCamera, cameraSize, cameraSize / 2);
            bufferUV.position(0);
            // upload the Y plane as a single-channel LUMINANCE texture
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texNV21FromCamera[0]);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, mCameraWidth, mCameraHeight, 0,
                    GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, bufferY);
            GLES20.glFlush();
            // upload the chroma plane as a LUMINANCE_ALPHA texture: V lands in the red channel, U in alpha
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texNV21FromCamera[1]);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE_ALPHA, mCameraWidth / 2, (int) (mCameraHeight * 0.5), 0,
                    GLES20.GL_LUMINANCE_ALPHA, GLES20.GL_UNSIGNED_BYTE, bufferUV);
            GLES20.glFlush();
        }
        // draw the NV21 textures to the screen as RGBA
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); // framebuffer 0 is the GLSurfaceView itself
        GLES20.glViewport(0, 0, widthSurf, heightSurf);
        GLES20.glUseProgram(programNv21ToRgba);
        // attribute and uniform locations
        int vPos = GLES20.glGetAttribLocation(programNv21ToRgba, "vPosition");
        int vTex = GLES20.glGetAttribLocation(programNv21ToRgba, "vTexCoord");
        GLES20.glEnableVertexAttribArray(vPos);
        GLES20.glEnableVertexAttribArray(vTex);
        int ufacing = GLES20.glGetUniformLocation(programNv21ToRgba, "u_facing");
        GLES20.glUniform1i(ufacing, facing1 ? 1 : 0);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(programNv21ToRgba, "cameraWidth"), mCameraWidth);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(programNv21ToRgba, "cameraHeight"), mCameraHeight);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(programNv21ToRgba, "previewWidth"), widthSurf);
        GLES20.glUniform1f(GLES20.glGetUniformLocation(programNv21ToRgba, "previewHeight"), heightSurf);
        ShaderEffectHelper.shaderEffect2dWholeScreen(new Point(0, 0), new Point(widthSurf, heightSurf),
                texNV21FromCamera[0], programNv21ToRgba, vPos, vTex, texNV21FromCamera[1]);
    }
    // ... (widthSurf, heightSurf, frameCamera, facing1, context and onSurfaceChanged are omitted in this excerpt)
}
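initShaders() relies on ShaderUtils.createShader() and ShaderUtils.createProgram(), which the excerpt does not show. A minimal sketch of what such helpers typically look like (an assumption, not the article's actual implementation):

// Assumed minimal helpers for compiling and linking GLSL shaders; not the article's exact code.
public class ShaderUtils {
    public static int createShader(int type, String source) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] status = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (status[0] == 0) {
            Log.e("ShaderUtils", "Compile error: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    public static int createProgram(int vertexShaderId, int fragmentShaderId) {
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShaderId);
        GLES20.glAttachShader(program, fragmentShaderId);
        GLES20.glLinkProgram(program);
        int[] status = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (status[0] == 0) {
            Log.e("ShaderUtils", "Link error: " + GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            return 0;
        }
        return program;
    }
}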
public class ShaderEffectHelper {
    ...
    public static void shaderEffect2dWholeScreen(Point center, Point center2, int texIn, int programId,
                                                 int poss, int texx, Integer texIn2) {
        GLES20.glUseProgram(programId);
        int uColorLocation = GLES20.glGetUniformLocation(programId, "u_Color");
        GLES20.glUniform4f(uColorLocation, 0.0f, 0.0f, 1.0f, 1.0f);
        int uCenter = GLES20.glGetUniformLocation(programId, "uCenter");
        GLES20.glUniform2f(uCenter, (float) center.x, (float) center.y);
        int uCenter2 = GLES20.glGetUniformLocation(programId, "uCenter2");
        GLES20.glUniform2f(uCenter2, (float) center2.x, (float) center2.y);
        // a quad covering the whole screen in normalized device coordinates
        FloatBuffer vertexData = convertArray(new float[]{
                -1, -1,
                -1, 1,
                1, -1,
                1, 1
        });
        // matching texture coordinates
        FloatBuffer texData = convertArray(new float[]{
                0, 0,
                0, 1,
                1, 0,
                1, 1
        });
        GLES20.glVertexAttribPointer(poss, 2, GLES20.GL_FLOAT, false, 0, vertexData);
        GLES20.glVertexAttribPointer(texx, 2, GLES20.GL_FLOAT, false, 0, texData);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texIn);
        GLES20.glUniform1i(GLES20.glGetUniformLocation(programId, "sTexture"), 0);
        if (texIn2 != null) {
            GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texIn2);
            GLES20.glUniform1i(GLES20.glGetUniformLocation(programId, "sTexture2"), 1);
        }
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); // a triangle strip of 4 vertices fills the screen
        GLES20.glFlush();
    }
    ...
}
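The convertArray() helper used above is not shown; it wraps a float[] into a direct FloatBuffer in native byte order, which is what glVertexAttribPointer expects. A likely implementation (assumed, not from the article):

// Assumed implementation of convertArray(): copies a float[] into a direct, native-order FloatBuffer.
private static FloatBuffer convertArray(float[] array) {
    FloatBuffer buffer = ByteBuffer
            .allocateDirect(array.length * 4)   // 4 bytes per float
            .order(ByteOrder.nativeOrder())
            .asFloatBuffer();
    buffer.put(array);
    buffer.position(0);
    return buffer;
}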
attribute vec2 vPosition;
attribute vec2 vTexCoord;
varying vec2 texCoord;
uniform mat4 uMVP; // for 2d triangles
varying vec2 v_TexCoordinate;
varying vec2 v_TexOrigCoordinate;

// simple common 2d shader
void main() {
    texCoord = vTexCoord;
    v_TexCoordinate = vTexCoord;
    v_TexOrigCoordinate = vec2(vPosition.x / 2.0 + 0.5, vPosition.y / 2.0 + 0.5);
    gl_Position = vec4(vPosition.x, vPosition.y, 0.0, 1.0);
}
precision mediump float;
uniform sampler2D sTexture;  // Y texture
uniform sampler2D sTexture2; // UV texture
varying vec2 texCoord;
uniform int u_facing;
uniform float cameraWidth;
uniform float cameraHeight; // remember, the camera image is rotated 90 degrees
uniform float previewWidth;
uniform float previewHeight;

const mat3 yuv2rgb = mat3(
    1, 0, 1.2802,
    1, -0.214821, -0.380589,
    1, 2.127982, 0
);

// shader that converts NV21 to RGBA
void main() {
    vec2 coord = vec2(texCoord.y, texCoord.x);
    if (u_facing == 0)
        coord.x = 1.0 - coord.x;
    coord.y = 1.0 - coord.y;
    // center the picture and keep the aspect ratio while filling the surface
    if (previewWidth / previewHeight > cameraHeight / cameraWidth) {
        coord.x = 0.5 - (0.5 - coord.x) * previewHeight * (cameraHeight / previewWidth) / cameraWidth; // (cameraHeight / cameraWidth) * (previewWidth / previewHeight);
    } else if (previewWidth / previewHeight < cameraHeight / cameraWidth) {
        coord.y = 0.5 - (0.5 - coord.y) * previewWidth * (cameraWidth / previewHeight) / cameraHeight;
    }
    float y = texture2D(sTexture, coord).r;
    float u = texture2D(sTexture2, coord).a;
    float v = texture2D(sTexture2, coord).r;
    vec4 color;
    // one variant of the YUV -> RGB transform; gives a slightly lighter image
    // TODO find the correct transform
    color.r = (1.164 * (y - 0.0625)) + (1.596 * (v - 0.5));
    color.g = (1.164 * (y - 0.0625)) - (0.391 * (u - 0.5)) - (0.813 * (v - 0.5));
    color.b = (1.164 * (y - 0.0625)) + (2.018 * (u - 0.5));
    color.a = 1.0;
    // the matrix variant below overrides the per-channel one above
    vec3 yuv = vec3(1.1643 * y - 0.0627, u - 0.5, v - 0.5);
    vec3 rgb = yuv * yuv2rgb;
    color = vec4(rgb, 1.0);
    gl_FragColor = color;
}
Source: https://habr.com/ru/post/347140/