【OpenGL】Android端Camera与OpenGL融合显示(AR技术基础)

时间:2023-01-30 03:54:14

Android端Camera与OpenGL融合显示(AR技术基础)

【OpenGL】Android端Camera与OpenGL融合显示(AR技术基础)

import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.Display;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.widget.FrameLayout;

import android.hardware.Camera.Parameters;
import android.provider.Settings.System;
import android.app.Activity;
import android.view.Display;
import android.view.Menu;
import android.view.Surface;

import android.view.SurfaceHolder.Callback;
import android.view.ViewGroup.LayoutParams;


import java.io.IOException;

/**
 * Hosts two stacked surfaces inside a FrameLayout: a translucent GLSurfaceView
 * (OpenGL ES 1.x overlay drawn by {@link MyRendererPNG}) on top of a SurfaceView
 * that shows the front-camera preview driven by {@link FrontCamera}.
 * This is the classic camera + GL composition used as an AR basis.
 */
public class MainActivity extends AppCompatActivity {

    // Target OpenGL ES client version (the renderer uses the ES 1.x fixed pipeline).
    private final int CONTEXT_CLIENT_VERSION = 1;

    private PreView mPreView;
    private GLSurfaceView mGLSurfaceView;

    FrameLayout frameLayout;

    private int mWidth = 0;
    private int mHeight = 0;
    private FrontCamera mFrontCamera = new FrontCamera();
    private SurfaceView mSurface;          // camera preview layer
    private SurfaceHolder mHolder;
    private Context mContext;

    GLSurfaceView surfaceView;             // GL overlay layer
    MyRendererPNG render;

    int width, height;                     // raw display size in pixels

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mContext = this;

        // Go fullscreen before inflating the content view.
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        WindowManager wManager = (WindowManager) getSystemService(WINDOW_SERVICE);
        Display display = wManager.getDefaultDisplay();
        width = display.getWidth();   // deprecated API, kept for this legacy sample
        height = display.getHeight();

        // BUG FIX: setContentView() was previously called twice (once before reading
        // the display size and once after), inflating the layout twice and throwing
        // away the first view tree. Inflate exactly once.
        setContentView(R.layout.activity_main);
        frameLayout = (FrameLayout) findViewById(R.id.frameView);

        // 1) GL overlay: RGBA8888 config + 16-bit depth, translucent surface so the
        //    camera preview shows through wherever GL leaves alpha at 0.
        surfaceView = new GLSurfaceView(this);
        surfaceView.setEGLConfigChooser(8, 8, 8, 8, 16, 0);
        // Must be called from the thread owning the SurfaceView's window.
        surfaceView.getHolder().setFormat(PixelFormat.TRANSLUCENT);
        // BUG FIX: without this the GL surface sits at the default (bottom) z-order
        // and is hidden behind the camera preview surface added below.
        surfaceView.setZOrderMediaOverlay(true);
        render = new MyRendererPNG();
        surfaceView.setRenderer(render);
        frameLayout.addView(surfaceView,
                new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));

        // 2) Camera preview layer underneath the GL overlay.
        mSurface = new SurfaceView(this);
        mHolder = mSurface.getHolder();
        mHolder.addCallback(new Callback() {
            @Override
            public void surfaceDestroyed(SurfaceHolder holder) {
                mFrontCamera.stopCamera();
            }

            @Override
            public void surfaceCreated(SurfaceHolder holder) {
                // Open the front camera and register the per-frame preview callback.
                mFrontCamera.openCam(holder, mContext);
                mFrontCamera.everyTime();
            }

            @Override
            public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                // Re-apply preview parameters and (re)start the preview.
                mFrontCamera.pingMuChange();
            }
        });
        // No-op since API 11, but required on older devices for camera preview surfaces.
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        frameLayout.addView(mSurface,
                new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    @Override
    protected void onResume() {
        super.onResume();
        // BUG FIX: GLSurfaceView must be resumed/paused with its activity,
        // otherwise its render thread and EGL context are mismanaged.
        if (surfaceView != null) {
            surfaceView.onResume();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        if (surfaceView != null) {
            surfaceView.onPause();
        }
    }
}

import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.io.IOException;

/**
 * Thin wrapper around the legacy {@code android.hardware.Camera} API that opens
 * the front-facing camera, mirrors/rotates it for the current display rotation,
 * and exposes the latest raw preview frame via {@link #mData}/{@link #w}/{@link #h}.
 */
public class FrontCamera {
    static final String TAG = "CameraTag";
    public Camera mCamera;
    // Latest preview frame bytes (NV21 by default for this API) and its dimensions.
    public byte[] mData = null;
    public int w = 0;
    public int h = 0;
    int mCurrentCamIndex = 0;
    Context mContext;

    /**
     * Opens the first front-facing camera.
     *
     * @return the opened Camera, or {@code null} if no front camera exists or it
     *         failed to open (e.g. already in use).
     */
    public Camera initCamera() {
        Camera cam = null;
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        int cameraCount = Camera.getNumberOfCameras();

        for (int camIdx = 0; camIdx < cameraCount; camIdx++) {
            Camera.getCameraInfo(camIdx, cameraInfo);

            // Front camera; switch the constant to CAMERA_FACING_BACK for the rear one.
            if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                try {
                    Log.i(TAG, "初始化相机 CameraIndex " + camIdx);
                    cam = Camera.open(camIdx);
                    mCurrentCamIndex = camIdx;
                    // BUG FIX: stop after the first successful open; the original
                    // kept looping and could open (and leak) a second front camera.
                    break;
                } catch (RuntimeException e) {
                    Log.e(TAG, "Camera failed to open: " + e.getLocalizedMessage());
                }
            }
        }
        return cam;
    }

    /**
     * Stops the preview, detaches the frame callback, and releases the camera.
     * Safe to call repeatedly.
     */
    public void stopCamera() {
        if (mCamera != null) {
            mCamera.setPreviewCallback(null);
            mCamera.stopPreview();
            mCamera.release();
            Log.i(TAG, "StopCamera: 停止预览并释放资源");
            mCamera = null;
        }
    }

    /**
     * Matches the camera's output rotation to the current display rotation,
     * compensating for the front camera's mirroring.
     * (The hosting Activity is locked to portrait in the manifest, but this
     * keeps the preview correct if that ever changes.)
     *
     * @param activity Activity whose display rotation is consulted
     * @param cameraId id of the opened camera
     * @param camera   the opened camera to configure
     */
    public static void setCameraDisplayOrientation(Activity activity, int cameraId, Camera camera) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);
        int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:   degrees = 0;   break;
            case Surface.ROTATION_90:  degrees = 90;  break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else {
            // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        camera.setDisplayOrientation(result);
    }

    /**
     * Registers a preview callback that captures each frame's bytes and the
     * current preview size into {@link #mData}, {@link #w}, {@link #h}.
     */
    public void everyTime() {
        // BUG FIX: guard against a failed open — the original NPE'd here.
        if (mCamera == null) {
            return;
        }
        mCamera.setPreviewCallback(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                mData = data;
                Camera.Size size = camera.getParameters().getPreviewSize();
                w = size.width;
                h = size.height;
            }
        });
    }

    /**
     * Opens the front camera (if not already open), fixes its orientation, and
     * binds it to the given preview surface.
     *
     * @param holder   surface that will display the preview
     * @param mContext hosting Activity context (used for display rotation)
     */
    public void openCam(SurfaceHolder holder, Context mContext) {
        this.mContext = mContext;
        if (mCamera == null) {
            mCamera = initCamera();
        }

        // BUG FIX: initCamera() returns null when there is no front camera or it
        // failed to open; the original dereferenced mCamera unconditionally.
        if (mCamera == null) {
            Log.e(TAG, "openCam: no front camera available");
            return;
        }

        try {
            // 适配竖排固定角度 (fix portrait orientation)
            Log.i(TAG, "mCamera: " + mCamera.toString());
            setCameraDisplayOrientation((Activity) mContext, mCurrentCamIndex, mCamera);
            mCamera.setPreviewDisplay(holder);
            Log.i(TAG, "开始预览");
        } catch (IOException e) {
            Log.e(TAG, "setPreviewDisplay failed: " + e.getLocalizedMessage());
            mCamera.release();
            mCamera = null;
        }
    }

    /**
     * Applies preview parameters and (re)starts the preview.
     * Called from surfaceChanged().
     */
    public void pingMuChange() {
        if (mCamera == null) {
            return;
        }
        Camera.Parameters parameters = mCamera.getParameters();
        parameters.setPreviewSize(320, 240);
        parameters.setPreviewFrameRate(15);
        parameters.setSceneMode(Camera.Parameters.SCENE_MODE_NIGHT);
        parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        // BUG FIX: the configured parameters were never applied (setParameters was
        // commented out, so every setting above was a no-op). Apply them, but
        // tolerate devices that reject this exact combination.
        try {
            mCamera.setParameters(parameters);
        } catch (RuntimeException e) {
            Log.e(TAG, "setParameters rejected: " + e.getLocalizedMessage());
        }
        mCamera.startPreview();
    }
}

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.content.Context;
import android.opengl.GLSurfaceView.Renderer;

/**
 * Minimal OpenGL ES 1.x renderer: clears to a fully transparent color (so the
 * camera layer below the translucent GL surface stays visible) and draws one
 * per-vertex-colored triangle as the "AR overlay".
 */
public class MyRendererPNG implements Renderer {
    // Triangle vertices as (x, y, z) in model space.
    float[] verteices = new float[] {
            0.1f, 0.6f, 0.0f,
            -0.3f, 0.0f, 0.0f,
            0.3f, 0.1f, 0.0f
    };
    // Per-vertex RGBA in GL_FIXED (16.16) format, where 65536 == 1.0.
    // BUG FIX: alpha was 0 for every vertex, so on this TRANSLUCENT surface the
    // triangle composited fully transparent (invisible over the camera layer);
    // full-intensity channels also used 65535 instead of 1.0 (65536).
    int[] colors = new int[] {
            65536, 0, 0, 65536,
            0, 65536, 0, 65536,
            0, 0, 65536, 65536
    };

    FloatBuffer vBuffer = MemUtil.makeFloatBuffer(verteices);
    IntBuffer cBuffer = MemUtil.makeIntBuffer(colors);

    public MyRendererPNG() {
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
        // Push the triangle just inside the near plane (frustum near = 1).
        gl.glTranslatef(0.0f, 0.0f, -1.0f);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vBuffer);
        gl.glColorPointer(4, GL10.GL_FIXED, 0, cBuffer);
        gl.glDrawArrays(GL10.GL_TRIANGLES, 0, 3);
        // PERF FIX: dropped the glFinish() — GLSurfaceView swaps buffers after this
        // callback returns; forcing a full pipeline flush each frame only stalls.
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        // BUG FIX: guard against a zero height (possible during surface teardown),
        // which would make the aspect ratio infinite and the frustum invalid.
        if (height == 0) {
            height = 1;
        }
        float ratio = (float) width / height;
        gl.glFrustumf(-ratio, ratio, -1, 1, 1, 9);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        gl.glDisable(GL10.GL_DITHER);
        gl.glHint(GL10.GL_PERSPECTIVE_CORRECTION_HINT, GL10.GL_FASTEST);
        // Alpha 0 clear keeps the GL surface transparent outside the triangle.
        gl.glClearColor(0, 0, 0, 0);
        gl.glShadeModel(GL10.GL_SMOOTH);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
    }
}
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;

/**
 * Helpers that allocate direct, native-byte-order NIO buffers preloaded with
 * array data and rewound to position 0 — the form OpenGL ES pointer calls
 * (glVertexPointer / glColorPointer) require.
 */
public class MemUtil {

    /**
     * Copies {@code arr} into a freshly allocated direct FloatBuffer.
     *
     * @param arr source values (not modified)
     * @return a direct, native-order buffer containing {@code arr}, position 0
     */
    public static FloatBuffer makeFloatBuffer(float[] arr) {
        FloatBuffer buffer = ByteBuffer
                .allocateDirect(arr.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        buffer.put(arr);
        buffer.position(0);
        return buffer;
    }

    /**
     * Copies {@code arr} into a freshly allocated direct IntBuffer.
     *
     * @param arr source values (not modified)
     * @return a direct, native-order buffer containing {@code arr}, position 0
     */
    public static IntBuffer makeIntBuffer(int[] arr) {
        IntBuffer buffer = ByteBuffer
                .allocateDirect(arr.length * 4)
                .order(ByteOrder.nativeOrder())
                .asIntBuffer();
        buffer.put(arr);
        buffer.position(0);
        return buffer;
    }
}