使用GLsurfaceview修改带效果的相机预览

Posted

tags:

篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了使用GLsurfaceview修改带效果的相机预览相关的知识,希望对你有一定的参考价值。

我创建了一个自定义相机应用程序,我使用GLSurfaceView和GLSurfaceView.Renderer创建相机预览。我已成功完成它,现在我正在尝试在相机预览上应用滤镜(棕褐色模式,黑白等)。如何在相机预览上应用效果。

正常视图(图片:enter image description here)

应用滤镜后(图片:enter image description here)

这是我的GLSurfaceView.Renderer

public class MainRenderer implements GLSurfaceView.Renderer,SurfaceTexture.OnFrameAvailableListener{

private final String vss =
        "attribute vec2 vPosition;
" +
                "attribute vec2 vTexCoord;
" +
                "varying vec2 texCoord;
" +
                "void main() {
" +
                "  texCoord = vTexCoord;
" +
                "  gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );
" +
                "}";
private final String fss =
        "#extension GL_OES_EGL_image_external : require
" +
                "precision mediump float;
" +
                "uniform samplerExternalOES sTexture;
" +
                "varying vec2 texCoord;
" +
                "void main() {
" +
                "  gl_FragColor = texture2D(sTexture,texCoord);
" +
                "}";

private int[] hTex;
private FloatBuffer pVertex;
private FloatBuffer pTexCoord;
private int hProgram;

private Camera mCamera;
private SurfaceTexture mSTexture;

private boolean mUpdateST = false;

private CameraPreview mView;
Context mContext;
private String fileName;
private File sdRoot;
private String dir;
private ExifInterface exif;
private int orientation;
private android.hardware.Camera.PictureCallback pictureCallBack = new Camera.PictureCallback() {

    public void onPictureTaken(byte[] data, Camera camera) {

        fileName = "IMG_" + new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()).toString() + ".jpg";
        File mkDir = new File(sdRoot, dir);
        mkDir.mkdirs();
        File pictureFile = new File(sdRoot, dir + fileName);
        try {
            FileOutputStream purge = new FileOutputStream(pictureFile);
            purge.write(data);
            purge.close();
        } catch (FileNotFoundException e) {
            Log.d("DG_DEBUG", "File not found: " + e.getMessage());
        } catch (IOException e) {
            Log.d("DG_DEBUG", "Error accessing file: " + e.getMessage());
        }
        // Adding Exif data for the orientation. For some strange reason the
        // ExifInterface class takes a string instead of a file.
        try {
            exif = new ExifInterface("/sdcard/" + dir + fileName);
            exif.setAttribute(ExifInterface.TAG_ORIENTATION, "" + orientation);
            exif.saveAttributes();
        } catch (IOException e) {
            e.printStackTrace();
        }
        mContext.sendBroadcast(new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE, Uri.fromFile(pictureFile)));

    }
};;

public MainRenderer(CameraPreview cameraPreview, Context context) {
    mView = cameraPreview;
    mContext = context;
    mCamera = getCameraInstance();
    float[] vtmp = { 1.0f, -1.0f, -1.0f, -1.0f, 1.0f, 1.0f, -1.0f, 1.0f };




    float[] ttmp = { 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, 1.0f };
    pVertex = ByteBuffer.allocateDirect(8 * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    pVertex.put ( vtmp );
    pVertex.position(0);
    pTexCoord = ByteBuffer.allocateDirect(8*4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    pTexCoord.put ( ttmp );
    pTexCoord.position(0);
}
public void close()
{
    mUpdateST = false;
    mSTexture.release();
    mCamera.stopPreview();
    mCamera.release();
    mCamera = null;
    deleteTex();
}
public void takePicture(File file,String dir,int orientation){
    this.orientation = orientation;
    sdRoot = file;
    this.dir=dir;
    mCamera.takePicture(null,null,pictureCallBack);
}
private void initTex() {
    hTex = new int[1];
    GLES20.glGenTextures(1, hTex, 0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
    GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}

private void deleteTex() {
    GLES20.glDeleteTextures ( 1, hTex, 0 );
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    initTex();
    mSTexture = new SurfaceTexture ( hTex[0] );
    mSTexture.setOnFrameAvailableListener(this);
    try {
        mCamera.setPreviewTexture(mSTexture);
    } catch ( IOException ioe ) {
    }
    catch (Exception e){
    }
    GLES20.glClearColor ( 1.0f, 1.0f, 0.0f, 1.0f );
    hProgram = loadShader ( vss, fss );
}

public Camera getCameraInstance() {
    Camera c = null;
    Camera.CameraInfo ci = new Camera.CameraInfo();
    try {
        for(int i=0;i<Camera.getNumberOfCameras();i++){
            Camera.getCameraInfo(i,ci);
            if(ci.facing== Camera.CameraInfo.CAMERA_FACING_FRONT)
                c=Camera.open(i);
        }
    } catch (Exception e) {
    }
    return c;

}
private static int loadShader ( String vss, String fss ) {
    int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
    GLES20.glShaderSource(vshader, vss);
    GLES20.glCompileShader(vshader);
    int[] compiled = new int[1];
    GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", "Could not compile vshader");
        Log.v("Shader", "Could not compile vshader:"+GLES20.glGetShaderInfoLog(vshader));
        GLES20.glDeleteShader(vshader);
        vshader = 0;
    }

    int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
    GLES20.glShaderSource(fshader, fss);
    GLES20.glCompileShader(fshader);
    GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0);
    if (compiled[0] == 0) {
        Log.e("Shader", "Could not compile fshader");
        Log.v("Shader", "Could not compile fshader:"+GLES20.glGetShaderInfoLog(fshader));
        GLES20.glDeleteShader(fshader);
        fshader = 0;
    }

    int program = GLES20.glCreateProgram();
    GLES20.glAttachShader(program, vshader);
    GLES20.glAttachShader(program, fshader);
    GLES20.glLinkProgram(program);

    return program;
}

@Override
public void onSurfaceChanged ( GL10 unused, int width, int height ) {
    GLES20.glViewport( 0, 0, width, height );
    Camera.Parameters param = mCamera.getParameters();
    List<Camera.Size> psize = param.getSupportedPreviewSizes();
    if ( psize.size() > 0 ) {
        int i;
        for ( i = 0; i < psize.size(); i++ ) {
            if ( psize.get(i).width < width || psize.get(i).height < height )
                break;
        }
        if ( i > 0 )
            i--;
        param.setPreviewSize(psize.get(i).width, psize.get(i).height);
        //Log.i("mr","ssize: "+psize.get(i).width+", "+psize.get(i).height);
    }
    param.set("orientation", "portrait");
    mCamera.setParameters ( param );
    mCamera.startPreview();
}

@Override
public void onDrawFrame ( GL10 unused ) {
    GLES20.glClear( GLES20.GL_COLOR_BUFFER_BIT );

    synchronized(this) {
        if ( mUpdateST ) {
            mSTexture.updateTexImage();
            mUpdateST = false;
        }
    }

    GLES20.glUseProgram(hProgram);


    int ph = GLES20.glGetAttribLocation(hProgram, "vPosition");
    int tch = GLES20.glGetAttribLocation ( hProgram, "vTexCoord" );
    int th = GLES20.glGetUniformLocation ( hProgram, "sTexture" );

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, hTex[0]);
    GLES20.glUniform1i(th, 0);

    GLES20.glVertexAttribPointer(ph, 2, GLES20.GL_FLOAT, false, 4*2, pVertex);
    GLES20.glVertexAttribPointer(tch, 2, GLES20.GL_FLOAT, false, 4*2, pTexCoord );
    GLES20.glEnableVertexAttribArray(ph);
    GLES20.glEnableVertexAttribArray(tch);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glFlush();
}

@Override
public synchronized void onFrameAvailable ( SurfaceTexture st ) {
    mUpdateST = true;
    mView.requestRender();
}

}

答案

在OpenGL中应用滤镜基本上只是对片段着色器中的gl_FragColor值执行一些操作。某些复杂的滤镜可能需要在顶点着色器中执行计算(尺寸,坐标)。

例如,如果要应用黑/白滤镜,请更改片段着色器,如下所示。

private final String fss =
    "#extension GL_OES_EGL_image_external : require
" +
            "precision mediump float;
" +
            "uniform samplerExternalOES sTexture;
" +
            "varying vec2 texCoord;
" +
            "void main() {
" +
            "  vec4 tc = texture2D(sTexture, texCoord);
" +
            "  float luminance = 0.3 * tc.r + 0.59 * tc.g + 0.11 * tc.b;
" +
            "  gl_FragColor = vec4(luminance, luminance, luminance, 1.0);
" +
            "}";

您可以在Android GPUImage中找到更多示例和过滤效果。

以上是关于使用GLsurfaceview修改带效果的相机预览的主要内容,如果未能解决你的问题,请参考以下文章

调整大小后 GLSurfaceView 相机预览被打乱

使用 OpenGL 2.0 API 在 GLSurfaceView 上将 Android 相机预览旋转 90 度

Android 绘制相机预览

Android OpenGL基础相机预览及滤镜

转玩转Android Camera开发:国内首发---使用GLSurfaceView预览Camera 基础拍照demo

如何使用实时贴纸制作自定义相机