[OpenGL] Writing a Panoramic Video Player for Android from Scratch — 2.2 Playing a Flat Video with GLSurfaceView and MediaPlayer (Part 2)
Posted by Red风信子
Updating the shader code
Since we are now feeding frames through a SurfaceTexture, the shader code needs to be updated. fragment_shader.glsl:
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTexCoord;
uniform samplerExternalOES sTexture;
void main() {
    gl_FragColor = texture2D(sTexture, vTexCoord);
}
Here we use samplerExternalOES instead of the sampler2D we used before. Note that the #extension directive on the first line is mandatory.
What is samplerExternalOES for? It is exactly what was mentioned earlier: it cooperates with SurfaceTexture to update the texture and handle the format conversion. For the details you can read the OpenGL ES extension spec directly; I included an excerpt for reference (though I suspect hardly anyone reads it...):
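(My own addition, not from the original post: if you want to confirm at runtime that the extension backing samplerExternalOES is actually available, a minimal check like the sketch below can be dropped into GLRenderer, which already imports android.opengl.GLES20. It must be called on the GL thread, for example inside onSurfaceCreated.)

// Optional sanity check (not in the original tutorial): query the GL
// extension string and look for the external-image extension that
// samplerExternalOES depends on. Only valid with a current GL context.
private static boolean supportsExternalOES() {
    String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS);
    return extensions != null && extensions.contains("GL_OES_EGL_image_external");
}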
vertex_shader.glsl
attribute vec4 aPosition;
attribute vec4 aTexCoord;
varying vec2 vTexCoord;
uniform mat4 uMatrix;
uniform mat4 uSTMatrix;
void main() {
    vTexCoord = (uSTMatrix * aTexCoord).xy;
    gl_Position = uMatrix * aPosition;
}
In the vertex shader, add the corresponding uSTMatrix uniform, and change aTexCoord to a four-component vector so it can be multiplied by the 4x4 matrix.
Updating GLRenderer
In GLRenderer, obtain uSTMMatrixHandle the same way as the other handles:
uSTMMatrixHandle = GLES20.glGetUniformLocation(programId, "uSTMatrix");
On every draw, pass mSTMatrix to OpenGL in the same way:
GLES20.glUniformMatrix4fv(uSTMMatrixHandle, 1, false, mSTMatrix, 0);
Because we are using an external texture, the texture target we bind also has to change:
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,textureId);
So far we have only set a boolean flag to indicate that the texture can be updated, without actually doing the update, so add the following code at the very beginning of onDrawFrame:
synchronized (this) {
    if (updateSurface) {
        surfaceTexture.updateTexImage();
        surfaceTexture.getTransformMatrix(mSTMatrix);
        updateSurface = false;
    }
}
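(If you are curious what this transform matrix actually contains, the following debug helper is a small sketch of my own, not part of the original code; call it right after getTransformMatrix. What the matrix holds varies by device and decoder, but it commonly encodes a flip of the t coordinate.)

// Hypothetical debug aid: dump the 4x4 transform matrix returned by
// SurfaceTexture (column-major, as used by android.opengl.Matrix).
private void logTransformMatrix(float[] st) {
    for (int col = 0; col < 4; col++) {
        Log.d(TAG, String.format("uSTMatrix col %d: %.2f %.2f %.2f %.2f",
                col, st[col * 4], st[col * 4 + 1], st[col * 4 + 2], st[col * 4 + 3]));
    }
}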
To make later steps easier, I switched the drawing mode from triangles to a triangle strip.
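(For reference, this is my own illustration rather than something from the original post: with GL_TRIANGLE_STRIP a full-screen quad needs only its four corners in zig-zag order, whereas GL_TRIANGLES would need six vertices or an index buffer.)

// Four corners in zig-zag order: GL_TRIANGLE_STRIP builds the quad from
// the triangles (v0, v1, v2) and (v1, v2, v3), so no index buffer is needed.
float[] stripVertices = {
         1f,  1f, 0f,   // v0: top right
        -1f,  1f, 0f,   // v1: top left
         1f, -1f, 0f,   // v2: bottom right
        -1f, -1f, 0f    // v3: bottom left
};
// With GL_TRIANGLES you would instead draw the same corners via
// glDrawElements with indices such as {0, 1, 2, 1, 3, 2}.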
Since the code is already fairly long, I will paste the complete GLRenderer at the end; if you run into problems, compare your code against it.
Now let's try it: point the renderer at a video file directly and see whether it displays correctly (don't forget to declare the storage permission):
glSurfaceView.setRenderer(new GLRenderer(this, Environment.getExternalStorageDirectory().getPath()+"/360Video/video.mp4"));
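(A note on the permission, my own sketch rather than part of the original post: besides the READ_EXTERNAL_STORAGE entry in AndroidManifest.xml, on Android 6.0+ the permission also has to be requested at runtime before the renderer tries to open the file. Assuming the support library's compat helpers are available, a minimal version inside the Activity might look like this.)

// Assumed manifest entry:
// <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
//
// Needs: android.Manifest, android.content.pm.PackageManager, and the
// support-library (or AndroidX) ContextCompat / ActivityCompat classes.
private static final int REQUEST_READ_STORAGE = 1; // arbitrary request code

private void ensureStoragePermission() {
    if (ContextCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE)
            != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(this,
                new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},
                REQUEST_READ_STORAGE);
    }
}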
This is a 2:1 panoramic video; you can see that it gets squashed and the picture is upside down. Also, if you press the HOME key, the video does not stop playing on its own. We will deal with aspect-ratio handling and the player lifecycle in the next part.
GLRenderer.java
package com.martin.ads.panoramaopengltutorial;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
/**
 * Created by Ads on 2016/11/13.
 */
public class GLRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "GLRenderer";
    private Context context;
    private int aPositionHandle;
    private int programId;
    private FloatBuffer vertexBuffer;
    private final float[] vertexData = {
            1f, 1f, 0f,
            -1f, 1f, 0f,
            1f, -1f, 0f,
            -1f, -1f, 0f
    };
    private final float[] projectionMatrix = new float[16];
    private int uMatrixHandle;

    private final float[] textureVertexData = {
            1f, 0f,
            0f, 0f,
            1f, 1f,
            0f, 1f
    };
    private FloatBuffer textureVertexBuffer;
    private int uTextureSamplerHandle;
    private int aTextureCoordHandle;
    private int textureId;
    private SurfaceTexture surfaceTexture;
    private MediaPlayer mediaPlayer;
    private float[] mSTMatrix = new float[16];
    private int uSTMMatrixHandle;

    private boolean updateSurface;
    private boolean playerPrepared;
    private int screenWidth, screenHeight;

    public GLRenderer(Context context, String videoPath) {
        this.context = context;
        playerPrepared = false;
        synchronized (this) {
            updateSurface = false;
        }
        vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(vertexData);
        vertexBuffer.position(0);

        textureVertexBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer()
                .put(textureVertexData);
        textureVertexBuffer.position(0);

        mediaPlayer = new MediaPlayer();
        try {
            mediaPlayer.setDataSource(context, Uri.parse(videoPath));
        } catch (IOException e) {
            e.printStackTrace();
        }
        mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        mediaPlayer.setLooping(true);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        String vertexShader = ShaderUtils.readRawTextFile(context, R.raw.vertex_shader);
        String fragmentShader = ShaderUtils.readRawTextFile(context, R.raw.fragment_shader);
        programId = ShaderUtils.createProgram(vertexShader, fragmentShader);
        aPositionHandle = GLES20.glGetAttribLocation(programId, "aPosition");
        uMatrixHandle = GLES20.glGetUniformLocation(programId, "uMatrix");
        uSTMMatrixHandle = GLES20.glGetUniformLocation(programId, "uSTMatrix");
        uTextureSamplerHandle = GLES20.glGetUniformLocation(programId, "sTexture");
        aTextureCoordHandle = GLES20.glGetAttribLocation(programId, "aTexCoord");

        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        textureId = textures[0];
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
        ShaderUtils.checkGlError("glBindTexture mTextureID");

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);

        surfaceTexture = new SurfaceTexture(textureId);
        surfaceTexture.setOnFrameAvailableListener(this);
        Surface surface = new Surface(surfaceTexture);
        mediaPlayer.setSurface(surface);
        surface.release();

        if (!playerPrepared) {
            try {
                mediaPlayer.prepare();
                playerPrepared = true;
            } catch (IOException t) {
                Log.e(TAG, "media player prepare failed");
            }
            mediaPlayer.start();
            playerPrepared = true;
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        screenWidth = width;
        screenHeight = height;
        float ratio = width > height ?
                (float) width / height :
                (float) height / width;
        if (width > height) {
            Matrix.orthoM(projectionMatrix, 0, -ratio, ratio, -1f, 1f, -1f, 1f);
        } else {
            Matrix.orthoM(projectionMatrix, 0, -1f, 1f, -ratio, ratio, -1f, 1f);
        }
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
        synchronized (this) {
            if (updateSurface) {
                surfaceTexture.updateTexImage();
                surfaceTexture.getTransformMatrix(mSTMatrix);
                updateSurface = false;
            }
        }
        GLES20.glUseProgram(programId);
        GLES20.glUniformMatrix4fv(uMatrixHandle, 1, false, projectionMatrix, 0);
        GLES20.glUniformMatrix4fv(uSTMMatrixHandle, 1, false, mSTMatrix, 0);

        vertexBuffer.position(0);
        GLES20.glEnableVertexAttribArray(aPositionHandle);
        GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                12, vertexBuffer);

        textureVertexBuffer.position(0);
        GLES20.glEnableVertexAttribArray(aTextureCoordHandle);
        GLES20.glVertexAttribPointer(aTextureCoordHandle, 2, GLES20.GL_FLOAT, false, 8, textureVertexBuffer);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
        GLES20.glUniform1i(uTextureSamplerHandle, 0);

        GLES20.glViewport(0, 0, screenWidth, screenHeight);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }

    @Override
    synchronized public void onFrameAvailable(SurfaceTexture surface) {
        updateSurface = true;
    }
}
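(For completeness, here is a rough sketch of how the hosting Activity from the earlier parts can wire this renderer up. The Activity itself is not reproduced in this post, so the class name and setup details below are assumptions, not the author's code.)

// Minimal hosting Activity sketch (names assumed, not shown in this post).
import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.os.Environment;

public class MainActivity extends Activity {
    private GLSurfaceView glSurfaceView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        glSurfaceView = new GLSurfaceView(this);
        // The shaders above target OpenGL ES 2.0, so request an ES 2.0 context
        // before setting the renderer.
        glSurfaceView.setEGLContextClientVersion(2);
        glSurfaceView.setRenderer(new GLRenderer(this,
                Environment.getExternalStorageDirectory().getPath() + "/360Video/video.mp4"));
        setContentView(glSurfaceView);
    }
}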