如何使用GPUVideo-android添加Watermark(水印)并同时对视频应用滤镜?
Posted
tags:
篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了如何使用GPU Video-android获取Watermark并在视频上进行过滤?相关的知识,希望对你有一定的参考价值。
我正在研究视频编辑android应用程序,并希望在视频上同时应用过滤器和水印。我一直在为这个https://github.com/MasayukiSuda/GPUVideo-android使用GPUVideo-android lib
问题在于,使用这个库时我无法同时应用水印(watermark)和滤镜(filter)。
下面是我的一个过滤器的FRAGMENT_SHADER
private static final String FRAGMENT_SHADER =
"precision mediump float;" +
"varying vec2 vTextureCoord;" +
"uniform lowp sampler2D sTexture;" +
"const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
"void main() {" +
" vec4 FragColor = texture2D(sTexture, vTextureCoord);
" +
" gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));
" +
" gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));
" +
" gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));
" +
"}";
and below is for watermark
protected static final String DEFAULT_FRAGMENT_SHADER =
"precision mediump float;
" +
"varying vec2 vTextureCoord;
" +
"uniform lowp sampler2D sTexture;
" +
"uniform lowp sampler2D oTexture;
" +
"void main() {
" +
" lowp vec4 textureColor = texture2D(sTexture, vTextureCoord);
" +
" lowp vec4 textureColor2 = texture2D(oTexture, vTextureCoord);
" +
"
" +
" gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
" +
"}
";
我希望将两种效果结合在一起:
请帮帮我。我尝试创建一个扩展 GlFilter 类的自定义过滤器,下面是我的 SepiaFilter 和 OverlayFilter 的代码。
public class SepiaFilter extends OverlayFilter {
private Bitmap bitmap;
private Position position = Position.LEFT_TOP;
private static final String FRAGMENT_SHADER =
"precision mediump float;" +
"varying vec2 vTextureCoord;" +
"uniform lowp sampler2D sTexture;" +
"const highp vec3 weight = vec3(0.2125, 0.7154, 0.0721);" +
"void main() {" +
" vec4 FragColor = texture2D(sTexture, vTextureCoord);
" +
" gl_FragColor.r = dot(FragColor.rgb, vec3(.393, .769, .189));
" +
" gl_FragColor.g = dot(FragColor.rgb, vec3(.349, .686, .168));
" +
" gl_FragColor.b = dot(FragColor.rgb, vec3(.272, .534, .131));
" +
"}";
public SepiaFilter(Bitmap bitmap)
{
super(FRAGMENT_SHADER);
this.bitmap = bitmap;
}
public SepiaFilter(Bitmap bitmap, Position position)
{
super(FRAGMENT_SHADER);
this.bitmap = bitmap;
this.position = position;
}
@Override
protected void drawCanvas(Canvas canvas) {
if (bitmap != null && !bitmap.isRecycled()) {
switch (position) {
case LEFT_TOP:
canvas.drawBitmap(bitmap, 0, 0, null);
break;
case LEFT_BOTTOM:
canvas.drawBitmap(bitmap, 0, canvas.getHeight() - bitmap.getHeight(), null);
break;
case RIGHT_TOP:
canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), 0, null);
break;
case RIGHT_BOTTOM:
canvas.drawBitmap(bitmap, canvas.getWidth() - bitmap.getWidth(), canvas.getHeight() - bitmap.getHeight(), null);
break;
}
}
}
public enum Position {
LEFT_TOP,
LEFT_BOTTOM,
RIGHT_TOP,
RIGHT_BOTTOM
}
}
/**
 * Base filter that renders an arbitrary Canvas overlay (e.g. a watermark)
 * into a second GL texture ({@code oTexture}) each frame, so a subclass's
 * fragment shader can composite it over the video frame ({@code sTexture}).
 *
 * <p>Subclasses implement {@link #drawCanvas(Canvas)} and pass a fragment
 * shader that declares {@code uniform lowp sampler2D oTexture}.
 */
public abstract class OverlayFilter extends GlFilter {

    /** Texture unit index used for the overlay texture (GL_TEXTURE3 below). */
    private static final int OVERLAY_TEXTURE_UNIT = 3;

    /** GL texture name holding the rasterized overlay. */
    private int[] textures = new int[1];
    /** CPU-side bitmap the overlay is drawn into before upload; lazily (re)created. */
    private Bitmap bitmap = null;
    /** Current frame resolution; overlay bitmap is sized to match. */
    protected Size inputResolution = new Size(1280, 720);

    /**
     * @param FRAGMENT_SHADER fragment shader source; must sample both
     *                        {@code sTexture} and {@code oTexture}
     */
    public OverlayFilter(String FRAGMENT_SHADER) {
        super(DEFAULT_VERTEX_SHADER, FRAGMENT_SHADER);
    }

    public void setResolution(Size resolution) {
        this.inputResolution = resolution;
    }

    @Override
    public void setFrameSize(int width, int height) {
        super.setFrameSize(width, height);
        setResolution(new Size(width, height));
    }

    /** Recreates the overlay bitmap at the current input resolution. */
    private void createBitmap() {
        releaseBitmap(bitmap);
        bitmap = Bitmap.createBitmap(
                inputResolution.getWidth(), inputResolution.getHeight(),
                Bitmap.Config.ARGB_8888);
    }

    @Override
    public void setup() {
        super.setup();
        // Allocate and configure the overlay texture once per GL context.
        GLES20.glGenTextures(1, textures, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        createBitmap();
    }

    @Override
    public void onDraw() {
        // Recreate the bitmap if missing or if the frame size changed.
        if (bitmap == null
                || bitmap.getWidth() != inputResolution.getWidth()
                || bitmap.getHeight() != inputResolution.getHeight()) {
            createBitmap();
        }

        // Clear to transparent, flip vertically (Canvas y-axis is top-down,
        // GL texture coordinates are bottom-up), and let the subclass draw.
        bitmap.eraseColor(Color.argb(0, 0, 0, 0));
        Canvas bitmapCanvas = new Canvas(bitmap);
        bitmapCanvas.scale(1, -1, bitmapCanvas.getWidth() / 2, bitmapCanvas.getHeight() / 2);
        drawCanvas(bitmapCanvas);

        // Upload the overlay and bind it to the oTexture sampler uniform.
        int oTextureUniform = getHandle("oTexture");
        GLES20.glActiveTexture(GLES20.GL_TEXTURE3);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
        if (bitmap != null && !bitmap.isRecycled()) {
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, bitmap, 0);
        }
        GLES20.glUniform1i(oTextureUniform, OVERLAY_TEXTURE_UNIT);
    }

    /** Subclass hook: draw the overlay content (e.g. watermark) for this frame. */
    protected abstract void drawCanvas(Canvas canvas);

    /**
     * Recycles the bitmap if it is still alive. (The original also assigned
     * {@code null} to the parameter, which has no effect outside the method
     * and has been removed.)
     */
    public static void releaseBitmap(Bitmap bitmap) {
        if (bitmap != null && !bitmap.isRecycled()) {
            bitmap.recycle();
        }
    }
}
// Apply the combined sepia + watermark filter to the player.
// (The original line "GPUPlayerView gpuPlayerView.setGlFilter(...)" fused a
// variable declaration onto a method call, which does not compile.)
Bitmap watermark = BitmapFactory.decodeResource(getResources(), R.drawable.shashankimg);
gpuPlayerView.setGlFilter(new SepiaFilter(watermark));
答案
使用 GlFilterGroup 类即可把多个滤镜(例如水印 + 滤镜)串联起来,用法写在该库的 README 中;不需要自己手动合并着色器代码。
以上是关于如何使用GPU Video-android获取Watermark并在视频上进行过滤?的主要内容,如果未能解决你的问题,请参考以下文章
如何在多个 GPU 节点上获取分配给 SLURM 作业的 GPU ID?
如何使用 Python 和 Numba 获取 GPU 中的 CUDA 内核数量?
如何使用 C++ 从 macOS 的 Activity Monitor 应用程序获取 CPU、GPU 和 RAM 使用情况?