How to decode images with FFmpeg on Android

This post covers how to decode video frames with FFmpeg on Android, from building the FFmpeg libraries through calling the decoder from JNI.

Step 1. Cross-compile FFmpeg for Android with the NDK. A successful build and install ends with output like the following:

CC libavcodec/log2_tab.o
CC libavutil/log2_tab.o
CC libswresample/log2_tab.o
AR libavcodec/libavcodec.a
LD libavutil/libavutil.so.52
AR libavutil/libavutil.a
AR libswresample/libswresample.a
LD libavcodec/libavcodec.so.55
LD libswresample/libswresample.so.0
LD libswscale/libswscale.so.2
LD libavformat/libavformat.so.55
INSTALL libavformat/libavformat.a
INSTALL libavformat/libavformat.so
STRIP install-libavformat-shared
INSTALL libavcodec/libavcodec.a
INSTALL libavcodec/libavcodec.so
STRIP install-libavcodec-shared
INSTALL libswresample/libswresample.a
INSTALL libswresample/libswresample.so
STRIP install-libswresample-shared
INSTALL libswscale/libswscale.a
INSTALL libswscale/libswscale.so
STRIP install-libswscale-shared
INSTALL libavutil/libavutil.a
INSTALL libavutil/libavutil.so
STRIP install-libavutil-shared
INSTALL libavformat/avformat.h
INSTALL libavformat/avio.h
INSTALL libavformat/version.h
INSTALL libavformat/libavformat.pc
INSTALL libavcodec/avcodec.h
INSTALL libavcodec/avfft.h
INSTALL libavcodec/dxva2.h
INSTALL libavcodec/old_codec_ids.h
INSTALL libavcodec/vaapi.h
INSTALL libavcodec/vda.h
INSTALL libavcodec/vdpau.h
INSTALL libavcodec/version.h
INSTALL libavcodec/xvmc.h
INSTALL libavcodec/libavcodec.pc
INSTALL libswresample/swresample.h
INSTALL libswresample/version.h
INSTALL libswresample/libswresample.pc
INSTALL libswscale/swscale.h
INSTALL libswscale/version.h
INSTALL libswscale/libswscale.pc
INSTALL libavutil/adler32.h
INSTALL libavutil/aes.h
INSTALL libavutil/attributes.h
INSTALL libavutil/audio_fifo.h
INSTALL libavutil/audioconvert.h
INSTALL libavutil/avassert.h
INSTALL libavutil/avstring.h
INSTALL libavutil/avutil.h
INSTALL libavutil/base64.h
INSTALL libavutil/blowfish.h
INSTALL libavutil/bprint.h
INSTALL libavutil/bswap.h
INSTALL libavutil/buffer.h
INSTALL libavutil/channel_layout.h
INSTALL libavutil/common.h
INSTALL libavutil/cpu.h
INSTALL libavutil/crc.h
INSTALL libavutil/error.h
INSTALL libavutil/eval.h
INSTALL libavutil/fifo.h
INSTALL libavutil/file.h
INSTALL libavutil/frame.h
INSTALL libavutil/hmac.h
INSTALL libavutil/imgutils.h
INSTALL libavutil/intfloat.h
INSTALL libavutil/intfloat_readwrite.h
INSTALL libavutil/intreadwrite.h
INSTALL libavutil/lfg.h
INSTALL libavutil/log.h
INSTALL libavutil/mathematics.h
INSTALL libavutil/md5.h
INSTALL libavutil/mem.h
INSTALL libavutil/murmur3.h
INSTALL libavutil/dict.h
INSTALL libavutil/old_pix_fmts.h
INSTALL libavutil/opt.h
INSTALL libavutil/parseutils.h
INSTALL libavutil/pixdesc.h
INSTALL libavutil/pixfmt.h
INSTALL libavutil/random_seed.h
INSTALL libavutil/rational.h
INSTALL libavutil/ripemd.h
INSTALL libavutil/samplefmt.h
INSTALL libavutil/sha.h
INSTALL libavutil/sha512.h
INSTALL libavutil/time.h
INSTALL libavutil/timecode.h
INSTALL libavutil/timestamp.h
INSTALL libavutil/version.h
INSTALL libavutil/xtea.h
INSTALL libavutil/lzo.h
INSTALL libavutil/avconfig.h
INSTALL libavutil/libavutil.pc

After the build finishes, the individual libraries are linked into a single libffmpeg.so, which is the library the Android project will load.

Step 2. Create a new Android project. Under the project root create a jni directory, and inside it an ffmpeg directory that holds the FFmpeg headers. In the ffmpeg directory, create an Android.mk that declares the prebuilt FFmpeg shared library:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
LOCAL_SRC_FILES := /path/to/build/output/libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)

Step 3. Under jni, create an Application.mk and a top-level Android.mk that set the build order, the target platform, and the CPU ABI.

Application.mk

APP_ABI := armeabi
APP_PLATFORM := android-9
Android.mk

include $(call all-subdir-makefiles)
Step 4. Write the JNI glue file that binds the Java methods to the C functions:

/*
* ffmpeg_jni.c
*
* Created on: Sep 1, 2014
* Author: clarck
*/
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <jni.h>

#include "../include/ffmpeg_logger.h"
#include "../include/ffmpeg.h"

// Fully qualified name of the Java class whose native methods are registered here
#define JNIREG_CLASS "com/clarck/android/ffmpeg/MainActivity"

JNIEXPORT void JNICALL native_setDataSource(JNIEnv *env, jclass clazz, jstring path) {
    char *filepath = ffmpeg_jstringTostr(env, path);
    ffmpeg_setDataSource(filepath);
}

// Mapping between the Java method and the native function
static JNINativeMethod method_table[] = {
    { "setDataSource", "(Ljava/lang/String;)V", (void *) native_setDataSource }
};

// Register the native methods with the given Java class
static int registerNativeMethods(JNIEnv *env, const char *className,
        JNINativeMethod *gMethods, int numMethods) {
    jclass clazz;
    clazz = (*env)->FindClass(env, className);
    if (clazz == NULL) {
        return JNI_FALSE;
    }

    if ((*env)->RegisterNatives(env, clazz, gMethods, numMethods) < 0) {
        return JNI_FALSE;
    }

    return JNI_TRUE;
}

// Perform the registration
int register_ndk_load(JNIEnv *env) {
    return registerNativeMethods(env, JNIREG_CLASS, method_table,
            (int) (sizeof(method_table) / sizeof(method_table[0])));
}

JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    JNIEnv *env = NULL;
    jint result = -1;

    if ((*vm)->GetEnv(vm, (void **) &env, JNI_VERSION_1_6) != JNI_OK) {
        return result;
    }

    register_ndk_load(env);

    // Return the JNI version we use
    return JNI_VERSION_1_6;
}

Step 5. Write the FFmpeg decoding code itself:

/*
* ffmpeg.c
*
* Created on: Sep 1, 2014
* Author: clarck
*/
#include <jni.h>
#include <android/native_window_jni.h>
#include "../include/ffmpeg.h"
#include "../include/ffmpeg_logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"

char *ffmpeg_jstringTostr(JNIEnv *env, jstring jstr) {
    char *pStr = NULL;

    jclass jstrObj = (*env)->FindClass(env, "java/lang/String");
    jstring encode = (*env)->NewStringUTF(env, "utf-8");
    jmethodID methodId = (*env)->GetMethodID(env, jstrObj, "getBytes",
            "(Ljava/lang/String;)[B");
    jbyteArray byteArray = (jbyteArray) (*env)->CallObjectMethod(env, jstr,
            methodId, encode);
    jsize strLen = (*env)->GetArrayLength(env, byteArray);
    jbyte *jBuf = (*env)->GetByteArrayElements(env, byteArray, NULL);

    if (jBuf != NULL) {
        pStr = (char *) malloc(strLen + 1);

        if (!pStr) {
            return NULL;
        }

        memcpy(pStr, jBuf, strLen);
        pStr[strLen] = 0;
    }

    (*env)->ReleaseByteArrayElements(env, byteArray, jBuf, 0);

    return pStr;
}

void ffmpeg_setDataSource(char *file_path) {
    LOGI("ffmpeg_setDataSource:%s", file_path);

    AVFormatContext *pFormatCtx;
    AVCodecContext *pCodecCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameYUV;
    AVPacket *packet;
    uint8_t *out_buffer;

    static struct SwsContext *img_convert_ctx;

    int videoStream, i, numBytes;
    int ret, got_picture;

    av_register_all();
    pFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pFormatCtx, file_path, NULL, NULL) != 0) {
        LOGE("can't open the file.\n");
        return;
    }

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.\n");
        return;
    }

    videoStream = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStream = i;
        }
    }

    if (videoStream == -1) {
        LOGE("Didn't find a video stream.\n");
        return;
    }

    pCodecCtx = pFormatCtx->streams[videoStream]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    if (pCodec == NULL) {
        LOGE("Codec not found.\n");
        return;
    }

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Could not open codec.\n");
        return;
    }

    pFrame = av_frame_alloc();
    pFrameYUV = av_frame_alloc();

    numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width,
            pCodecCtx->height);
    out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    avpicture_fill((AVPicture *) pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P,
            pCodecCtx->width, pCodecCtx->height);

    int y_size = pCodecCtx->width * pCodecCtx->height;

    packet = (AVPacket *) malloc(sizeof(AVPacket));
    av_new_packet(packet, y_size);

    av_dump_format(pFormatCtx, 0, file_path, 0);

    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoStream) {
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture,
                    packet);

            LOGI("avcodec_decode_video2 ret:%d", ret);

            if (ret < 0) {
                LOGE("decode error.\n");
                return;
            }

            if (got_picture) {
                // TODO: convert and save the decoded frame here
                // (see the sketch after this function).
            }
        }
        av_free_packet(packet);
    }

    av_free(out_buffer);
    av_free(pFrameYUV);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
}

Step 6. Write the Android.mk that compiles the .c files into the player library:

LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

FFMPEG_PATH := ../ffmpeg
LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(FFMPEG_PATH)/include

LOCAL_MODULE := ffmpeg_player
LOCAL_SRC_FILES += ffmpeg_jni.c
LOCAL_SRC_FILES += ffmpeg.c

LOCAL_SHARED_LIBRARIES := ffmpeg
LOCAL_LDLIBS := -llog

include $(BUILD_SHARED_LIBRARY)

Step 7. Write the Java code that loads the libraries and calls the native method:
package com.clarck.android.ffmpeg;

import android.app.Activity;
import android.os.Bundle;

public class MainActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        setDataSource("/sdcard/a.mp4");
    }

    public native void setDataSource(String path);

    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("ffmpeg_player");
    }
}

Reference answer A: Create a VideoPicture struct to hold the decoded images.

/*
* SDL_Lesson.c
*
* Created on: Aug 12, 2014
* Author: clarck
*/
#include <jni.h>
#include <android/native_window_jni.h>
#include "SDL.h"
#include "SDL_thread.h"
#include "SDL_events.h"
#include "../include/logger.h"
#include "../ffmpeg/include/libavcodec/avcodec.h"
#include "../ffmpeg/include/libavformat/avformat.h"
#include "../ffmpeg/include/libavutil/pixfmt.h"
#include "../ffmpeg/include/libswscale/swscale.h"
#include "../ffmpeg/include/libswresample/swresample.h"

#define SDL_AUDIO_BUFFER_SIZE 1024

#define MAX_AUDIO_SIZE (5 * 16 * 1024)
#define MAX_VIDEO_SIZE (5 * 256 * 1024)

#define FF_ALLOC_EVENT (SDL_USEREVENT)
#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
#define FF_QUIT_EVENT (SDL_USEREVENT + 2)

#define VIDEO_PICTURE_QUEUE_SIZE 1
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48khz 32bit audio

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

typedef struct VideoPicture {
    SDL_Window *screen;
    SDL_Renderer *renderer;
    SDL_Texture *bmp;

    AVFrame *rawdata;
    int width, height; /* source height & width */
    int allocated;
} VideoPicture;

typedef struct VideoState {
    char filename[1024];
    AVFormatContext *ic;
    int videoStream, audiostream;
    AVStream *audio_st;
    AVFrame *audio_frame;
    PacketQueue audioq;
    unsigned int audio_buf_size;
    unsigned int audio_buf_index;
    AVPacket audio_pkt;
    uint8_t *audio_pkt_data;
    int audio_pkt_size;
    uint8_t *audio_buf;
    DECLARE_ALIGNED(16, uint8_t, audio_buf2)[AVCODEC_MAX_AUDIO_FRAME_SIZE * 4];
    enum AVSampleFormat audio_src_fmt;
    enum AVSampleFormat audio_tgt_fmt;
    int audio_src_channels;
    int audio_tgt_channels;
    int64_t audio_src_channel_layout;
    int64_t audio_tgt_channel_layout;
    int audio_src_freq;
    int audio_tgt_freq;
    struct SwrContext *swr_ctx;

    AVStream *video_st;
    PacketQueue videoq;

    VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
    int pictq_size, pictq_rindex, pictq_windex;
    SDL_mutex *pictq_mutex;
    SDL_cond *pictq_cond;

    SDL_Thread *parse_tid;
    SDL_Thread *audio_tid;
    SDL_Thread *video_tid;

    AVIOContext *io_ctx;
    struct SwsContext *sws_ctx;

    int quit;
} VideoState;

VideoState *global_video_state;
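
The structs above are only declarations. The PacketQueue is normally paired with small put/get helpers that the demux and decode threads use to hand packets to each other. A sketch of the producer side, assuming SDL2's mutex API and the FFmpeg 2.x packet API used elsewhere in this article:

// Sketch: append a packet to a PacketQueue (called from the demux/parse thread).
static int packet_queue_put(PacketQueue *q, AVPacket *pkt) {
    AVPacketList *pkt1;

    // Make sure the packet owns its data, so it stays valid after av_read_frame() reuses its buffer.
    if (av_dup_packet(pkt) < 0) {
        return -1;
    }

    pkt1 = av_malloc(sizeof(AVPacketList));
    if (!pkt1) {
        return -1;
    }
    pkt1->pkt = *pkt;
    pkt1->next = NULL;

    SDL_LockMutex(q->mutex);
    if (!q->last_pkt) {
        q->first_pkt = pkt1;
    } else {
        q->last_pkt->next = pkt1;
    }
    q->last_pkt = pkt1;
    q->nb_packets++;
    q->size += pkt1->pkt.size;
    SDL_CondSignal(q->cond);   // wake a consumer blocked in the matching packet_queue_get()
    SDL_UnlockMutex(q->mutex);
    return 0;
}

The matching packet_queue_get() does the reverse under the same mutex, waiting on q->cond while the queue is empty.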

Playing javacv-ffmpeg decoded audio in Android with Audiotrack

Posted: 2014-03-31 11:24:37

Question:

I am developing an Android application in which I need to play a live AAC audio stream coming from a Red5 server. I have successfully decoded the audio stream using javacv-ffmpeg, but my problem is how to play the audio from the decoded samples. I have tried the following approach:

int len = avcodec.avcodec_decode_audio4(audio_c, samples_frame, got_frame, pkt2);
if (len <= 0) {
    this.pkt2.size(0);
} else {
    if (this.got_frame[0] != 0) {
        long pts = avutil.av_frame_get_best_effort_timestamp(samples_frame);
        int sample_format = samples_frame.format();
        int planes = avutil.av_sample_fmt_is_planar(sample_format) != 0 ? samples_frame.channels() : 1;
        int data_size = avutil.av_samples_get_buffer_size((IntPointer) null, audio_c.channels(),
                samples_frame.nb_samples(), audio_c.sample_fmt(), 1) / planes;

        if ((samples_buf == null) || (samples_buf.length != planes)) {
            samples_ptr = new BytePointer[planes];
            samples_buf = new Buffer[planes];
        }

        BytePointer ptemp = samples_frame.data(0);
        BytePointer[] temp_ptr = new BytePointer[1];
        temp_ptr[0] = ptemp.capacity(sample_size);
        ByteBuffer btemp = ptemp.asBuffer();
        byte[] buftemp = new byte[sample_size];
        btemp.get(buftemp, 0, buftemp.length);

        // play buftemp[] with AudioTrack ...
    }
}

But all I hear from the speaker is noise. Does the AVFrame returned by avcodec_decode_audio4(...) need any further processing? The incoming audio stream is correctly encoded with the AAC codec. Any help or suggestions are appreciated. Thanks in advance.

Comments:

I am facing the same problem. :( @Ichigo Kurosaki, did you find a solution? If you did, please share it here.

Answer 1:

You can use the FFmpegFrameGrabber class to capture the stream and a FloatBuffer to extract the audio. Here is a Java example:

public class PlayVideoAndAudio extends Application {

    private static final Logger LOG = Logger.getLogger(PlayVideoAndAudio.class.getName());
    private static final double SC16 = (double) 0x7FFF + 0.4999999999999999;

    private static volatile Thread playThread;

    public static void main(String[] args) {
        launch(args);
    }

    @Override
    public void start(Stage primaryStage) throws Exception {
        String source = "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov";

        StackPane root = new StackPane();
        ImageView imageView = new ImageView();

        root.getChildren().add(imageView);
        imageView.fitWidthProperty().bind(primaryStage.widthProperty());
        imageView.fitHeightProperty().bind(primaryStage.heightProperty());

        Scene scene = new Scene(root, 640, 480);

        primaryStage.setTitle("Video + audio");
        primaryStage.setScene(scene);
        primaryStage.show();

        playThread = new Thread(() -> {
            try {
                FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(source);
                grabber.start();
                primaryStage.setWidth(grabber.getImageWidth());
                primaryStage.setHeight(grabber.getImageHeight());
                AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);

                DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
                SourceDataLine soundLine = (SourceDataLine) AudioSystem.getLine(info);
                soundLine.open(audioFormat);
                soundLine.start();

                Java2DFrameConverter converter = new Java2DFrameConverter();

                ExecutorService executor = Executors.newSingleThreadExecutor();

                while (!Thread.interrupted()) {
                    Frame frame = grabber.grab();
                    if (frame == null) {
                        break;
                    }
                    if (frame.image != null) {
                        Image image = SwingFXUtils.toFXImage(converter.convert(frame), null);
                        Platform.runLater(() -> {
                            imageView.setImage(image);
                        });
                    } else if (frame.samples != null) {
                        FloatBuffer channelSamplesFloatBuffer = (FloatBuffer) frame.samples[0];
                        channelSamplesFloatBuffer.rewind();

                        ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesFloatBuffer.capacity() * 2);

                        for (int i = 0; i < channelSamplesFloatBuffer.capacity(); i++) {
                            short val = (short) ((double) channelSamplesFloatBuffer.get(i) * SC16);
                            outBuffer.putShort(val);
                        }

                        /*
                         * We need this because soundLine.write ignores
                         * interruptions during writing.
                         */
                        try {
                            executor.submit(() -> {
                                soundLine.write(outBuffer.array(), 0, outBuffer.capacity());
                                outBuffer.clear();
                            }).get();
                        } catch (InterruptedException interruptedException) {
                            Thread.currentThread().interrupt();
                        }
                    }
                }
                executor.shutdownNow();
                executor.awaitTermination(10, TimeUnit.SECONDS);
                soundLine.stop();
                grabber.stop();
                grabber.release();
                Platform.exit();
            } catch (Exception exception) {
                LOG.log(Level.SEVERE, null, exception);
                System.exit(1);
            }
        });
        playThread.start();
    }

    @Override
    public void stop() throws Exception {
        playThread.interrupt();
    }
}


Answer 2:

The data you get in buftemp[] is in AV_SAMPLE_FMT_FLTP (planar float) format; you have to convert it to AV_SAMPLE_FMT_S16 with a SwrContext, and then your problem is solved.
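
For reference, the conversion this answer describes is done with libswresample. A minimal C sketch, assuming the decoded frame's channel layout and sample rate are valid (in javacv the same calls are available through the swresample bindings):

// Sketch: convert planar float (AV_SAMPLE_FMT_FLTP) samples to interleaved signed 16-bit PCM.
static int convert_fltp_to_s16(AVFrame *in_frame, uint8_t *out_buf, int out_buf_size) {
    SwrContext *swr = swr_alloc_set_opts(NULL,
            in_frame->channel_layout, AV_SAMPLE_FMT_S16, in_frame->sample_rate,              // output
            in_frame->channel_layout, (enum AVSampleFormat) in_frame->format, in_frame->sample_rate, // input
            0, NULL);
    if (!swr || swr_init(swr) < 0) {
        return -1;
    }

    int channels = av_get_channel_layout_nb_channels(in_frame->channel_layout);
    uint8_t *out[] = { out_buf };
    int out_samples = swr_convert(swr, out, out_buf_size / (2 * channels),
            (const uint8_t **) in_frame->extended_data, in_frame->nb_samples);
    swr_free(&swr);
    return out_samples;   // samples per channel written, or a negative error code
}

The resulting interleaved S16 buffer is what AudioTrack (configured with ENCODING_PCM_16BIT) expects.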

