Live Video Streaming: Complete Implementation Code

Posted by 薛萌


PushNative.java

package com.dongnaoedu.live.jni;

import com.dongnaoedu.live.listener.LiveStateChangeListener;

/**
 * Calls into C code to encode and push the stream.
 */
public class PushNative {

    public static final int CONNECT_FAILED = 101;
    public static final int INIT_FAILED = 102;

    LiveStateChangeListener liveStateChangeListener;

    /**
     * Receives error codes thrown from the native layer.
     * @param code error code
     */
    public void throwNativeError(int code) {
        if (liveStateChangeListener != null) {
            liveStateChangeListener.onError(code);
        }
    }


    public native void startPush(String url);

    public native void stopPush();

    public native void release();

    /**
     * Sets the video parameters.
     * @param width frame width in pixels
     * @param height frame height in pixels
     * @param bitrate target bitrate in bps
     * @param fps frame rate
     */
    public native void setVideoOptions(int width, int height, int bitrate, int fps);

    /**
     * Sets the audio parameters.
     * @param sampleRateInHz sample rate in Hz
     * @param channel number of channels
     */
    public native void setAudioOptions(int sampleRateInHz, int channel);


    /**
     * Sends a video frame to the native encoder.
     * @param data NV21 frame data
     */
    public native void fireVideo(byte[] data);

    /**
     * Sends audio data to the native encoder.
     * @param data PCM buffer
     * @param len number of valid bytes
     */
    public native void fireAudio(byte[] data, int len);


    public void setLiveStateChangeListener(LiveStateChangeListener liveStateChangeListener) {
        this.liveStateChangeListener = liveStateChangeListener;
    }

    public void removeLiveStateChangeListener() {
        this.liveStateChangeListener = null;
    }

    static {
        System.loadLibrary("dn_live");
    }
}

LiveStateChangeListener.java

package com.dongnaoedu.live.listener;

public interface LiveStateChangeListener {

    /**
     * Called when an error occurs.
     * @param code error code
     */
    void onError(int code);
}

AudioParam.java

package com.dongnaoedu.live.params;

public class AudioParam {

    // Sample rate
    private int sampleRateInHz = 44100;
    // Number of channels
    private int channel = 1;

    public AudioParam() {
    }

    public AudioParam(int sampleRateInHz, int channel) {
        super();
        this.sampleRateInHz = sampleRateInHz;
        this.channel = channel;
    }

    public int getSampleRateInHz() {
        return sampleRateInHz;
    }

    public void setSampleRateInHz(int sampleRateInHz) {
        this.sampleRateInHz = sampleRateInHz;
    }

    public int getChannel() {
        return channel;
    }

    public void setChannel(int channel) {
        this.channel = channel;
    }
}

VideoParam.java

package com.dongnaoedu.live.params;

/**
 * Video parameters.
 */
public class VideoParam {

    private int width;
    private int height;
    // Bitrate: 480 kbps by default
    private int bitrate = 480000;
    // Frame rate: 25 fps by default
    private int fps = 25;
    private int cameraId;

    public VideoParam(int width, int height, int cameraId) {
        super();
        this.width = width;
        this.height = height;
        this.cameraId = cameraId;
    }

    public int getWidth() {
        return width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    public int getHeight() {
        return height;
    }

    public void setHeight(int height) {
        this.height = height;
    }

    public int getCameraId() {
        return cameraId;
    }

    public void setCameraId(int cameraId) {
        this.cameraId = cameraId;
    }

    public int getBitrate() {
        return bitrate;
    }

    public void setBitrate(int bitrate) {
        this.bitrate = bitrate;
    }

    public int getFps() {
        return fps;
    }

    public void setFps(int fps) {
        this.fps = fps;
    }
}


AudioPusher.java

package com.dongnaoedu.live.pusher;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.params.AudioParam;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;

public class AudioPusher extends Pusher {

    private AudioParam audioParam;
    private AudioRecord audioRecord;
    private boolean isPushing = false;
    private int minBufferSize;
    private PushNative pushNative;

    public AudioPusher(AudioParam audioParam, PushNative pushNative) {
        this.audioParam = audioParam;
        this.pushNative = pushNative;

        int channelConfig = audioParam.getChannel() == 1 ?
                AudioFormat.CHANNEL_IN_MONO : AudioFormat.CHANNEL_IN_STEREO;
        // Minimum buffer size for this configuration
        minBufferSize = AudioRecord.getMinBufferSize(audioParam.getSampleRateInHz(), channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        audioRecord = new AudioRecord(AudioSource.MIC,
                audioParam.getSampleRateInHz(),
                channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    }

    @Override
    public void startPush() {
        isPushing = true;
        pushNative.setAudioOptions(audioParam.getSampleRateInHz(), audioParam.getChannel());
        // Start a recording worker thread
        new Thread(new AudioRecordTask()).start();
    }

    @Override
    public void stopPush() {
        isPushing = false;
        audioRecord.stop();
    }

    @Override
    public void release() {
        if (audioRecord != null) {
            audioRecord.release();
            audioRecord = null;
        }
    }

    class AudioRecordTask implements Runnable {

        @Override
        public void run() {
            // Start recording
            audioRecord.startRecording();

            while (isPushing) {
                // Keep reading PCM data from AudioRecord
                byte[] buffer = new byte[minBufferSize];
                int len = audioRecord.read(buffer, 0, buffer.length);
                if (len > 0) {
                    // Hand the PCM data to native code for AAC encoding
                    pushNative.fireAudio(buffer, len);
                }
            }
        }
    }
}


LivePusher.java

package com.dongnaoedu.live.pusher;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.listener.LiveStateChangeListener;
import com.dongnaoedu.live.params.AudioParam;
import com.dongnaoedu.live.params.VideoParam;

import android.hardware.Camera.CameraInfo;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;

public class LivePusher implements Callback {

    private SurfaceHolder surfaceHolder;
    private VideoPusher videoPusher;
    private AudioPusher audioPusher;
    private PushNative pushNative;

    public LivePusher(SurfaceHolder surfaceHolder) {
        this.surfaceHolder = surfaceHolder;
        surfaceHolder.addCallback(this);
        prepare();
    }

    /**
     * Prepares the preview: creates the native bridge and both pushers.
     */
    private void prepare() {
        pushNative = new PushNative();

        // Create the video pusher
        VideoParam videoParam = new VideoParam(480, 320, CameraInfo.CAMERA_FACING_BACK);
        videoPusher = new VideoPusher(surfaceHolder, videoParam, pushNative);

        // Create the audio pusher
        AudioParam audioParam = new AudioParam();
        audioPusher = new AudioPusher(audioParam, pushNative);
    }

    /**
     * Switches between front and back cameras.
     */
    public void switchCamera() {
        videoPusher.switchCamera();
    }

    /**
     * Starts pushing the stream.
     * @param url RTMP URL
     * @param liveStateChangeListener error callback
     */
    public void startPush(String url, LiveStateChangeListener liveStateChangeListener) {
        videoPusher.startPush();
        audioPusher.startPush();
        pushNative.startPush(url);
        pushNative.setLiveStateChangeListener(liveStateChangeListener);
    }

    /**
     * Stops pushing the stream.
     */
    public void stopPush() {
        videoPusher.stopPush();
        audioPusher.stopPush();
        pushNative.stopPush();
        pushNative.removeLiveStateChangeListener();
    }

    /**
     * Releases all resources.
     */
    private void release() {
        videoPusher.release();
        audioPusher.release();
        pushNative.release();
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPush();
        release();
    }
}


Pusher.java

package com.dongnaoedu.live.pusher;

public abstract class Pusher {

    public abstract void startPush();

    public abstract void stopPush();

    public abstract void release();
}


VideoPusher.java

package com.dongnaoedu.live.pusher;

import java.io.IOException;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.params.VideoParam;

import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;

public class VideoPusher extends Pusher implements Callback, PreviewCallback {

    private SurfaceHolder surfaceHolder;
    private Camera mCamera;
    private VideoParam videoParams;
    private byte[] buffers;
    private boolean isPushing = false;
    private PushNative pushNative;

    public VideoPusher(SurfaceHolder surfaceHolder, VideoParam videoParams, PushNative pushNative) {
        this.surfaceHolder = surfaceHolder;
        this.videoParams = videoParams;
        this.pushNative = pushNative;
        surfaceHolder.addCallback(this);
    }

    @Override
    public void startPush() {
        // Set the video parameters
        pushNative.setVideoOptions(videoParams.getWidth(),
                videoParams.getHeight(), videoParams.getBitrate(), videoParams.getFps());
        isPushing = true;
    }

    @Override
    public void stopPush() {
        isPushing = false;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        startPreview();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    @Override
    public void release() {
        stopPreview();
    }

    /**
     * Switches between front and back cameras.
     */
    public void switchCamera() {
        if (videoParams.getCameraId() == CameraInfo.CAMERA_FACING_BACK) {
            videoParams.setCameraId(CameraInfo.CAMERA_FACING_FRONT);
        } else {
            videoParams.setCameraId(CameraInfo.CAMERA_FACING_BACK);
        }
        // Restart the preview
        stopPreview();
        startPreview();
    }

    /**
     * Starts the camera preview.
     */
    private void startPreview() {
        try {
            // The SurfaceView is ready; open the camera and start previewing
            mCamera = Camera.open(videoParams.getCameraId());
            Camera.Parameters parameters = mCamera.getParameters();
            // Configure the camera
            parameters.setPreviewFormat(ImageFormat.NV21); // YUV pixel format of the preview frames
            parameters.setPreviewSize(videoParams.getWidth(), videoParams.getHeight()); // preview width/height
            mCamera.setParameters(parameters);
            //parameters.setPreviewFpsRange(videoParams.getFps()-1, videoParams.getFps());
            mCamera.setPreviewDisplay(surfaceHolder);
            // Buffer for preview frames; NV21 is 12 bits per pixel, so width * height * 3 / 2 bytes
            buffers = new byte[videoParams.getWidth() * videoParams.getHeight() * 3 / 2];
            mCamera.addCallbackBuffer(buffers);
            mCamera.setPreviewCallbackWithBuffer(this);
            mCamera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops the camera preview.
     */
    private void stopPreview() {
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (mCamera != null) {
            // Re-queue the buffer so the camera keeps delivering frames
            mCamera.addCallbackBuffer(buffers);
        }

        if (isPushing) {
            // Hand the NV21 frame to native code for H.264 encoding
            pushNative.fireVideo(data);
        }
    }
}



MainActivity.java

package com.dongnaoedu.live;

import com.dongnaoedu.live.jni.PushNative;
import com.dongnaoedu.live.listener.LiveStateChangeListener;
import com.dongnaoedu.live.pusher.LivePusher;

import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;

public class MainActivity extends Activity implements LiveStateChangeListener {

    static final String URL = "";
    private LivePusher live;

    private Handler handler = new Handler() {
        public void handleMessage(android.os.Message msg) {
            switch (msg.what) {
            case PushNative.CONNECT_FAILED:
                Toast.makeText(MainActivity.this, "Connection failed", Toast.LENGTH_SHORT).show();
                break;
            case PushNative.INIT_FAILED:
                Toast.makeText(MainActivity.this, "Initialization failed", Toast.LENGTH_SHORT).show();
                break;
            default:
                break;
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        SurfaceView surfaceView = (SurfaceView) findViewById(R.id.surface);
        // Camera preview surface
        live = new LivePusher(surfaceView.getHolder());
    }

    /**
     * Starts or stops the live stream (wired to the button via android:onClick).
     * @param view the clicked button
     */
    public void mStartLive(View view) {
        Button btn = (Button) view;
        if (btn.getText().equals("Start Live")) {
            live.startPush(URL, this);
            btn.setText("Stop Live");
        } else {
            live.stopPush();
            btn.setText("Start Live");
        }
    }

    /**
     * Switches the camera.
     * @param btn the clicked button
     */
    public void mSwitchCamera(View btn) {
        live.switchCamera();
    }

    // This callback still runs on a worker thread, so post the error to the UI thread
    @Override
    public void onError(int code) {
        handler.sendEmptyMessage(code);
    }
}


activity_main.xml

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent" >

    <SurfaceView
        android:id="@+id/surface"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_centerInParent="true" />

    <LinearLayout
        android:id="@+id/adcontainer"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_alignParentBottom="true"
        android:layout_alignParentLeft="true"
        android:orientation="horizontal" >

        <Button
            android:id="@+id/btn_push"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:onClick="mStartLive"
            android:text="开始直播"/>

        <Button
            android:id="@+id/btn_camera_switch"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:text="切换摄像头"
            android:onClick="mSwitchCamera"/>
    </LinearLayout>

</RelativeLayout>

Before studying this source, read Chapter 4 of the Android audio/video series ("Implementing Live Streaming: Pushing Video"), then Chapter 5, and come back to the complete source afterwards. Two practical notes: the app must declare the CAMERA, RECORD_AUDIO, and INTERNET permissions in its AndroidManifest.xml, and the native code below relies on a small hand-rolled packet queue (queue.h) that the post never includes; a reconstructed sketch follows.
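queue.h (reconstructed sketch)

The native source calls create_queue(), queue_append_last(), queue_get_first(), and queue_delete_first(), all from the missing queue.h. The minimal linked-list implementation below matches those four call sites; it is an assumption made for completeness, not the course's original file. It does no locking of its own because every caller already holds the mutex.

// queue.h -- hypothetical reconstruction; matches only the calls used in this post
#ifndef QUEUE_H
#define QUEUE_H

#include <stdlib.h>

typedef struct queue_node {
    void *data;
    struct queue_node *next;
} queue_node;

static queue_node *queue_head = NULL;
static queue_node *queue_tail = NULL;

// Reset the queue to empty
static void create_queue() {
    queue_head = queue_tail = NULL;
}

// Append an element at the tail
static void queue_append_last(void *data) {
    queue_node *node = (queue_node *) malloc(sizeof(queue_node));
    node->data = data;
    node->next = NULL;
    if (queue_tail) {
        queue_tail->next = node;
    } else {
        queue_head = node;
    }
    queue_tail = node;
}

// Peek at the first element, or NULL if the queue is empty
static void *queue_get_first() {
    return queue_head ? queue_head->data : NULL;
}

// Remove the first element
static void queue_delete_first() {
    if (queue_head) {
        queue_node *node = queue_head;
        queue_head = node->next;
        if (queue_head == NULL) {
            queue_tail = NULL;
        }
        free(node);
    }
}

#endif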

#include "com_dongnaoedu_live_jni_PushNative.h"

#include <android/log.h>
#include <android/native_window_jni.h>
#include <android/native_window.h>
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"jason",FORMAT,##__VA_ARGS__)
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"jason",FORMAT,##__VA_ARGS__)

#include <stdlib.h>
#include <string.h>
#include <pthread.h>
#include "queue.h"
#include "x264.h"
#include "rtmp.h"
#include "faac.h"

#ifndef TRUE
#define TRUE    1
#define FALSE   0
#endif

#define CONNECT_FAILED 101
#define INIT_FAILED 102

// x264 input picture (YUV420P)
x264_picture_t pic_in;
x264_picture_t pic_out;
// Plane sizes for Y, U and V
int y_len, u_len, v_len;
// x264 encoder handle
x264_t *video_encode_handle;

unsigned int start_time;
// Threading primitives
pthread_mutex_t mutex;
pthread_cond_t cond;
// RTMP stream URL
char *rtmp_path;
// Whether we are currently pushing
int is_pushing = FALSE;
// faac audio encoder handle
faacEncHandle audio_encode_handle;

unsigned long nInputSamples;   // number of input samples per encode call
unsigned long nMaxOutputBytes; // maximum size of the encoded output in bytes

jobject jobj_push_native; // global ref to the PushNative instance
jclass jcls_push_native;
jmethodID jmid_throw_native_error;
JavaVM *javaVM;

// Forward declaration: defined further down, but used by the add_* helpers above it
void add_rtmp_packet(RTMPPacket *packet);

/**
 * Sends the AAC sequence header (AudioSpecificConfig).
 */
void add_aac_sequence_header() {
    // Get the AAC decoder-specific info and its length
    unsigned char *buf;
    unsigned long len;
    faacEncGetDecoderSpecificInfo(audio_encode_handle, &buf, &len);
    int body_size = 2 + len;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    // Initialize the RTMPPacket
    RTMPPacket_Alloc(packet, body_size);
    RTMPPacket_Reset(packet);
    unsigned char *body = packet->m_body;
    /* AF 00 + AAC sequence header */
    body[0] = 0xAF; // SoundFormat(4bits)=10 (AAC), SoundRate(2bits)=3 (44kHz), SoundSize(1bit)=1 (16-bit), SoundType(1bit)=1 (stereo)
    body[1] = 0x00; // AACPacketType: 0 = AAC sequence header
    memcpy(&body[2], buf, len); /* buf holds the AAC sequence header data */
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 0x04;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nTimeStamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    add_rtmp_packet(packet);
    free(buf);
}



/**
 * Sends one AAC frame as an RTMP packet.
 */
void add_aac_body(unsigned char *buf, int len) {
    int body_size = 2 + len;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    // Initialize the RTMPPacket
    RTMPPacket_Alloc(packet, body_size);
    RTMPPacket_Reset(packet);
    unsigned char *body = packet->m_body;
    /* AF 01 + AAC raw data */
    body[0] = 0xAF; // SoundFormat(4bits)=10 (AAC), SoundRate(2bits)=3 (44kHz), SoundSize(1bit)=1 (16-bit), SoundType(1bit)=1 (stereo)
    body[1] = 0x01; // AACPacketType: 1 = AAC raw
    memcpy(&body[2], buf, len); /* buf holds the raw AAC frame */
    packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
    packet->m_nBodySize = body_size;
    packet->m_nChannel = 0x04;
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_nTimeStamp = RTMP_GetTime() - start_time;
    add_rtmp_packet(packet);
}


// Cache the JavaVM so worker threads can attach themselves
jint JNI_OnLoad(JavaVM *vm, void *reserved) {
    javaVM = vm;
    return JNI_VERSION_1_4;
}


/**
 * Reports an error code back to the Java layer.
 */
void throwNativeError(JNIEnv *env, int code) {
    (*env)->CallVoidMethod(env, jobj_push_native, jmid_throw_native_error, code);
}


/**
 * Consumer thread: keeps pulling RTMPPackets off the queue and sending them to the media server.
 */
void *push_thread(void *arg) {
    JNIEnv *env; // JNIEnv for the current thread
    (*javaVM)->AttachCurrentThread(javaVM, &env, NULL);

    // Set up the RTMP connection
    RTMP *rtmp = RTMP_Alloc();
    if (!rtmp) {
        LOGE("failed to allocate RTMP");
        goto end;
    }
    RTMP_Init(rtmp);
    rtmp->Link.timeout = 5; // connection timeout in seconds
    // Set the stream URL
    RTMP_SetupURL(rtmp, rtmp_path);
    // Publish the stream (write mode)
    RTMP_EnableWrite(rtmp);
    // Connect to the server
    if (!RTMP_Connect(rtmp, NULL)) {
        LOGE("%s", "RTMP connect failed");
        throwNativeError(env, CONNECT_FAILED);
        goto end;
    }
    // Start the clock
    start_time = RTMP_GetTime();
    if (!RTMP_ConnectStream(rtmp, 0)) { // connect to the stream
        LOGE("%s", "RTMP ConnectStream failed");
        throwNativeError(env, CONNECT_FAILED);
        goto end;
    }
    is_pushing = TRUE;
    // Send the AAC sequence header first
    add_aac_sequence_header();

    while (is_pushing) {
        pthread_mutex_lock(&mutex);
        pthread_cond_wait(&cond, &mutex);
        // Take the next RTMPPacket off the queue
        RTMPPacket *packet = queue_get_first();
        if (packet) {
            queue_delete_first(); // remove it from the queue
            packet->m_nInfoField2 = rtmp->m_stream_id; // stream id required by the RTMP protocol
            int i = RTMP_SendPacket(rtmp, packet, TRUE); // TRUE: let librtmp queue internally rather than send immediately
            if (!i) {
                LOGE("RTMP disconnected");
                RTMPPacket_Free(packet);
                pthread_mutex_unlock(&mutex);
                goto end;
            } else {
                LOGI("%s", "rtmp send packet");
            }
            RTMPPacket_Free(packet);
        }

        pthread_mutex_unlock(&mutex);
    }
end:
    LOGI("%s", "releasing resources");
    free(rtmp_path);
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    (*javaVM)->DetachCurrentThread(javaVM);
    return 0;
}


JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_startPush
  (JNIEnv *env, jobject jobj, jstring url_jstr) {
    // Keep a global ref to the PushNative instance
    jobj_push_native = (*env)->NewGlobalRef(env, jobj);

    jclass jcls_push_native_tmp = (*env)->GetObjectClass(env, jobj);
    jcls_push_native = (*env)->NewGlobalRef(env, jcls_push_native_tmp);
    if (jcls_push_native_tmp == NULL) {
        LOGI("%s", "NULL");
    } else {
        LOGI("%s", "not NULL");
    }
    // PushNative.throwNativeError
    jmid_throw_native_error = (*env)->GetMethodID(env, jcls_push_native_tmp, "throwNativeError", "(I)V");

    // Initialization
    const char *url_cstr = (*env)->GetStringUTFChars(env, url_jstr, NULL);
    // Copy url_cstr into rtmp_path
    rtmp_path = malloc(strlen(url_cstr) + 1);
    memset(rtmp_path, 0, strlen(url_cstr) + 1);
    memcpy(rtmp_path, url_cstr, strlen(url_cstr));

    // Initialize the mutex and condition variable
    pthread_mutex_init(&mutex, NULL);
    pthread_cond_init(&cond, NULL);

    // Create the packet queue
    create_queue();
    // Start the consumer thread (pulls RTMPPackets off the queue and sends them to the server)
    pthread_t push_thread_id;
    pthread_create(&push_thread_id, NULL, push_thread, NULL);

    (*env)->ReleaseStringUTFChars(env, url_jstr, url_cstr);
}



JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_stopPush
  (JNIEnv *env, jobject jobj) {
    is_pushing = FALSE;
    // Wake the sender thread so it can see is_pushing == FALSE and exit
    pthread_cond_signal(&cond);
}


JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_release
  (JNIEnv *env, jobject jobj) {
    (*env)->DeleteGlobalRef(env, jcls_push_native);
    (*env)->DeleteGlobalRef(env, jobj_push_native);
    // Note: a jmethodID is not a jobject, so it must not be passed to DeleteGlobalRef
}


/**
 * Configures the video encoder.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_setVideoOptions
  (JNIEnv *env, jobject jobj, jint width, jint height, jint bitrate, jint fps) {
    x264_param_t param;
    // Start from defaults tuned for speed and low latency
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    // Input pixel format: YUV420P
    param.i_csp = X264_CSP_I420;
    param.i_width  = width;
    param.i_height = height;

    y_len = width * height;
    u_len = y_len / 4;
    v_len = u_len;

    // i_rc_method selects rate control: CQP (constant QP), CRF (constant rate factor, i.e. constant quality), ABR (average bitrate)
    param.rc.i_rc_method = X264_RC_CRF;
    param.rc.i_bitrate = bitrate / 1000; // bitrate in kbps
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2; // peak bitrate

    // Derive timing from fps instead of timebase/timestamps
    param.b_vfr_input = 0;
    param.i_fps_num = fps; // frame rate numerator
    param.i_fps_den = 1;   // frame rate denominator
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    param.i_threads = 1; // number of encoding threads (0 = auto/multithreaded)

    // Repeat SPS and PPS before each keyframe
    // SPS = Sequence Parameter Set, PPS = Picture Parameter Set
    // improves the stream's error resilience
    param.b_repeat_headers = 1;
    // Level 5.1
    param.i_level_idc = 51;
    // Baseline profile: no B-frames
    x264_param_apply_profile(&param, "baseline");

    // Allocate the input picture
    x264_picture_alloc(&pic_in, param.i_csp, param.i_width, param.i_height);
    pic_in.i_pts = 0;
    // Open the encoder
    video_encode_handle = x264_encoder_open(&param);
    if (video_encode_handle) {
        LOGI("video encoder opened");
    } else {
        throwNativeError(env, INIT_FAILED);
    }
}
    


/**
 * Configures the audio encoder.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_setAudioOptions
  (JNIEnv *env, jobject jobj, jint sampleRateInHz, jint numChannels) {
    audio_encode_handle = faacEncOpen(sampleRateInHz, numChannels, &nInputSamples, &nMaxOutputBytes);
    if (!audio_encode_handle) {
        LOGE("failed to open audio encoder");
        return;
    }
    // Set the audio encoding parameters
    faacEncConfigurationPtr p_config = faacEncGetCurrentConfiguration(audio_encode_handle);
    p_config->mpegVersion = MPEG4;
    p_config->allowMidside = 1;
    p_config->aacObjectType = LOW;
    p_config->outputFormat = 0; // whether the output includes ADTS headers
    p_config->useTns = 1; // temporal noise shaping, reduces popping artifacts
    p_config->useLfe = 0;
//  p_config->inputFormat = FAAC_INPUT_16BIT;
    p_config->quantqual = 100;
    p_config->bandWidth = 0; // bandwidth
    p_config->shortctl = SHORTCTL_NORMAL;

    if (!faacEncSetConfiguration(audio_encode_handle, p_config)) {
        LOGE("%s", "failed to configure audio encoder");
        throwNativeError(env, INIT_FAILED);
        return;
    }

    LOGI("%s", "audio encoder configured");
}


/**
 * Appends an RTMPPacket to the queue and wakes the sender thread.
 */
void add_rtmp_packet(RTMPPacket *packet) {
    pthread_mutex_lock(&mutex);
    if (is_pushing) {
        queue_append_last(packet);
    }
    pthread_cond_signal(&cond);
    pthread_mutex_unlock(&mutex);
}



/**
 * Sends the H.264 SPS and PPS parameter sets.
 */
void add_264_sequence_header(unsigned char *pps, unsigned char *sps, int pps_len, int sps_len) {
    int body_size = 16 + sps_len + pps_len; // the fixed SPS/PPS framing takes 16 bytes
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    // Initialize the RTMPPacket
    RTMPPacket_Alloc(packet, body_size);
    RTMPPacket_Reset(packet);

    unsigned char *body = packet->m_body;
    int i = 0;
    // binary: 00010111
    body[i++] = 0x17; // VideoHeaderTag: FrameType(1 = keyframe) + CodecID(7 = AVC)
    body[i++] = 0x00; // AVCPacketType = 0: AVCDecoderConfigurationRecord
    // composition time: 24-bit 0x000000
    body[i++] = 0x00;
    body[i++] = 0x00;
    body[i++] = 0x00;

    /* AVCDecoderConfigurationRecord */
    body[i++] = 0x01;   // configurationVersion = 1
    body[i++] = sps[1]; // AVCProfileIndication
    body[i++] = sps[2]; // profile_compatibility
    body[i++] = sps[3]; // AVCLevelIndication
    body[i++] = 0xFF;   // lengthSizeMinusOne: NALU length field size = 1 + (lengthSizeMinusOne & 3); 0xFF yields 4 bytes

    /* SPS */
    body[i++] = 0xE1; // numOfSequenceParameterSets & 0x1F; 0xE1 yields 1
    body[i++] = (sps_len >> 8) & 0xff; // sequenceParameterSetLength, high byte
    body[i++] = sps_len & 0xff;        // sequenceParameterSetLength, low byte
    memcpy(&body[i], sps, sps_len);
    i += sps_len;

    /* PPS */
    body[i++] = 0x01; // numOfPictureParameterSets & 0x1F; 0x01 yields 1
    body[i++] = (pps_len >> 8) & 0xff; // pictureParameterSetLength, high byte
    body[i++] = (pps_len) & 0xff;      // pictureParameterSetLength, low byte
    memcpy(&body[i], pps, pps_len);
    i += pps_len;

    // Message type: RTMP_PACKET_TYPE_VIDEO (0x09)
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    // Payload length
    packet->m_nBodySize = body_size;
    // Timestamp, in milliseconds, relative to the first tag;
    // the header tag's timestamp is always 0
    packet->m_nTimeStamp = 0;
    packet->m_hasAbsTimestamp = 0;
    packet->m_nChannel = 0x04; // channel ID shared by audio and video
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    // Enqueue the packet
    add_rtmp_packet(packet);
}



/**
 * Sends one H.264 frame.
 */
void add_264_body(unsigned char *buf, int len) {
    // Strip the Annex-B start code
    if (buf[2] == 0x00) {        // 00 00 00 01
        buf += 4;
        len -= 4;
    } else if (buf[2] == 0x01) { // 00 00 01
        buf += 3;
        len -= 3;
    }
    int body_size = len + 9;
    RTMPPacket *packet = malloc(sizeof(RTMPPacket));
    RTMPPacket_Alloc(packet, body_size);

    unsigned char *body = packet->m_body;
    // The low 5 bits of the NAL header give the NAL type; type 5 marks an IDR (key) frame
    // e.g. 00000101 & 00011111 (0x1f) = 00000101
    int type = buf[0] & 0x1f;
    // Inter frame by default
    body[0] = 0x27; // VideoHeaderTag: FrameType(2 = inter frame) + CodecID(7 = AVC)
    // IDR keyframe
    if (type == NAL_SLICE_IDR) {
        body[0] = 0x17; // VideoHeaderTag: FrameType(1 = keyframe) + CodecID(7 = AVC)
    }
    // AVCPacketType = 1
    body[1] = 0x01; /* NAL unit (AVCPacketType == 1) */
    body[2] = 0x00; // composition time: 24-bit 0x000000
    body[3] = 0x00;
    body[4] = 0x00;

    // NALU length, big-endian, one byte at a time
    body[5] = (len >> 24) & 0xff;
    body[6] = (len >> 16) & 0xff;
    body[7] = (len >> 8) & 0xff;
    body[8] = (len) & 0xff;

    /* copy the NALU payload */
    memcpy(&body[9], buf, len);

    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = body_size;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO; // packet type: video
    packet->m_nChannel = 0x04;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
//  packet->m_nTimeStamp = -1;
    packet->m_nTimeStamp = RTMP_GetTime() - start_time; // timestamp relative to the first tag
    add_rtmp_packet(packet);
}




/**
 * Encodes a captured video frame and sends it.
 */
JNIEXPORT void JNICALL Java_com_dongnaoedu_live_jni_PushNative_fireVideo
  (JNIEnv *env, jobject jobj, jbyteArray buffer) {
    // Convert the frame from NV21 to YUV420P
    jbyte *nv21_buffer = (*env)->GetByteArrayElements(env, buffer, NULL);
    jbyte *u = pic_in.img.plane[1];
    jbyte *v = pic_in.img.plane[2];
    // NV21 is a 4:2:0 format, 12 bits per pixel
    // NV21 and YUV420P share the same Y plane; NV21 interleaves the chroma as VU
    // pairs after the Y plane, while YUV420P stores separate U and V planes
    // Plane sizes: Y = w*h bytes, U and V = w*h/4 bytes each
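    // ---- The original post's listing breaks off at this point. ----
    // What follows is a hypothetical reconstruction of the rest of the function,
    // inferred from the comments above and from how add_264_sequence_header()
    // and add_264_body() are defined -- a sketch, not the author's original code.

    // Copy the Y plane unchanged, then split the interleaved VU bytes into U and V planes
    memcpy(pic_in.img.plane[0], nv21_buffer, y_len);
    int i;
    for (i = 0; i < u_len; i++) {
        *(u + i) = *(nv21_buffer + y_len + i * 2 + 1); // U: second byte of each VU pair
        *(v + i) = *(nv21_buffer + y_len + i * 2);     // V: first byte of each VU pair
    }

    // Encode the frame; x264 hands back an array of NALUs
    x264_nal_t *nal = NULL;
    int n_nal = -1;
    if (x264_encoder_encode(video_encode_handle, &nal, &n_nal, &pic_in, &pic_out) < 0) {
        LOGE("%s", "encode failed");
        (*env)->ReleaseByteArrayElements(env, buffer, nv21_buffer, 0);
        return;
    }
    pic_in.i_pts += 1; // monotonically increasing presentation timestamp

    // Keyframes should carry SPS/PPS for error resilience, so route those
    // parameter sets through add_264_sequence_header() and everything else
    // through add_264_body()
    unsigned char sps[100], pps[100];
    int sps_len = 0, pps_len = 0;
    for (i = 0; i < n_nal; i++) {
        if (nal[i].i_type == NAL_SPS) {
            sps_len = nal[i].i_payload - 4; // skip the 4-byte start code
            memcpy(sps, nal[i].p_payload + 4, sps_len);
        } else if (nal[i].i_type == NAL_PPS) {
            pps_len = nal[i].i_payload - 4; // skip the 4-byte start code
            memcpy(pps, nal[i].p_payload + 4, pps_len);
            add_264_sequence_header(pps, sps, pps_len, sps_len);
        } else {
            add_264_body(nal[i].p_payload, nal[i].i_payload);
        }
    }
    (*env)->ReleaseByteArrayElements(env, buffer, nv21_buffer, 0);
}

// The post is also cut off before Java_com_dongnaoedu_live_jni_PushNative_fireAudio,
// which would feed the PCM buffer through faacEncEncode() and pass each encoded
// frame to add_aac_body().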
