How do I decode and play a G711 audio stream on Android?
This is quite old code and I have half-forgotten it, but here it is for reference. Everything below lives inside an Activity; TalkBackSession / DefaultTalkBackSession, CodecLibObj, G711 and Utils are project classes that are not shown.

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioRecord;
    import android.media.AudioTrack;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.util.Log;

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingQueue;

    public final static String TAG = "DefaultTalkBackSession";

    TalkBackSession session;
    CodecLibObj obj;
    // Jitter buffer between the network callback and the playback thread.
    private BlockingQueue<byte[]> inputQueue = new LinkedBlockingQueue<byte[]>();

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        try {
            session = new DefaultTalkBackSession("192.168.78.65", 15528);
            obj = session.codecAbilityQuery();
            if (obj != null) {
                session.startLiveListening(new TalkBackSession.OnAudioDataReceiveListener() {
                    @Override
                    public void onAudioDataReceived(byte[] data) {
                        Log.d(TAG, "received audio data: " + Utils.toHex(data));
                        try {
                            inputQueue.put(data);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt();
                        }
                    }
                }, obj, 1);
                session.startCall(obj, 1);
                ulaw_play();
                ulaw_record();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        if (session != null) {
            if (obj != null) {
                session.stopLiveListening();
                session.endCall();
            }
            ((DefaultTalkBackSession) session).stop();
        }
        ulaw_stop();
    }

    private void ulaw_stop() {
        ulawRunning = false;
        if (ulaw_decode_thread != null) {
            ulaw_decode_thread.interrupt(); // unblock inputQueue.take()
        }
    }

    private void ulaw_play() {
        ulawRunning = true;
        (ulaw_decode_thread = new Thread(ulaw_decode)).start();
    }

    boolean ulawRunning = false;
    Thread ulaw_decode_thread;

    /**
     * Decode thread: take u-law packets off the queue, decode to 16-bit PCM
     * and feed them to an AudioTrack.
     */
    Runnable ulaw_decode = new Runnable() {
        public void run() {
            try {
                Thread.sleep(200); // let a few packets queue up before playback starts
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
            int samp_rate = 8000;
            int maxjitter = AudioTrack.getMinBufferSize(samp_rate,
                    AudioFormat.CHANNEL_OUT_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            AudioTrack track = new AudioTrack(AudioManager.STREAM_VOICE_CALL, samp_rate,
                    AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                    maxjitter, AudioTrack.MODE_STREAM);
            track.play();
            try {
                while (ulawRunning) {
                    byte[] dataForDecode;
                    try {
                        dataForDecode = inputQueue.take(); // blocks until a packet arrives
                    } catch (InterruptedException e) {
                        break; // interrupted by ulaw_stop()
                    }
                    // Each u-law byte decodes to one 16-bit sample.
                    short[] audioData = new short[dataForDecode.length];
                    // decode
                    G711.ulaw2linear(dataForDecode, audioData, audioData.length);
                    Log.d(TAG, "audioData=" + Utils.toHex(audioData));
                    // play
                    track.write(audioData, 0, audioData.length);
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                track.release();
            }
        }
    };

    /**
     * Encode thread: read PCM from the microphone, encode to u-law and send it.
     */
    Runnable ulaw_encode = new Runnable() {
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);
            AudioRecord record = getAudioRecord();
            int frame_size = 160; // 160 samples = 20 ms per frame at 8000 Hz
            short[] audioData = new short[frame_size];
            byte[] encodeData = new byte[frame_size];
            int num;
            try {
                while (ulawRunning) {
                    num = record.read(audioData, 0, frame_size);
                    if (num <= 0) continue;
                    calc2(audioData, 0, num);
                    // PCMU (u-law) encode
                    G711.linear2ulaw(audioData, 0, encodeData, num);
                    // send
                    session.sendAudioData(encodeData);
                }
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                record.release();
            }
        }
    };

    // Halve the amplitude: a crude software gain reduction to avoid clipping.
    void calc2(short[] lin, int off, int len) {
        for (int i = 0; i < len; i++) {
            lin[i + off] = (short) (lin[i + off] >> 1);
        }
    }

    private void ulaw_record() {
        new Thread(ulaw_encode).start();
    }

    private AudioRecord getAudioRecord() {
        int samp_rate = 8000;
        int min = AudioRecord.getMinBufferSize(samp_rate,
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        Log.e(TAG, "min buffer size: " + min);
        AudioRecord record = new AudioRecord(
                MediaRecorder.AudioSource.MIC, // the recording source
                samp_rate,                     // sample rate; 8000 Hz for G.711
                AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                min);
        record.startRecording();
        return record;
    }
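The G711 class used above is an external helper and is not shown. For reference, here is a minimal sketch of what the two conversion routines might look like, with the signatures inferred from the calls above and the arithmetic following the standard G.711 u-law bias/segment scheme; treat it as an untested illustration, not the original poster's implementation.

    // Minimal G.711 u-law (PCMU) codec sketch. Signatures match the calls above.
    public final class G711 {

        private static final int BIAS = 0x84;   // standard u-law bias
        private static final int CLIP = 32635;  // max linear magnitude before clipping

        // Decode: one u-law byte -> one 16-bit linear PCM sample.
        public static void ulaw2linear(byte[] src, short[] dst, int len) {
            for (int i = 0; i < len; i++) {
                int u = ~src[i] & 0xFF;          // u-law bytes are stored bit-inverted
                int exponent = (u >> 4) & 0x07;  // segment number
                int mantissa = u & 0x0F;         // step within the segment
                int sample = (((mantissa << 3) + BIAS) << exponent) - BIAS;
                dst[i] = (short) ((u & 0x80) != 0 ? -sample : sample);
            }
        }

        // Encode: one 16-bit linear PCM sample -> one u-law byte.
        public static void linear2ulaw(short[] src, int offset, byte[] dst, int len) {
            for (int i = 0; i < len; i++) {
                int pcm = src[offset + i];
                int sign = (pcm >> 8) & 0x80;    // keep the sign bit
                if (sign != 0) pcm = -pcm;
                if (pcm > CLIP) pcm = CLIP;      // clip to avoid overflow
                pcm += BIAS;
                int exponent = 7;                // find the segment (highest set bit)
                for (int mask = 0x4000; (pcm & mask) == 0 && exponent > 0; mask >>= 1) {
                    exponent--;
                }
                int mantissa = (pcm >> (exponent + 3)) & 0x0F;
                dst[i] = (byte) ~(sign | (exponent << 4) | mantissa);
            }
        }
    }

Note that u-law maps each encoded byte to exactly one 16-bit sample, which is why the decode buffer in the playback loop is sized to dataForDecode.length.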
(4) Decoding audio with ffmpeg on Android
Audio decoding follows much the same flow as video decoding; the only difference is how the decoded frames are handled: video gets a pixel-format conversion and is displayed, while audio is resampled and played. For that reason, when designing the architecture later on, the logic shared by the audio and video paths can live in a common parent class, with each subclass handling its own concern, video rendering and audio playback respectively.
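As a rough sketch of that idea (all class and method names here are invented purely for illustration):

    // Shared decode loop in a base class; subclasses differ only in how they
    // consume a decoded frame (render pixels vs. play samples).
    abstract class BaseDecoder implements Runnable {
        // Placeholder for a decoded frame (raw pixels or PCM samples).
        static final class Frame { byte[] data; }

        private volatile boolean running = true;

        @Override
        public void run() {
            while (running) {
                Frame frame = decodeNextFrame(); // demux + decode: same for audio and video
                if (frame == null) break;        // end of stream
                onFrameDecoded(frame);           // the only part that differs
            }
        }

        void stop() { running = false; }

        abstract Frame decodeNextFrame();        // e.g. calls into ffmpeg via JNI
        abstract void onFrameDecoded(Frame frame);
    }

    final class AudioDecoder extends BaseDecoder {
        @Override Frame decodeNextFrame() { return null; /* fetch next frame from ffmpeg */ }
        @Override void onFrameDecoded(Frame f) { /* resample, then hand PCM to OpenSL ES */ }
    }

    final class VideoDecoder extends BaseDecoder {
        @Override Frame decodeNextFrame() { return null; /* fetch next frame from ffmpeg */ }
        @Override void onFrameDecoded(Frame f) { /* convert pixel format, then render */ }
    }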
After that, the resampled data is handed to OpenSL ES for playback.

OpenSL ES, short for Open Sound Library for Embedded Systems, is an open, hardware-accelerated audio library for embedded systems. It supports both audio capture and playback, provides high-performance, low-latency audio APIs, and makes audio code portable across different hardware and software stacks, which greatly reduces the effort of building audio applications on top of it.

Object and Interface are the two fundamental concepts in OpenSL ES, loosely analogous to objects and interfaces in Java. Each Object in OpenSL ES can expose a set of Interfaces, and every object supports a set of basic operations such as Realize, GetState and Destroy.

One important point: you can only use an Object's functionality after obtaining the corresponding Interface through GetInterface.
The example here plays a video file stored on the phone, so only the OpenSL ES Audio Player playback path is covered.
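In outline, a buffer-queue-based Audio Player is typically driven like this: create and Realize the engine object and get its engine interface; create and Realize an output mix; create an audio player whose data source is an Android simple buffer queue carrying PCM and whose sink is the output mix; GetInterface the play and buffer-queue interfaces; register a buffer-queue callback; then set the player state to playing and enqueue the first buffer, after which the callback re-enqueues a fresh buffer each time one finishes playing.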
That is the rough flow of audio playback; there is also an audio capture side, which I will cover another time. Audio decoding is mostly identical to the video decoding flow: once you are familiar with the handful of OpenSL ES APIs and the overall sequence, you should be able to get playback working.