android ffmpeg+opensl 音訊解碼播放、暫停、進度seek、時間、上/下一首
類似文章太多,但是大多程式碼都有記憶體溢位的問題,而且都缺少c層呼叫java層的例子,實際上有了參考博文後,還是有很多坑需要自己填。不過,看了很多博主和帖子後還是能夠解決一些問題,但是有些問題,根本找不到,所以我把音訊解碼播放還有控制部分做了比較詳細的例子。
ffmpeg的編譯請參考我之前的文章,有編好的庫檔案在我的下載資源裡,github的demo裡也有。
opensl庫的引用也很簡單,在CMakeLists.txt裡加入OpenSLES就可以了,像這樣
target_link_libraries( native-lib
android
ffmpeg
OpenSLES
${log-lib} )
opensl使用步驟大概分三步
- 建立OpenSLES引擎
//建立OpenSLES引擎
extern "C"
void createEngine() {
SLresult result;
//建立引擎
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
assert(SL_RESULT_SUCCESS == result);
(void) result;
//關聯引擎
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void ) result;
//獲取引擎介面, which is needed in order to create other objects
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
assert(SL_RESULT_SUCCESS == result);
(void) result;
//建立輸出混音器, with environmental reverb specified as a non-required interface
const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
const SLboolean req[1] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
assert(SL_RESULT_SUCCESS == result);
(void) result;
//關聯輸出混音器
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// get the environmental reverb interface
// this could fail if the environmental reverb effect is not available,
// either because the feature is not present, excessive CPU load, or
// the required MODIFY_AUDIO_SETTINGS permission was not requested and granted
//獲取reverb介面
result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
&outputMixEnvironmentalReverb);
if (SL_RESULT_SUCCESS == result) {
result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
outputMixEnvironmentalReverb, &reverbSettings);
(void) result;
}
// ignore unsuccessful result codes for environmental reverb, as it is optional for this example
}
- 建立快取佇列和opensl播放器
// Create the buffer-queue audio player for the given PCM configuration.
// sampleRate is in Hz (or <0 to keep the device default); channel is 1 or 2.
extern "C"
void createBufferQueueAudioPlayer(int sampleRate, int channel) {
SLresult result;
// sampleRate arrives in Hz; OpenSL ES expects milliHz, hence * 1000
if (sampleRate >= 0) {
bqPlayerSampleRate = sampleRate * 1000;
}
// configure the audio source: an Android simple buffer queue with 2 buffers
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
// default PCM format (mono, 8 kHz, 16-bit little-endian); the rate, channel
// count and channel mask are patched below from the real stream parameters
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_8,
SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
if (bqPlayerSampleRate) {
format_pcm.samplesPerSec = bqPlayerSampleRate; // sample rate in milliHz
}
format_pcm.numChannels = (SLuint32) channel;
if (channel == 2) {
format_pcm.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
} else {
format_pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
}
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
// configure the audio sink: route into the output mix built by createEngine()
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
/*
* create audio player:
* fast audio does not support when SL_IID_EFFECTSEND is required, skip it
* for fast audio case
*/
const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_EFFECTSEND,
/*SL_IID_MUTESOLO,*/};
const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
/*SL_BOOLEAN_TRUE,*/ };
// create the player; when an explicit sample rate was given (fast-audio path)
// only the first 2 interfaces are requested, skipping SL_IID_EFFECTSEND
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
bqPlayerSampleRate ? 2 : 3, ids, req);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// realize the player
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// register the buffer-queue callback (fires each time a buffer finishes)
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// get the effect-send interface (only requested on the non-fast path above)
bqPlayerEffectSend = NULL;
if (0 == bqPlayerSampleRate) {
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
&bqPlayerEffectSend);
assert(SL_RESULT_SUCCESS == result);
(void) result;
}
#if 0 // mute/solo is not supported for sources that are known to be mono, as this is
// get the mute/solo interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_MUTESOLO, &bqPlayerMuteSolo);
assert(SL_RESULT_SUCCESS == result);
(void)result;
#endif
// get the volume interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// register the play-event callback used for the end-of-track notification
result = (*bqPlayerPlay)->RegisterCallback(bqPlayerPlay, playOverEvent, NULL);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// only deliver the "play head reached end" event to playOverEvent
result = (*bqPlayerPlay)->SetCallbackEventsMask(bqPlayerPlay, SL_PLAYEVENT_HEADATEND);
assert(SL_RESULT_SUCCESS == result);
(void) result;
// NOTE: the player starts in the PAUSED state (despite the original comment
// saying "start playing"); playback is presumably started later via the
// Java-side pause/play control — confirm against the pause() native method
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PAUSED);
assert(SL_RESULT_SUCCESS == result);
(void) result;
}
- 實現資料回撥方法
// Free the resample buffer allocated for the fast-audio path.
// On the non-fast path (bqPlayerSampleRate == 0) no buffer was ever
// allocated, so this is a no-op.
void releaseResampleBuf(void) {
    if (bqPlayerSampleRate != 0) {
        // fast path owned a resample buffer — release it and clear the
        // pointer so a later call cannot double-free
        free(resampleBuf);
        resampleBuf = NULL;
    }
}
// this callback handler is called every time a buffer finishes playing.
// Contract: audioEngineLock is held while buffers are in flight; every exit
// path that stops feeding the queue must release it.
extern "C"
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
assert(bq == bqPlayerBufferQueue);
assert(NULL == context);
// for streaming playback, replace this test by logic to find and fill the next buffer
if (getPCM() < 0) {// decode failed or end of file: stop feeding, release the lock
pthread_mutex_unlock(&audioEngineLock);
return;
}
if (NULL != nextBuffer && 0 != nextSize) {
SLresult result;
// enqueue another buffer; the lock stays held — this callback will fire
// again when the buffer finishes
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
// the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
// which for this code example would indicate a programming error
if (SL_RESULT_SUCCESS != result) {
pthread_mutex_unlock(&audioEngineLock);
}
(void) result;
} else {
// nothing was decoded: free the resample buffer and release the lock
releaseResampleBuf();
pthread_mutex_unlock(&audioEngineLock);
}
}
基本就這三步,在我理解裡,其實opensl就是一個播放器的api,我們這裡也就是簡單呼叫封裝好的方法而已。
播放音訊當然還需要資料,opensl本身可以讀asset、uri的音訊資料、pcm資料,我這裡因為是為了後面音視訊做鋪墊,所以加入了ffmpeg來解碼音訊資料得到pcm資料後,扔到opensl快取佇列就可以了。
具體的解碼過程如下:
extern "C"
//int createFFmpegAudioPlay(const char *file_name) {
/**
 * JNI entry point: open the audio file at `url`, set up the FFmpeg decoder,
 * the software resampler (output: packed S16 at the source sample rate) and
 * the OpenSL ES player, then prime the buffer queue with the first buffer.
 *
 * Fixes vs. the original: the JNI string is released (leak), the SwrContext
 * from swr_alloc() is no longer leaked (it was overwritten by
 * swr_alloc_set_opts(NULL, ...)), the duplicate swr_init() call is gone,
 * error paths free what was acquired, the duration cast no longer overflows
 * for tracks longer than ~35 min, and a missing codec is a handled error
 * instead of a release-mode-invisible assert.
 *
 * @return 0 on success, -1 on failure (all partial resources released).
 */
int Java_com_lake_ndkaudiotest_MainActivity_play(JNIEnv *env, jobject thiz, jstring url) {
    (void) thiz;
    isEnd = false;
    int i;
    AVCodec *pCodec;
    // Fetch the file path passed down from Java.
    const char *file_name = env->GetStringUTFChars(url, NULL);
    if (file_name == NULL) {
        return -1; // OutOfMemoryError already pending in the JVM
    }
    LOGI("file_name:%s\n", file_name);
    // Register muxers/demuxers (no-op and deprecated in FFmpeg >= 4.0, harmless).
    av_register_all();
    // Allocate the demuxer context.
    pFormatCtx = avformat_alloc_context();
    // Open the input file.
    if (avformat_open_input(&pFormatCtx, file_name, NULL, NULL) != 0) {
        LOGE("Couldn't open input stream.\n");
        env->ReleaseStringUTFChars(url, file_name); // was leaked in the original
        return -1;
    }
    // The path is no longer needed once the file is open.
    env->ReleaseStringUTFChars(url, file_name);
    // Read stream information.
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("Couldn't find stream information.\n");
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Locate the first audio stream.
    audioindex = -1;
    for (i = 0; i < pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            audioindex = i;
            break;
        }
    }
    if (audioindex == -1) {
        LOGE("Couldn't find an audio stream.\n"); // message said "video" before
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Total duration in whole seconds. Divide BEFORE casting: the original
    // cast the int64 duration to int first, overflowing for long files.
    iTotalSeconds = (int) (pFormatCtx->duration / 1000000);
    // Find the decoder for the audio stream.
    AVCodecParameters *pCodecPar = pFormatCtx->streams[audioindex]->codecpar;
    pCodec = avcodec_find_decoder(pCodecPar->codec_id);
    if (pCodec == NULL) { // assert() would vanish in release builds
        LOGE("Unsupported codec.\n");
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    pCodecCtx = avcodec_alloc_context3(pCodec);
    // Copy the stream parameters into the codec context.
    if (avcodec_parameters_to_context(pCodecCtx, pCodecPar) < 0) {
        LOGE("Couldn't copy codec context.\n");
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Open the decoder.
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        LOGE("Couldn't open codec.\n");
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Frame that will receive each decoded (raw) audio frame.
    pFrame = av_frame_alloc();
    // Some demuxers leave channel_layout unset; derive it from the channel
    // count so swr_alloc_set_opts() gets a valid layout.
    if (pCodecCtx->channel_layout == 0) {
        pCodecCtx->channel_layout =
                (uint64_t) av_get_default_channel_layout(pCodecCtx->channels);
    }
    // Set up the resampler: same layout and rate, output format packed S16.
    swr = swr_alloc_set_opts(NULL,
                             pCodecCtx->channel_layout,
                             AV_SAMPLE_FMT_S16,
                             pCodecCtx->sample_rate,
                             pCodecCtx->channel_layout,
                             pCodecCtx->sample_fmt,
                             pCodecCtx->sample_rate,
                             0, NULL);
    if (!swr || swr_init(swr) < 0) { // single swr_init — duplicate call removed
        swr_free(&swr);
        av_frame_free(&pFrame);
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Output buffer for the converted PCM of one frame.
    int outputBufferSize = 8192;
    outputBuffer = (uint8_t *) malloc(sizeof(uint8_t) * outputBufferSize);
    if (outputBuffer == NULL) {
        swr_free(&swr);
        av_frame_free(&pFrame);
        avcodec_free_context(&pCodecCtx);
        avformat_close_input(&pFormatCtx);
        return -1;
    }
    // Create the OpenSL ES engine and player, then prime the buffer queue
    // by decoding and enqueueing the first buffer.
    createEngine();
    createBufferQueueAudioPlayer(pCodecCtx->sample_rate, pCodecCtx->channels);
    bqPlayerCallback(bqPlayerBufferQueue, NULL);
    return 0;
}
opensl獲取pcm資料方法
/**
* 讀取pcm資料
* @return
*/
int getPCM() {
while (av_read_frame(pFormatCtx, &packet) >= 0) {
if (packet.stream_index == audioindex) {
int ret = avcodec_send_packet(pCodecCtx, &packet);
timestamp = packet.pts * av_q2d(pFormatCtx->streams[audioindex]->time_base);
if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
break;
ret = avcodec_receive_frame(pCodecCtx, pFrame);
if (ret < 0 && ret != AVERROR_EOF)
break;
//處理不同的格式
if (pCodecCtx->sample_fmt == AV_SAMPLE_FMT_S16P) {
nextSize = av_samples_get_buffer_size(pFrame->linesize, pCodecCtx->channels,
pCodecCtx->frame_size, pCodecCtx->sample_fmt,
1);
} else {
av_samples_get_buffer_size(&nextSize, pCodecCtx->channels, pCodecCtx->frame_size,
pCodecCtx->sample_fmt, 1);
}
// 音訊格式轉換
swr_convert(swr, &outputBuffer, pFrame->nb_samples,
(uint8_t const **) (pFrame->extended_data),
pFrame->nb_samples);
nextBuffer = outputBuffer;
av_packet_unref(&packet);
return 0;
}
av_packet_unref(&packet);//packet不用了一定要釋放掉記憶體,網上很多例子的記憶體溢位就是因為沒有釋放packet的記憶體。
}
LOGI("getPCM_shutdown");
return -1;
}
有了以上方法,例子參考,基本播放音訊是沒有問題了。其餘的問題都是看個人需求了,我這裡就把我的做法分享一下,由於本人c不是太好,雖然實現了,但是從架構上不清楚是不是這樣做合不合理,僅供大家參考。
我先說一下我的需求,c層音訊播放出來了,但是在java層,我想顯示時間,還有控制音訊的播放/暫停,以及seek拖動控制,播放完成自動跳下一首。
一開始,我試了一下opensl本身seek功能,結果獲取seek介面就失敗了,不得不從ffmpeg解碼層考慮seek功能,還好,找到了ffmpeg的seek功能,實現起來完全沒有問題,後面的視訊seek可以參考一下。
其實就是呼叫以下方法
// target position: convert seconds into the stream's time_base units
int64_t seek_pos = (int64_t) (seekTime /av_q2d(pFormatCtx->streams[audioindex]->time_base));
// perform the seek; audio seeking is effectively instantaneous
if (av_seek_frame(pFormatCtx, audioindex, seek_pos, AVSEEK_FLAG_BACKWARD) < 0) {
LOGE("%s, av_seek_frame() seek to %.3f failed!", __FUNCTION__,(double) seek_pos / AV_TIME_BASE);
return -2;
}
// flush the decoder's internal buffers so stale pre-seek frames are dropped
avcodec_flush_buffers(pCodecCtx);
時間位置獲取也非常簡單,根據packet獲取就可以了
timestamp = packet.pts * av_q2d(pFormatCtx->streams[audioindex]->time_base);
然後java層來呼叫以下c層實現的seek方法就可以了
然後下來就時間的回撥,之前想直接在回撥函式裡去調java層的方法,但是回撥觸發到第二遍的時候,就報錯了,不知道是不是JNIEnv使用的關係,但是搞了半天就是不可以,沒辦法,只能自己起一個方法,用一個while迴圈去監聽我所需要回調的資料,然後在這個while裡去調java的方法就不會崩潰,所以想直接在opensl的回撥方法去呼叫java層方法感覺有點不科學,不知道能不能實現。
extern "C"
/**
 * JNI entry point: endless monitor loop, intended to run on a dedicated Java
 * thread. Polls the globals (timestamp, iTotalSeconds, isEnd) every 0.1 s and
 * pushes changes up to Java via showTime(int) / setToatalTime(int) /
 * isPlayEnd(boolean).
 *
 * Fix vs. the original: the GetMethodID results are now checked. If any Java
 * method is missing or renamed, GetMethodID returns NULL with a pending
 * NoSuchMethodError, and the unchecked CallVoidMethod would crash the VM.
 */
void Java_com_lake_ndkaudiotest_MainActivity_showtime(JNIEnv *env, jobject thiz) {
    int seconds = -1;
    int totalSeconds = -1;
    bool end = false;
    jclass jclazz = env->GetObjectClass(thiz);
    jmethodID jmethodIDS = env->GetMethodID(jclazz, "showTime", "(I)V");
    // "setToatalTime" (sic) — must stay misspelled to match the Java method
    jmethodID jmethodIDT = env->GetMethodID(jclazz, "setToatalTime", "(I)V");
    jmethodID jmethodIDE = env->GetMethodID(jclazz, "isPlayEnd", "(Z)V");
    if (jmethodIDS == NULL || jmethodIDT == NULL || jmethodIDE == NULL) {
        env->ExceptionClear(); // clear the pending NoSuchMethodError
        return;
    }
    while (true) {
        if (timestamp != -1) { // report the current playback time to Java
            if (seconds != timestamp) {
                seconds = timestamp;
                env->CallVoidMethod(thiz, jmethodIDS, (jint) timestamp);
            }
        }
        if (iTotalSeconds != -1) { // report the total duration to Java
            if (totalSeconds != iTotalSeconds) {
                totalSeconds = iTotalSeconds;
                env->CallVoidMethod(thiz, jmethodIDT, (jint) iTotalSeconds);
            }
        }
        if (isEnd != end) { // report end-of-playback to Java
            end = isEnd;
            env->CallVoidMethod(thiz, jmethodIDE, (jboolean) isEnd);
        }
        usleep(100000); // sleep 0.1 s so the poll loop doesn't hog the CPU
    }
}
這裡利用了監聽全域性變數,只要一變化就呼叫java層方法。
java層根據傳過來的引數再進行其他處理。
java層程式碼:
package com.lake.ndkaudiotest;
import android.content.Context;
import android.graphics.Color;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.SeekBar;
import android.widget.TextView;
import java.io.File;
/**
 * Demo player UI: lists the audio files in <sdcard>/MyLocalPlayer and drives
 * the native FFmpeg/OpenSL ES player (play/pause/seek/prev/next). The native
 * layer looks up showTime(int), setToatalTime(int) and isPlayEnd(boolean) by
 * name via JNI — do not rename them (including the "Toatal" typo).
 */
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
private SeekBar mSeekBar;
private Thread timeThread;
private int mProgress;// playback progress (seconds)
private ListView listview;
private TextView tVTime;// current-time label
private TextView tVName;// track-name label
private TextView tTTime;// total-time label
private int toTalTime;// total duration in seconds
private Button mBtnPlayOrPause;
private Button mBtnLast;
private Button mBtnNext;
private String inputurl;// path of the file being played
boolean isFirst = true;
private int curItem = 0;// index of the current track
boolean playing = false;// whether audio is currently playing
private int length = 0;// number of files in the list
// Used to load the 'native-lib' library on application startup.
static {
System.loadLibrary("native-lib");
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
initView();
mSeekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
Log.e("lake", "onProgressChanged: " + progress);
mProgress = progress;
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// pause while the user is dragging the thumb
pause(true);
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
// jump to the chosen position once the drag ends
seek(mProgress);
}
});
}
/**
* Initialize the views, wire up the listeners, build the file list and
* auto-select the current item.
*/
private void initView() {
listview = findViewById(R.id.listview);
tVName = findViewById(R.id.filename);
tVTime = findViewById(R.id.showtime);
tTTime = findViewById(R.id.totaltime);
mSeekBar = findViewById(R.id.seekbar);
mBtnPlayOrPause = findViewById(R.id.playorpause);
mBtnLast = findViewById(R.id.last);
mBtnNext = findViewById(R.id.next);
mBtnPlayOrPause.setOnClickListener(this);
mBtnLast.setOnClickListener(this);
mBtnNext.setOnClickListener(this);
final String folderurl = Environment.getExternalStorageDirectory().getPath();
// NOTE(review): listFiles() returns null when the folder is missing or
// unreadable — files.length below would then throw an NPE; verify on device.
final File[] files = new File(folderurl + "/MyLocalPlayer").listFiles();
length = files.length;
final ListFileAdapter myListAdapter = new ListFileAdapter(this, files);
listview.setAdapter(myListAdapter);
listview.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
curItem = position;
myListAdapter.setSelectItem(position);
myListAdapter.notifyDataSetInvalidated();
inputurl = folderurl + "/MyLocalPlayer/" + files[position].getName();
// strip the dot plus 3-letter extension from the displayed name
tVName.setText(files[position].getName().substring(0,files[position].getName().length()-4));
if (!isFirst) {
// switching tracks: stop the old one and reset the seek bar
stop();
mSeekBar.setProgress(0);
}
play(inputurl);
if (isFirst) {
isFirst = false;
// start the monitor thread exactly once; native showtime() loops
// forever, calling back into showTime()/setToatalTime()/isPlayEnd()
timeThread = new Thread(new Runnable() {
@Override
public void run() {
showtime();
}
});
timeThread.start();
}
pause(!playing);
}
});
clickListItem(curItem);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.playorpause:// toggle play / pause
pause(playing);
mBtnPlayOrPause.setText(playing ? "Play" : "Pause");
playing = !playing;
break;
case R.id.last: {// previous track (wraps to the end of the list)
int item = (curItem - 1) < 0 ? length-1 : curItem - 1;
clickListItem(item);
break;
}
case R.id.next: {// next track (wraps to the start of the list)
int item = (curItem + 1) >= length ? 0 : curItem + 1;
clickListItem(item);
break;
}
default:
break;
}
}
/**
* Programmatically click a list entry, reusing the item-click listener.
* @param position index of the item to select
*/
public void clickListItem(int position){
AdapterView.OnItemClickListener onItemClickListener = listview.getOnItemClickListener();
if (onItemClickListener != null) {
onItemClickListener.onItemClick(listview, null, position, position);
listview.setSelection(position);
}
}
/**
* Restart the player: stop, reset the seek bar, replay the current file.
*/
public void shutdown() {
stop();
mSeekBar.setProgress(0);
play(inputurl);
}
/**
* Called from native code with the current playback position.
*
* @param time current position in seconds
*/
public void showTime(final int time) {
final String n = resetTimeInt(time / 3600) + ":" + resetTimeInt(time % 3600 / 60) + ":" + resetTimeInt(time % 60);
runOnUiThread(new Runnable() {
@Override
public void run() {
tVTime.setText(n);
mSeekBar.setProgress(time);
}
});
Log.e("lake", "showTime: " + n);
}
/**
* Called from native code with the total track duration.
* (Name intentionally misspelled — it must match the JNI GetMethodID lookup.)
*
* @param total duration in seconds
*/
public void setToatalTime(int total) {
toTalTime = total;
// NOTE(review): setMax() runs on the native monitor thread here, while
// setText() below is posted to the UI thread — confirm this is safe.
mSeekBar.setMax(total);
Log.e("lake", "toTalTime: " + toTalTime);
final String t = resetTimeInt(total / 3600) + ":" + resetTimeInt(total % 3600 / 60) + ":" + resetTimeInt(total % 60);
runOnUiThread(new Runnable() {
@Override
public void run() {
tTTime.setText(t);
}
});
}
/**
* Called from native code when the track finishes; advances to the next one.
*
* @param isEnd true when playback reached the end of the file
*/
public void isPlayEnd(boolean isEnd) {
Log.e("lake", "isPlayEnd: " + isEnd);
if (isEnd) {
runOnUiThread(new Runnable() {
@Override
public void run() {
int item = (curItem + 1) >= length ? 0 : curItem + 1;
clickListItem(item);
}
});
}
}
// Native methods implemented in native-lib.
public native void play(String url);
public native void stop();
public native void pause(boolean play);
public native void seek(int seekTime);
public native void showtime();
// Zero-pads a time component to two digits ("7" -> "07").
public String resetTimeInt(int time) {
if (time < 10) {
return "0" + time;
} else {
return time + "";
}
}
// Simple adapter that shows file names and highlights the selected row.
class ListFileAdapter extends BaseAdapter {
private Context context;
private File[] files;
public ListFileAdapter(Context context, File[] files) {
this.context = context;
this.files = files;
}
@Override
public int getCount() {
return files.length;
}
@Override
public Object getItem(int position) {
return files[position];
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ViewHolder viewHolder = null;
if (convertView == null) {
viewHolder = new ViewHolder();
convertView = LayoutInflater.from(context).inflate(R.layout.list_item, null);
viewHolder.mTextView = (TextView) convertView.findViewById(R.id.filename);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
viewHolder.mTextView.setText(files[position].getName());
// gray background marks the currently selected track
if (position == selectItem) {
convertView.setBackgroundColor(Color.GRAY);
} else {
convertView.setBackgroundColor(Color.WHITE);
}
return convertView;
}
class ViewHolder {
TextView mTextView;
}
public void setSelectItem(int selectItem) {
this.selectItem = selectItem;
}
private int selectItem = -1;
}
}