Android全格式播放器開發(第五節ffplay android移植)
第五節ffplay android移植
工程地址:http://code.google.com/p/kudou-player/
之前我們已經做好了準備工作,現在來做ffplay的移植,這是最難的一關,也是最複雜的一關。這裡主要分為兩步:ffplay使用sdl-1.3移植和ffplay的android介面編寫除錯。
1. Ffplay sdl-1.3移植
android上能夠使用的sdl版本必須是sdl-1.3及以上,sdl-1.3的移植如下,主要修改視訊顯示介面,將SDL_DisplayYUVOverlay(vp->bmp, &rect) 改為SDL_CreateTexture()方式,主要是將YUV的顯示模式改為RGB的顯示模式。主要修改點如下:
變數定義修改
以前使用SDL_Surfaces顯示,新版使用SDL_Window作為顯示區域,新版支援多視窗顯示,SDL_Renderer是顯示程式,SDL_Texture為資料容器。
#ifdef SDL12
SDL_Overlay *bmp;
#else
AVFrame *pFrameRGB;
int numBytes;
uint8_t *buffer;
#endif
#ifdef SDL12
static SDL_Surface*screen = NULL;
#else
staticSDL_Window *window = NULL;
staticSDL_Renderer *renderer = NULL;
staticSDL_Texture *texture = NULL;
#endif
以上是變數定義部分,我們使用的是texture代替Overlay的方式
顯示建立修改
使用SDL_CreateRenderer來建立用於顯示的Render:
static int video_open(VideoState *is, intforce_set_video_mode)的修改:
#ifdef SDL12
if (screen && is->width ==screen->w && screen->w == w
&& is->height==screen->h && screen->h == h && !force_set_video_mode)
return 0;
screen = SDL_SetVideoMode(w, h, 0, flags);
if(!screen) {
fprintf(stderr, "SDL: could notset video mode - exiting\n");
do_exit(is);
}
if (!window_title)
window_title = input_filename;
SDL_WM_SetCaption(window_title,window_title);
is->width = screen->w;
is->height = screen->h;
#else
//SDL_SetVideoMode(w, h, 32, 0);
window =SDL_CreateWindow("MySDL", SDL_WINDOWPOS_CENTERED,
SDL_WINDOWPOS_CENTERED, w, h,SDL_WINDOW_SHOWN|SDL_WINDOW_OPENGL);
if (window == NULL ) {
fprintf(stderr, "SDL: could notset video window - exiting\n");
exit(1);
}
//renderer = SDL_CreateRenderer(window, -1,SDL_RENDERER_ACCELERATED);
renderer = SDL_CreateRenderer(window, -1,SDL_RENDERER_TARGETTEXTURE);
if (!renderer) {
fprintf(stderr, "Couldn't setcreate renderer: %s\n", SDL_GetError());
exit(1);
}
is->width = w;
is->height = h;
#endif
圖片記憶體空間分配修改
static void alloc_picture(AllocEventProps*event_props) 圖片獲取部分修改
#ifdef SDL12
if (vp->bmp)
SDL_FreeYUVOverlay(vp->bmp);
#else
if (vp->pFrameRGB) {
// we already have one make another,bigger/smaller
if (vp->pFrameRGB) {
av_free(vp->pFrameRGB);
vp->pFrameRGB = 0;
}
if (vp->buffer) {
av_free(vp->buffer);
vp->buffer = 0;
}
}
#endif
vp->width = frame->width;
vp->height = frame->height;
vp->pix_fmt = frame->format;
//video_open(event_props->is, 0);
#ifdef SDL12
vp->bmp =SDL_CreateYUVOverlay(vp->width, vp->height,
SDL_YV12_OVERLAY,
screen);
if (!vp->bmp ||vp->bmp->pitches[0] < vp->width) {
/* SDL allocates a buffer smaller thanrequested if the video
* overlay hardware is unable tosupport the requested size. */
fprintf(stderr, "Error: the videosystem does not support an image\n"
"size of %dx%d pixels. Tryusing -lowres or -vf \"scale=w:h\"\n"
"to reduce the imagesize.\n", vp->width, vp->height );
do_exit(is);
}
#else
vp->pFrameRGB = avcodec_alloc_frame();
vp->width =is->video_st->codec->width;
vp->height = is->video_st->codec->height;
vp->numBytes =avpicture_get_size(dst_fix_fmt, vp->width, vp->height);
vp->buffer = (uint8_t *)av_malloc(vp->numBytes * sizeof(uint8_t));
if (!vp->pFrameRGB || !vp->buffer) {
printf("can not get frame memory,exit\n");
}
avpicture_fill((AVPicture*)vp->pFrameRGB, vp->buffer, dst_fix_fmt,
vp->width, vp->height);
#endif
色彩轉換方面修改
在static int queue_picture(VideoState *is,…)函式中,之前版本使用的YUV顯示,Android只能顯示RGB資料,因此改用RGB顯示,主要使用的sws_scale(…)函式,此函式是一個非常耗時函式,對此函式的優化對畫面顯示和程式效率都有很大幫助。 sws_scale(…)的畫面顯示是沒有問題,但是顯示效率不夠優化,此內容在後面優化部分再說
#ifdef SDL12
if (vp->bmp) {
#else
if (vp->pFrameRGB) {
#endif
#ifdef SDL12
AVPicture pict = { {0}};
/* get a pointer on the bitmap */
SDL_LockYUVOverlay (vp->bmp);
pict.data[0] =vp->bmp->pixels[0];
pict.data[1] =vp->bmp->pixels[2];
pict.data[2] = vp->bmp->pixels[1];
SDL_CondWait(is->pictq_cond,is->pictq_mutex);
}
}
SDL_UnlockMutex(is->pictq_mutex);
if (is->videoq.abort_request)
return -1;
}
/* if the frame is not skipped, thendisplay it */
#ifdef SDL12
if (vp->bmp) {
#else
if (vp->pFrameRGB) {
#endif
#ifdef SDL12
AVPicture pict = { {0}};
/* get a pointer on the bitmap */
SDL_LockYUVOverlay (vp->bmp);
pict.data[0] = vp->bmp->pixels[0];
pict.data[1] =vp->bmp->pixels[2];
pict.data[2] =vp->bmp->pixels[1];
pict.linesize[0] =vp->bmp->pitches[0];
pict.linesize[1] =vp->bmp->pitches[2];
pict.linesize[2] =vp->bmp->pitches[1];
is->img_convert_ctx =sws_getCachedContext(is->img_convert_ctx,
vp->width, vp->height,vp->pix_fmt, vp->width, vp->height,
PIX_FMT_YUV420P, sws_flags,NULL, NULL, NULL);
if (is->img_convert_ctx == NULL) {
LOGV( "Cannot initialize theconversion context\n");
exit(1);
}
sws_scale(is->img_convert_ctx,src_frame->data, src_frame->linesize,
0, vp->height, pict.data,pict.linesize);
vp->sample_aspect_ratio =av_guess_sample_aspect_ratio(is->ic, is->video_st, src_frame);
/* update the bitmap content */
SDL_UnlockYUVOverlay(vp->bmp);
#else
if (is->img_convert_ctx == NULL ) {
is->img_convert_ctx =sws_getCachedContext(is->img_convert_ctx,
vp->width, vp->height,vp->pix_fmt, vp->width, vp->height,
dst_fix_fmt, sws_flags,NULL, NULL, NULL );
if (is->img_convert_ctx == NULL) {
LOGV( "Cannot initializethe conversion context!\n");
exit(1);
}
}
sws_scale(is->img_convert_ctx,src_frame->data, src_frame->linesize, 0,
is->video_st->codec->height, vp->pFrameRGB->data,
vp->pFrameRGB->linesize);
vp->sample_aspect_ratio =av_guess_sample_aspect_ratio(is->ic,
is->video_st, src_frame);
#endif
顯示部分修改
在static void video_image_display(VideoState *is)中,使用SDL_UpdateTexture(…)來填充顯示區域,使用SDL_RenderPresent(...)來將Rect中的資料重新整理到螢幕:
#ifdef SDL12
if (vp->bmp) {
#else
if (vp->pFrameRGB) {
#endif
if (vp->sample_aspect_ratio.num ==0)
aspect_ratio = 0;
else
aspect_ratio =av_q2d(vp->sample_aspect_ratio);
if (aspect_ratio <= 0.0)
aspect_ratio = 1.0;
aspect_ratio *= (float) vp->width /(float) vp->height;
/* XXX: we suppose the screen has a 1.0pixel ratio */
height = is->height;
width = ((int) rint(height *aspect_ratio)) & ~1;
if (width > is->width) {
width = is->width;
height = ((int) rint(width /aspect_ratio)) & ~1;
}
x= (is->width - width) / 2;
y = (is->height - height) / 2;
is->no_background = 0;
rect.x = is->xleft + x;
rect.y = is->ytop + y;
rect.w = FFMAX(width, 1);
rect.h = FFMAX(height, 1);
#ifdef SDL12
SDL_DisplayYUVOverlay(vp->bmp,&rect);
#else
if (NULL == texture) {
texture =SDL_CreateTexture(renderer, display_fix_fmt,
SDL_TEXTUREACCESS_STATIC,is->video_st->codec->width,
is->video_st->codec->height);
if (!texture) {
LOGV( "Couldn't set createtexture: %s\n", SDL_GetError());
exit(1);
}
SDL_SetTextureBlendMode(texture,SDL_BLENDMODE_BLEND );
//best scale mode
//SDL_SetTextureScaleMode(texture,SDL_TEXTURESCALEMODE_BEST);
}
SDL_RenderClear(renderer);
SDL_UpdateTexture(texture, NULL,vp->pFrameRGB->data[0],
vp->pFrameRGB->linesize[0]);
SDL_RenderCopy(renderer, texture, NULL,&rect);
g_current_duration = (int)get_master_clock(is) * 1000;
SDL_RenderPresent(renderer);
#endif
流關閉修改
static void stream_close(VideoState *is) 修改如下:
#ifdef SDL12
if (vp->bmp) {
SDL_FreeYUVOverlay(vp->bmp);
vp->bmp = NULL;
}
#else
if (vp->pFrameRGB) {
av_free(vp->pFrameRGB);
vp->pFrameRGB = 0;
}
if (vp->buffer) {
av_free(vp->buffer);
vp->buffer = 0;
}
#endif
以上基本上就是所有基於SDL-1.3的ffplay修改。主要是將YUV(Overlay)的方式改為RGB(Texture)方式,SDL內部使用OpenGL ES方式進行渲染。由於SDL已經封裝好了,我們這裡也就不去探究了。如果不想使用SDL作為顯示,也可以直接使用OpenGL ES介面進行顯示,但是移植的代價就大些。
2. Android介面編寫
對於播放器介面的設計,我們可以參考Android的MediaPlayer設計(http://developer.android.com/reference/android/media/MediaPlayer.html)。播放器中,最關鍵的有Init(初始化)、InitEd(初始化完成,可以設定回撥函式)、Play(播放)、Stop(停止),Seek(拖動)、Exit(退出),getDuration(獲取播放位置)、getTotalDuration(獲取播放時長)。建立一個叫native.cpp的檔案,介面設計如下:
#include <unistd.h>
#include <stdio.h>   /* snprintf */
#include <string.h>  /* string helpers */
#include <jni.h>
#include <android/log.h>
extern "C" {
#include "play.h"
}
#ifdefANDROID
/* Includethe SDL main definition header */
/*******************************************************************************
Functions called by JNI
*******************************************************************************/
// Libraryinit
//extern"C" jint JNI_OnLoad(JavaVM* vm, void* reserved)
//{
// return JNI_VERSION_1_4;
//}
// Start upthe SDL app
/* JNI entry point: initialize the native player core.
 * Thin wrapper around player_init(); returns its status code to Java. */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerInit(JNIEnv* env, jobject obj)
{
return player_init();
}
/* JNI entry point: copy the Java file name into a local buffer and hand it
 * to player_prepare().
 * Fixes: the original strncpy(dst, src, 1024) left localFileName without a
 * NUL terminator when the UTF string was >= 1024 bytes; snprintf always
 * terminates (truncating long paths safely). Also checks GetStringUTFChars
 * for NULL (it returns NULL on OOM, with a pending Java exception). */
extern "C" int Java_org_libsdl_app_SDLActivity_PlayerPrepare(JNIEnv *env, jobject obj, jstring jfileName)
{
    char localFileName[1024];
    const char *fileString = env->GetStringUTFChars(jfileName, NULL);
    if (fileString == NULL) {
        return -1; /* OOM in the JVM; exception already pending */
    }
    snprintf(localFileName, sizeof localFileName, "%s", fileString);
    env->ReleaseStringUTFChars(jfileName, fileString);
    return player_prepare(localFileName);
}
/* JNI entry point: run the player's main loop (wraps player_main()). */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerMain(JNIEnv* env, jobject obj)
{
return player_main();
}
/* JNI entry point: shut the player down (wraps player_exit()). */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerExit(JNIEnv* env, jobject obj)
{
return player_exit();
}
/* JNI entry point: seek playback to `msec` milliseconds into the stream
 * (wraps seekTo()). */
extern "C" int Java_org_libsdl_app_SDLActivity_PlayerSeekTo(JNIEnv *env, jobject obj, jint msec)
{
    return seekTo((int) msec);
}
/* JNI entry point: toggle/request pause of the stream (wraps streamPause()). */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerPause(JNIEnv* env, jobject obj)
{
return streamPause();
}
/* JNI entry point: query whether playback is active (wraps isPlay()). */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerIsPlay(JNIEnv*env, jobject obj)
{
return isPlay();
}
/* JNI entry point: total stream duration (wraps getDuration()).
 * NOTE(review): despite the article text, getDuration appears to be the
 * total length and getCurrentPosition the playhead — verify in play.h. */
extern"C" int Java_org_libsdl_app_SDLActivity_PlayerGetDuration(JNIEnv*env, jobject obj)
{
return getDuration();
}
extern"C" intJava_org_libsdl_app_SDLActivity_PlayergetCurrentPosition(JNIEnv* env, jobject obj)
{
return getCurrentPosition();
}
#endif /*ANDROID */
3. Jni和Java通訊
Java呼叫Jni介面是比較簡單的,但是如果Jni中的程式想通知Java程式某一事件已經發生,就需要Jni和Java通訊了。Jni上也有C呼叫Java的例子,主要分為兩種情況:呼叫執行緒為Jvm執行緒或者為原生子執行緒,方法有點不同。
我們本次設計的通訊介面是採用類似訊號的方式,在C中呼叫Java中預先寫好的方法,然後觸發事件,C中函式如下:
開啟第四節中工程,找到jni/SDL/src/core/android/SDL_android.cpp
新增:
/* Deliver event `id` from native code to Java by invoking the
 * pre-registered static method (mActivityClass / mNotify).
 * May be called from any native thread: if the thread is not yet attached
 * to the JVM (GetEnv fails), attach it for the duration of the call.
 * Fixes: `isAttached` was declared `static`, so it kept its value across
 * calls — after one attach/detach cycle, a later call on a thread that was
 * ALREADY attached (GetEnv succeeded) would still see isAttached == true
 * and wrongly DetachCurrentThread a thread this function did not attach.
 * It must be an ordinary local, reset on every invocation. */
extern "C" void Android_Notify(int id)
{
    int status;
    JNIEnv *env;
    bool isAttached = false; /* true only if *this call* attached the thread */
    status = mJavaVM->GetEnv((void **)&env, JNI_VERSION_1_4);
    if (status < 0) {
        LOGE("callback_handler: failed to get JNI environment, assuming native thread");
        status = mJavaVM->AttachCurrentThread(&env, NULL);
        if (status < 0) {
            LOGE("callback_handler: failed to attach current thread");
            return;
        }
        isAttached = true;
    }
    env->CallStaticVoidMethod(mActivityClass, mNotify, id);
    if (isAttached) {
        mJavaVM->DetachCurrentThread();
    }
}
將標頭檔案新增到
jni/SDL/include/SDL_notify.h中
如下:
#ifndef NOTIFY_H
#define NOTIFY_H
/* Signal event `id` from native (C/C++) code up to the Java layer.
 * Implemented in SDL_android.cpp (Android_Notify). */
void Android_Notify(int id);
#endif /* NOTIFY_H */
這樣就完成了
4. 編譯工程
修改jni/src/Android.mk如下:
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

# Main player module: SDL glue + ffplay port + JNI bindings.
LOCAL_MODULE := main

SDL_PATH := ../SDL
FFMPEG_PATH := ../ffmpeg

LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include \
	$(LOCAL_PATH)/$(FFMPEG_PATH)/include \
	$(LOCAL_PATH)/include

# Add your application source files here...
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.cpp kuplayer_android.c native.cpp

LOCAL_CFLAGS += -DANDROID
LOCAL_SHARED_LIBRARIES := SDL
LOCAL_LDLIBS := -lGLESv1_CM -llog
# Link the prebuilt ffmpeg directly by path.
LOCAL_LDLIBS += $(LOCAL_PATH)/"libffmpeg.so"

include $(BUILD_SHARED_LIBRARY)

include $(CLEAR_VARS)

# Prebuilt ffmpeg shared library.
LOCAL_MODULE := ffmpeg
NDK_MODULE_PATH := $(LOCAL_PATH)
LOCAL_SRC_FILES := libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)
將第三節中的編譯的ffmpeg的標頭檔案拷貝過來
cd /home/xdpan/work/android-project/jni/
cp -R ../../ffmpeg-0.11.1/android/armv7-a/include./
cp ../../ffmpeg-0.11.1/android/armv7-a/lib/libffmpeg.so ./src/
在jni目錄下執行ndk-build -j2就編譯好了
原始檔如下(或者到虛擬機器相應的目錄中提取)