1. Global initialization

The native .so libraries are loaded the first time an IjkMediaPlayer object is created.

public IjkMediaPlayer(IjkLibLoader libLoader) {
    initPlayer(libLoader);
}
private void initPlayer(IjkLibLoader libLoader) {
    // load the .so libraries
    loadLibrariesOnce(libLoader);
    // initNativeOnce only prints a log line
    initNativeOnce();
    Looper looper;
    if ((looper = Looper.myLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else if ((looper = Looper.getMainLooper()) != null) {
        mEventHandler = new EventHandler(this, looper);
    } else {
        mEventHandler = null;
    }
    /*
     * Native setup requires a weak reference to our object. It's easier to
     * create it here than in C++.
     */
    native_setup(new WeakReference<IjkMediaPlayer>(this));
}
public static void loadLibrariesOnce(IjkLibLoader libLoader) {
    synchronized (IjkMediaPlayer.class) {
        if (!mIsLibLoaded) {
            if (libLoader == null)
                libLoader = sLocalLibLoader;
            libLoader.loadLibrary("ijkffmpeg");
            libLoader.loadLibrary("ijkplayer");
            mIsLibLoaded = true;
        }
    }
}

Once the .so libraries have been loaded, the Java VM calls the JNI_OnLoad function exported by the library. Global initialization happens there: the JNI methods are registered, and FFmpeg's demuxers, protocol handlers and decoders are registered.

// ijkplayer_jni.c
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
    JNIEnv* env = NULL;
    g_jvm = vm;
    if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
        return -1;
    }
    assert(env != NULL);
    pthread_mutex_init(&g_clazz.mutex, NULL );
    // FindClass returns LocalReference
    // look up the Java class and store it in g_clazz.clazz
    IJK_FIND_JAVA_CLASS(env, g_clazz.clazz, JNI_CLASS_IJKPLAYER);
    // register the JNI methods
    (*env)->RegisterNatives(env, g_clazz.clazz, g_methods, NELEM(g_methods) );
    ijkmp_global_init();
    ijkmp_global_set_inject_callback(inject_callback);
    FFmpegApi_global_init(env);
    SDL_JNI_OnLoad(vm,reserved);
    return JNI_VERSION_1_4;
}
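/*
 * For reference, a hedged sketch of what the g_methods table consumed by
 * RegisterNatives above looks like (illustrative entries only; the real table
 * in ijkplayer_jni.c is much longer).  Each entry maps a native method
 * declared in IjkMediaPlayer.java to a C function in this file.
 */
static const JNINativeMethod g_methods_sketch[] = {
    { "native_setup",  "(Ljava/lang/Object;)V", (void *) IjkMediaPlayer_native_setup },
    { "_prepareAsync", "()V",                   (void *) IjkMediaPlayer_prepareAsync },
    { "_start",        "()V",                   (void *) IjkMediaPlayer_start },
};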
// ijkplayer.c
void ijkmp_global_init()
{
    ffp_global_init();
}
// ff_ffplay.c
void ffp_global_init()
{
    if (g_ffmpeg_global_inited)
        return;
    ALOGD("ijkmediaplayer version : %s", ijkmp_version());
    // register the compiled-in demuxers, protocols and codecs
    avcodec_register_all();
    av_register_all();
    // register ijk's extended protocols
    ijkav_register_all();
    // initialize network support
    avformat_network_init();
    av_lockmgr_register(lockmgr);
    av_log_set_callback(ffp_log_callback_brief);
    // initialize the flush packet (used as a seek sentinel)
    av_init_packet(&flush_pkt);
    flush_pkt.data = (uint8_t *)&flush_pkt;
    g_ffmpeg_global_inited = true;
}
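
One detail worth calling out is flush_pkt: its data pointer is set to point at the packet itself, so it can later be recognized with a plain pointer comparison. Below is a minimal sketch of how such a sentinel is typically consumed in the ffplay-style decode loop; handle_queued_packet is a made-up helper for illustration, not a function from the ijkplayer source.

#include <libavcodec/avcodec.h>

extern AVPacket flush_pkt;   // the sentinel set up in ffp_global_init (assumed visible here)

// When a seek happens, the demuxer pushes flush_pkt into every packet queue;
// the decoder thread recognizes it by its data pointer and flushes the codec
// instead of decoding it.
static int handle_queued_packet(AVCodecContext *avctx, AVPacket *pkt)
{
    if (pkt->data == flush_pkt.data) {
        avcodec_flush_buffers(avctx);            // drop frames buffered before the seek
        return 0;
    }
    int ret = avcodec_send_packet(avctx, pkt);   // normal decode path
    av_packet_unref(pkt);
    return ret;
}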

2. Object initialization

Native IjkMediaPlayer initialization

Now for object initialization. Once the Java-layer IjkMediaPlayer has been constructed, native_setup is called to create and initialize the native IjkMediaPlayer. The native IjkMediaPlayer holds an FFPlayer, and all subsequent playback operations are carried out by that FFPlayer.

struct IjkMediaPlayer {
    volatile int ref_count;
    pthread_mutex_t mutex;
    FFPlayer *ffplayer; // the actual player
    // native message loop
    int (*msg_loop)(void*);
    SDL_Thread *msg_thread;
    SDL_Thread _msg_thread;
    // player state, mirrors the Java-layer state
    int mp_state;
    // data source (url)
    char *data_source;
    // weak reference to the Java-layer player
    void *weak_thiz;
    int restart;
    int restart_from_beginning;
    // pending seek request
    int seek_req;
    long seek_msec;
};
// ijkplayer_jni.c
static void
IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    // create and initialize the native IjkMediaPlayer
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);
    // store the native pointer in the Java object
    jni_set_media_player(env, thiz, mp);
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    // set up the inject callbacks used by custom protocols
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    // set the callback used to select a MediaCodec hardware decoder
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));
LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
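/*
 * Sketch of the pointer bridge behind jni_set_media_player/jni_get_media_player
 * (simplified; the real functions also reference-count the player under a lock).
 * The Java object keeps the native pointer in a long field, assumed here to be
 * IjkMediaPlayer.mNativeMediaPlayer, and every JNI entry point reads it back.
 */
static jfieldID g_field_native_player;   // hypothetical cached jfieldID of that long field

static IjkMediaPlayer *jni_get_media_player_sketch(JNIEnv *env, jobject thiz)
{
    return (IjkMediaPlayer *) (intptr_t) (*env)->GetLongField(env, thiz, g_field_native_player);
}

static void jni_set_media_player_sketch(JNIEnv *env, jobject thiz, IjkMediaPlayer *mp)
{
    (*env)->SetLongField(env, thiz, g_field_native_player, (jlong) (intptr_t) mp);
}
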
// ijkplayer_android.c
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{   // construct the IjkMediaPlayer
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;
    // initialize the video output (SDL_Vout)
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;
    // configure the video-decoding / audio-output pipeline (IJKFF_Pipeline)
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;
    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
    return mp;
fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
// ijkplayer.c
IjkMediaPlayer *ijkmp_create(int (*msg_loop)(void*))
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
    if (!mp)
        goto fail;
    // create the FFPlayer
    mp->ffplayer = ffp_create();
    if (!mp->ffplayer)
        goto fail;
    // message loop callback, started during prepare
    mp->msg_loop = msg_loop;
    ijkmp_inc_ref(mp);
    pthread_mutex_init(&mp->mutex, NULL);
    return mp;
    fail:
    ijkmp_destroy_p(&mp);
    return NULL;
}
// ff_ffplay.c
FFPlayer *ffp_create()
{
    av_log(NULL, AV_LOG_INFO, "av_version_info: %s\n", av_version_info());
    av_log(NULL, AV_LOG_INFO, "ijk_version_info: %s\n", ijk_version_info());
    FFPlayer* ffp = (FFPlayer*) av_mallocz(sizeof(FFPlayer));
    if (!ffp)
        return NULL;
    // create the message queue
    msg_queue_init(&ffp->msg_queue);
    ffp->af_mutex = SDL_CreateMutex();
    ffp->vf_mutex = SDL_CreateMutex();
    // reset ffp to its default values
    ffp_reset_internal(ffp);
    ffp->av_class = &ffp_context_class;
    ffp->meta = ijkmeta_create();
    // apply default options through the AVClass/AVOption table
    av_opt_set_defaults(ffp);
    return ffp;
}
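
The last step, av_opt_set_defaults(ffp), relies on FFmpeg's AVOption mechanism: because FFPlayer's first member is an AVClass pointer (set to &ffp_context_class just above), FFmpeg can walk the option table attached to that class and write every declared default straight into the struct; the player options applied later (ffp->player_opts) go through the same machinery. A minimal, self-contained sketch of the mechanism follows, using a hypothetical DemoPlayer struct and option table rather than ijkplayer's actual one.

#include <stddef.h>
#include <libavutil/opt.h>

// Hypothetical struct: the first field must be an AVClass*, just like FFPlayer's av_class.
typedef struct DemoPlayer {
    const AVClass *av_class;
    int64_t start_on_prepared;
} DemoPlayer;

static const AVOption demo_options[] = {
    // name, help, offset into the struct, type, default value, min, max, flags
    { "start-on-prepared", "start playback as soon as prepare finishes",
      offsetof(DemoPlayer, start_on_prepared), AV_OPT_TYPE_INT,
      { .i64 = 1 }, 0, 1, AV_OPT_FLAG_DECODING_PARAM },
    { NULL }
};

static const AVClass demo_class = {
    .class_name = "DemoPlayer",
    .item_name  = av_default_item_name,
    .option     = demo_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static void demo(void)
{
    DemoPlayer p = { .av_class = &demo_class };
    av_opt_set_defaults(&p);                          // p.start_on_prepared is now 1, the declared default
    av_opt_set_int(&p, "start-on-prepared", 0, 0);    // overrides go through the same table
}
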
SDL_Vout initialization

SDL stands for Simple DirectMedia Layer, an open-source, cross-platform multimedia library that wraps the messy low-level audio/video operations and makes them easier to work with. As mentioned earlier, ijkplayer was developed on top of ffplay, and ffplay uses SDL; ijk keeps ffplay's SDL-related structs and adapts them to mobile platforms. SDL_Vout is used to render video and SDL_Aout to play audio. SDL_Vout is initialized when the IjkMediaPlayer is initialized, SDL_Aout during prepare. The SDL_VoutAndroid_CreateForAndroidSurface() called above is essentially a thin wrapper that ends up in SDL_VoutAndroid_CreateForANativeWindow() shown below.

struct SDL_Vout {
    SDL_mutex *mutex;
    SDL_Class       *opaque_class;
    SDL_Vout_Opaque *opaque;
    SDL_VoutOverlay *(*create_overlay)(int width, int height, int frame_format, SDL_Vout *vout);
    void (*free_l)(SDL_Vout *vout);
    int (*display_overlay)(SDL_Vout *vout, SDL_VoutOverlay *overlay);
    Uint32 overlay_format;
};
// ijksdl_vout_android_nativewindow.c
SDL_Vout *SDL_VoutAndroid_CreateForANativeWindow()
{
    SDL_Vout *vout = SDL_Vout_CreateInternal(sizeof(SDL_Vout_Opaque));
    if (!vout)
        return NULL;
    SDL_Vout_Opaque *opaque = vout->opaque;
    opaque->native_window = NULL;
    // initialize the overlay arrays with an initial capacity
    if (ISDL_Array__init(&opaque->overlay_manager, 32))
        goto fail;
    if (ISDL_Array__init(&opaque->overlay_pool, 32))
        goto fail;
    // create the IJK_EGL context (used by the GL rendering path)
    opaque->egl = IJK_EGL_create();
    if (!opaque->egl)
        goto fail;
    av_log(NULL, AV_LOG_WARNING, "hyc SDL_VoutAndroid_CreateForANativeWindow ");
    // wire up the vout callbacks
    vout->opaque_class    = &g_nativewindow_class;
    vout->create_overlay  = func_create_overlay;
    vout->free_l          = func_free_l;
    vout->display_overlay = func_display_overlay;
    return vout;
fail:
    func_free_l(vout);
    return NULL;
}
IJKFF_Pipeline initialization

IJKFF_Pipeline configures the operation interfaces for video decoding and audio output.

IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return pipeline;
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp                   = ffp;
    opaque->surface_mutex         = SDL_CreateMutex();
    opaque->left_volume           = 1.0f;
    opaque->right_volume          = 1.0f;
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        goto fail;
    }
    pipeline->func_destroy              = func_destroy;
    // open the video decoder
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    // create the SDL_Aout (audio output)
    pipeline->func_open_audio_output    = func_open_audio_output;
    // initialize the video decoder ahead of time
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    // configure the video decoder
    pipeline->func_config_video_decoder = func_config_video_decoder;
    return pipeline;
fail:
    ffpipeline_free_p(&pipeline);
    return NULL;
}
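
These function pointers are not called directly by the playback code; ff_ffplay.c goes through small dispatch wrappers in ff_ffpipeline.c, roughly like the simplified sketch below. This is how ffpipeline_open_audio_output, called during prepare (section 4), ends up in the func_open_audio_output shown later.

// Sketch of the dispatch wrappers in ff_ffpipeline.c (simplified):
SDL_Aout *ffpipeline_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_open_audio_output(pipeline, ffp);
}

IJKFF_Pipenode *ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_open_video_decoder(pipeline, ffp);
}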

3. setDataSource

Tracing the code, setDataSource stores data_source in the native IjkMediaPlayer struct and moves the state to INITIALIZED.

// ijkplayer_jni.c
static void
IjkMediaPlayer_setDataSourceAndHeaders(
    JNIEnv *env, jobject thiz, jstring path,
    jobjectArray keys, jobjectArray values)
{
    MPTRACE("%s\n", __func__);
    int retval = 0;
    const char *c_path = NULL;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(path, env, "java/lang/IllegalArgumentException", "mpjni: setDataSource: null path", LABEL_RETURN);
    JNI_CHECK_GOTO(mp, env, "java/lang/IllegalStateException", "mpjni: setDataSource: null mp", LABEL_RETURN);
    c_path = (*env)->GetStringUTFChars(env, path, NULL );
    JNI_CHECK_GOTO(c_path, env, "java/lang/OutOfMemoryError", "mpjni: setDataSource: path.string oom", LABEL_RETURN);
    ALOGV("setDataSource: path %s", c_path);
    retval = ijkmp_set_data_source(mp, c_path);
    (*env)->ReleaseStringUTFChars(env, path, c_path);
    IJK_CHECK_MPRET_GOTO(retval, env, LABEL_RETURN);
LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
// ijkplayer.c
int ijkmp_set_data_source(IjkMediaPlayer *mp, const char *url)
{
    assert(mp);
    assert(url);
    MPTRACE("ijkmp_set_data_source(url=\"%s\")\n", url);
    pthread_mutex_lock(&mp->mutex);
    int retval = ijkmp_set_data_source_l(mp, url);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_set_data_source(url=\"%s\")=%d\n", url, retval);
    return retval;
}
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
    assert(mp);
    assert(url);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
    freep((void**)&mp->data_source);
    // the url string is released once the JNI call returns, so copy it here
    mp->data_source = strdup(url);
    if (!mp->data_source)
        return EIJK_OUT_OF_MEMORY;
    // change state to INITIALIZED
    ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
    return 0;
}

The snippet above contains an interesting bit of code used for state checking: a state whose check is commented out is a state in which the function is allowed to be called; in any other state the function returns EIJK_INVALID_STATE. For setDataSource only MP_STATE_IDLE is commented out, so the call is legal only in the IDLE state. Many later operations, such as pause, start and prepare, do the same kind of state check.

// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
#define MPST_RET_IF_EQ_INT(real, expected, errcode) \
    do { \
        if ((real) == (expected)) return (errcode); \
    } while(0)
#define MPST_RET_IF_EQ(real, expected) \
    MPST_RET_IF_EQ_INT(real, expected, EIJK_INVALID_STATE)

4. prepare

prepare does the following:

  • Initialize the message queue and start the message-notification thread
  • Apply the player options
  • Initialize SDL_Aout
  • Open the audio/video streams
// ijkplayer.c
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    assert(mp);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);
    assert(mp->data_source);
    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);
    // start the message queue
    msg_queue_start(&mp->ffplayer->msg_queue);
    // released in msg_loop
    ijkmp_inc_ref(mp);
    // create the message-notification thread
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
    // msg_thread is detached inside msg_loop
    // TODO: 9 release weak_thiz if pthread_create() failed;
    char sdk[128] = "0";
    __system_property_get("ro.build.version.sdk", sdk);
    int sdk_verison = atoi(sdk);
    mp->ffplayer->sdkVersion = sdk_verison;
    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }
    return 0;
}
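/*
 * Sketch of what the message-notification thread created above does (heavily
 * simplified from message_loop in ijkplayer_jni.c): it blocks on the FFPlayer
 * message queue and forwards each message to the Java layer, where it arrives
 * at the EventHandler created in initPlayer().  post_event ultimately invokes
 * the static Java method IjkMediaPlayer.postEventFromNative through JNI.
 */
static void message_loop_sketch(JNIEnv *env, IjkMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
    while (1) {
        AVMessage msg;
        if (ijkmp_get_msg(mp, &msg, 1) < 0)          // blocking read; < 0 means the queue was aborted
            break;
        switch (msg.what) {
        case FFP_MSG_PREPARED:
            post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0);
            break;
        // ... the other FFP_MSG_* values are mapped to MEDIA_* events the same way
        default:
            break;
        }
        msg_free_res(&msg);
    }
}
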
// ff_ffplay.c
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    assert(!ffp->is);
    assert(file_name);
    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }
    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }
    // apply the player options
    av_opt_set_dict(ffp, &ffp->player_opts);
    if (!ffp->aout) {
        // initialize SDL_Aout
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }
    // open the audio/video streams
    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }
    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}
SDL_Aout initialization

ijk ships two audio playback implementations, one based on OpenSL ES and one on AudioTrack. SDL_Aout defines the function pointers related to audio playback; each backend implements these function pointers by calling into the corresponding API. I added an oboe-based audio playback extension on top of this; the code is fairly simple, so it is not shown here.

struct SDL_Aout {
    SDL_mutex *mutex;
    double     minimal_latency_seconds;
    SDL_Class       *opaque_class;
    SDL_Aout_Opaque *opaque;
    void (*free_l)(SDL_Aout *vout);
    int (*open_audio)(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
    void (*pause_audio)(SDL_Aout *aout, int pause_on);
    void (*flush_audio)(SDL_Aout *aout);
    void (*set_volume)(SDL_Aout *aout, float left, float right);
    void (*close_audio)(SDL_Aout *aout);
    double (*func_get_latency_seconds)(SDL_Aout *aout);
    void   (*func_set_default_latency_seconds)(SDL_Aout *aout, double latency);
    // optional
    void   (*func_set_playback_rate)(SDL_Aout *aout, float playbackRate);
    void   (*func_set_playback_volume)(SDL_Aout *aout, float playbackVolume);
    int    (*func_get_audio_persecond_callbacks)(SDL_Aout *aout);
    // Android only
    int    (*func_get_audio_session_id)(SDL_Aout *aout);
};
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    SDL_Aout *aout = NULL;
    if (ffp->use_oboe) {
        aout = SDL_AoutAndroid_CreateForOboe();
    } else if (ffp->opensles) {
        // use the OpenSL ES backend
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {
        // use the AudioTrack backend
         aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
    if (aout)
        SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
    return aout;
}
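
For reference, plugging in a new backend (such as the oboe one mentioned above) mostly comes down to filling in these function pointers. Below is a hedged skeleton that assumes the SDL_Aout_CreateInternal helper from ijksdl_aout.c (the same pattern SDL_Vout_CreateInternal follows); the my_* callbacks and the backend name are placeholders, not the actual oboe implementation.

// Skeleton of a custom audio backend (placeholders only).
// Each my_* callback would be implemented on top of the chosen audio API.
static void my_free_l(SDL_Aout *aout);
static int  my_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
static void my_pause_audio(SDL_Aout *aout, int pause_on);
static void my_flush_audio(SDL_Aout *aout);
static void my_close_audio(SDL_Aout *aout);
static void my_set_volume(SDL_Aout *aout, float left, float right);

SDL_Aout *SDL_AoutAndroid_CreateForMyBackend(void)
{
    // SDL_Aout_Opaque holds whatever per-backend state the callbacks need.
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;

    aout->free_l      = my_free_l;       // tear everything down
    aout->open_audio  = my_open_audio;   // negotiate the SDL_AudioSpec and start the stream
    aout->pause_audio = my_pause_audio;  // pause/resume rendering
    aout->flush_audio = my_flush_audio;  // drop queued samples (used on seek)
    aout->close_audio = my_close_audio;  // stop and release the stream
    aout->set_volume  = my_set_volume;   // per-channel volume
    return aout;
}

func_open_audio_output above would then simply return this backend for the matching player option.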

Summary

Global initialization:

  • Dynamically register the JNI methods
  • Register FFmpeg's demuxers, protocol handlers and decoders

Player object initialization:

  • Create the IjkMediaPlayer and FFPlayer objects
  • Initialize SDL_Vout and configure the rendering-related operation interfaces
  • Initialize IJKFF_Pipeline and configure the video-decoding and audio-output operation interfaces
  • Store the native pointer in the Java layer so the upper-level API can call into it

setDataSource

  • Save the URL passed in from the upper layer
  • Move the player into the INITIALIZED state

prepare

  • Create the message-notification thread so the player can post messages back to the Java layer
  • Apply the player options
  • Initialize SDL_Aout and configure the audio-playback operation interfaces
  • Open the audio/video streams

Opening the audio/video streams is a fairly large topic, so it is traced in the next part.

This is my personal understanding; corrections and different views are welcome.