NuPlayer: the Core of MediaPlayer

In the earlier article on Android MediaPlayer source-code analysis we saw that the core of MediaPlayer is NuPlayer, which handles video decoding, rendering, synchronization, and output. This article digs deeper into the knowledge around NuPlayer.

Overall Design and Architecture

The figure below shows how MediaPlayer works within the Android architecture.


The next figure shows the NuPlayer workflow.


As the diagrams show, Android adopts NuPlayer as its mainstream player. NuPlayer is built on the StageFright engine, and StageFright integrates the OpenMAX codecs, session management, time synchronization, rendering, transport control, and DRM (Digital Rights Management).

The AHandler Mechanism

You run into AHandler everywhere in the NuPlayer sources; NuPlayer uses AHandler to set up communication between its components. Much like Handler in the Android Java layer, AHandler is an asynchronous message mechanism implemented in the native layer. Everything in this mechanism is handled asynchronously: the arguments are packed into an AMessage, the message is put onto a queue, and a dedicated background thread pulls messages off that queue and executes them; the execution entry point is onMessageReceived. The principle is shown in the figure below (a minimal usage sketch follows the class walkthrough).


The AHandler mechanism involves the following core classes:

  • AMessage

    The message class, used to construct messages. deliver() sends a message directly to an AHandler, while post() hands it to an ALooper, which later delivers it to the AHandler for processing.

    // deliver the message directly to the target AHandler
    void AMessage::deliver() {
        sp<AHandler> handler = mHandler.promote();
        if (handler == NULL) {
            ALOGW("failed to deliver message as target handler %d is gone.", mTarget);
            return;
        }
        handler->deliverMessage(this);
    }
    // post the message to the ALooper, which later delivers it to the AHandler
    status_t AMessage::post(int64_t delayUs) {
        sp<ALooper> looper = mLooper.promote();
        if (looper == NULL) {
            ALOGW("failed to post message as target looper for handler %d is gone.", mTarget);
            return -ENOENT;
        }
        looper->post(this, delayUs);
        return OK;
    }
    
  • AHandler

    The message-handling class, normally used as a base class: subclasses must implement onMessageReceived(), which plays the same role as handleMessage(msg) in the Java-layer Handler.

    void AHandler::deliverMessage(const sp<AMessage> &msg) {
        // hand the message to onMessageReceived(); this is a virtual method implemented by AHandler subclasses. NuPlayer derives from AHandler and implements it.
        onMessageReceived(msg);
        mMessageCounter++;
    // ... (code omitted)
    }
    
  • ALooper

    Each AHandler is bound to an ALooper; the ALooper stores posted messages and dispatches them to their target AHandler, and it has a one-to-many relationship with AMessage.

    // posts a message on this looper with the given timeout
    void ALooper::post(const sp<AMessage> &msg, int64_t delayUs) {
        Mutex::Autolock autoLock(mLock);
        int64_t whenUs;
        if (delayUs > 0) {
            whenUs = GetNowUs() + delayUs;
        } else {
            whenUs = GetNowUs();
        }
        List<Event>::iterator it = mEventQueue.begin();
        while (it != mEventQueue.end() && (*it).mWhenUs <= whenUs) {
            ++it;
        }
        Event event;
        event.mWhenUs = whenUs;
        event.mMessage = msg;
        if (it == mEventQueue.begin()) {
            mQueueChangedCondition.signal();
        }
        // insert the message into the event queue; mEventQueue is a List<Event>
        mEventQueue.insert(it, event);
    }
    status_t ALooper::start(
            bool runOnCallingThread, bool canCallJava, int32_t priority) {
        if (runOnCallingThread) {
            {
                Mutex::Autolock autoLock(mLock);
                if (mThread != NULL || mRunningLocally) {
                    return INVALID_OPERATION;
                }
                mRunningLocally = true;
            }
        // loop on the calling thread; the body is empty because messages are actually dequeued inside loop()
            do {
            } while (loop());
            return OK;
        }
        Mutex::Autolock autoLock(mLock);
        if (mThread != NULL || mRunningLocally) {
            return INVALID_OPERATION;
        }
        mThread = new LooperThread(this, canCallJava);
        status_t err = mThread->run(
                mName.empty() ? "ALooper" : mName.c_str(), priority);
        if (err != OK) {
            mThread.clear();
        }
        return err;
    }
    bool ALooper::loop() {
        Event event;
        {
            Mutex::Autolock autoLock(mLock);
            if (mThread == NULL && !mRunningLocally) {
                return false;
            }
            if (mEventQueue.empty()) {
                mQueueChangedCondition.wait(mLock);
                return true;
            }
            int64_t whenUs = (*mEventQueue.begin()).mWhenUs;
            int64_t nowUs = GetNowUs();
            if (whenUs > nowUs) {
                int64_t delayUs = whenUs - nowUs;
                mQueueChangedCondition.waitRelative(mLock, delayUs * 1000ll);
                return true;
            }
            event = *mEventQueue.begin();
            mEventQueue.erase(mEventQueue.begin());
        }
        event.mMessage->deliver();
    // ... (code omitted)
    }
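
Putting the three classes together, a minimal usage sketch looks roughly like the following. It is modeled on how NuPlayer wires up its own components; MyHandler, kWhatHello, and example() are made-up illustration names, and the header paths and default arguments vary slightly across Android versions:

    #define LOG_TAG "AHandlerExample"
    #include <utils/Log.h>
    #include <media/stagefright/foundation/ADebug.h>
    #include <media/stagefright/foundation/AHandler.h>
    #include <media/stagefright/foundation/ALooper.h>
    #include <media/stagefright/foundation/AMessage.h>
    
    using namespace android;
    
    struct MyHandler : public AHandler {
        enum { kWhatHello = 'hllo' };
    
        // Runs on the looper thread for every message targeted at this handler.
        virtual void onMessageReceived(const sp<AMessage> &msg) {
            switch (msg->what()) {
                case kWhatHello: {
                    int32_t value;
                    CHECK(msg->findInt32("value", &value));
                    ALOGD("got kWhatHello, value=%d", value);
                    break;
                }
                default:
                    TRESPASS();
            }
        }
    };
    
    void example() {
        sp<ALooper> looper = new ALooper;
        looper->setName("MyLooper");
        looper->start();                      // spawns the LooperThread shown above
    
        sp<MyHandler> handler = new MyHandler;
        looper->registerHandler(handler);     // binds the handler to this looper
    
        sp<AMessage> msg = new AMessage(MyHandler::kWhatHello, handler);
        msg->setInt32("value", 42);
        msg->post();                          // enqueued via ALooper::post(), delivered asynchronously
    }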
    

NuPlayer

NuPlayer itself derives from AHandler, so it has the ability to send and handle messages.

struct NuPlayer : public AHandler {
    explicit NuPlayer(pid_t pid, const sp<MediaClock> &mediaClock);
    void setUID(uid_t uid);
    void init(const wp<NuPlayerDriver> &driver);
    void setDataSourceAsync(const sp<IStreamSource> &source);
    virtual void setDataSourceAsync(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);
    void setDataSourceAsync(int fd, int64_t offset, int64_t length);
    void setDataSourceAsync(const sp<DataSource> &source);
// ... (code omitted)
};

The figure below shows the execution flow of a typical player.


The next figure shows the architectural design of NuPlayer.


The creation of NuPlayer was already covered in the earlier article, Android MediaPlayer source-code analysis. Several important members are involved in that creation process; let's go through them one by one.

  • NuPlayerDriver

    NuPlayerDriver wraps NuPlayer and implements the MediaPlayerInterface; the actual playback work is done through NuPlayer. The way to read this code is: first see what NuPlayerDriver does, then find the corresponding implementation inside NuPlayer, usually check how the message is handled in NuPlayer::onMessageReceived(), and finally come back to NuPlayerDriver's various notify* methods to see how the flow circles back (a condensed sketch of this round trip follows the class declaration below). The player's state-machine transitions are shown in the figure below.


struct NuPlayerDriver : public MediaPlayerInterface {
    explicit NuPlayerDriver(pid_t pid);
    virtual status_t initCheck();
    virtual status_t setUID(uid_t uid);
    virtual status_t setDataSource(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers);
    virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
    virtual status_t setDataSource(const sp<IStreamSource> &source);
    virtual status_t setDataSource(const sp<DataSource>& dataSource);
    virtual status_t setVideoSurfaceTexture(
            const sp<IGraphicBufferProducer> &bufferProducer);
    virtual status_t getBufferingSettings(
            BufferingSettings* buffering /* nonnull */) override;
    virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
    virtual status_t prepare();
    virtual status_t prepareAsync();
    virtual status_t start();
    virtual status_t stop();
    virtual status_t pause();
    virtual bool isPlaying();
    virtual status_t setPlaybackSettings(const AudioPlaybackRate &rate);
    virtual status_t getPlaybackSettings(AudioPlaybackRate *rate);
    virtual status_t setSyncSettings(const AVSyncSettings &sync, float videoFpsHint);
    virtual status_t getSyncSettings(AVSyncSettings *sync, float *videoFps);
    virtual status_t seekTo(
            int msec, MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC);
    virtual status_t getCurrentPosition(int *msec);
    virtual status_t getDuration(int *msec);
    virtual status_t reset();
    virtual status_t notifyAt(int64_t mediaTimeUs) override;
    virtual status_t setLooping(int loop);
    virtual player_type playerType();
    virtual status_t invoke(const Parcel &request, Parcel *reply);
    virtual void setAudioSink(const sp<AudioSink> &audioSink);
    virtual status_t setParameter(int key, const Parcel &request);
    virtual status_t getParameter(int key, Parcel *reply);
    virtual status_t getMetadata(
            const media::Metadata::Filter& ids, Parcel *records);
    virtual status_t dump(int fd, const Vector<String16> &args) const;
    void notifySetDataSourceCompleted(status_t err);
    void notifyPrepareCompleted(status_t err);
    void notifyResetComplete();
    void notifySetSurfaceComplete();
    void notifyDuration(int64_t durationUs);
    void notifyMorePlayingTimeUs(int64_t timeUs);
    void notifyMoreRebufferingTimeUs(int64_t timeUs);
    void notifyRebufferingWhenExit(bool status);
    void notifySeekComplete();
    void notifySeekComplete_l();
    void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
    void notifyFlagsChanged(uint32_t flags);
    // Modular DRM
    virtual status_t prepareDrm(const uint8_t uuid[16], const Vector<uint8_t> &drmSessionId);
    virtual status_t releaseDrm();
// ... (code omitted)
};
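
To make the suggested reading order concrete, here is a condensed (not verbatim) sketch of how a single call travels from NuPlayerDriver through NuPlayer's message loop and back, using start() as the example. The method names match AOSP, but the bodies are heavily trimmed and details vary by version:

// 1. Client-facing call: NuPlayerDriver updates its state machine and delegates.
status_t NuPlayerDriver::start() {
    Mutex::Autolock autoLock(mLock);
    // ...state checks omitted...
    mPlayer->start();
    mState = STATE_RUNNING;
    return OK;
}

// 2. NuPlayer::start() merely turns the call into an asynchronous AMessage.
void NuPlayer::start() {
    (new AMessage(kWhatStart, this))->post();
}

// 3. The looper thread picks it up in NuPlayer::onMessageReceived().
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatStart:
            onStart();   // creates the renderer and kicks off the decoders
            break;
        // ...
    }
}

// 4. Results flow back through NuPlayerDriver's notify* methods, e.g. once
//    rendering actually starts, NuPlayer calls notifyListener(MEDIA_INFO,
//    MEDIA_INFO_RENDERING_START, 0), which the driver forwards to the app.
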
  • AVNuFactory

    Responsible for creating the key components, including the decoder and the renderer. Through it you can see the data flow Source -> Decoder -> Renderer, with AMessages driving everything in between.

    struct AVNuFactory {
        virtual sp<NuPlayer> createNuPlayer(pid_t pid, const sp<MediaClock> &mediaClock);
        virtual sp<NuPlayer::DecoderBase> createPassThruDecoder(
                const sp<AMessage> &notify,
                const sp<NuPlayer::Source> &source,
                const sp<NuPlayer::Renderer> &renderer);
        virtual sp<NuPlayer::DecoderBase> createDecoder(
                const sp<AMessage> &notify,
                const sp<NuPlayer::Source> &source,
                pid_t pid,
                uid_t uid,
                const sp<NuPlayer::Renderer> &renderer);
        virtual sp<NuPlayer::Renderer> createRenderer(
                const sp<MediaPlayerBase::AudioSink> &sink,
                const sp<MediaClock> &mediaClock,
                const sp<AMessage> &notify,
                uint32_t flags);
        // ----- NO TRESSPASSING BEYOND THIS LINE ------
        DECLARE_LOADABLE_SINGLETON(AVNuFactory);
    };
    
  • Source

    Represents the data source. It also contains the demuxing module, MediaExtractor, whose role is similar to FFmpeg's avformat. Source itself derives from AHandler, so it too can send and handle messages.

    struct NuPlayer::Source : public AHandler {
        enum Flags {
            FLAG_CAN_PAUSE          = 1,
            FLAG_CAN_SEEK_BACKWARD  = 2,  // the "10 sec back button"
            FLAG_CAN_SEEK_FORWARD   = 4,  // the "10 sec forward button"
            FLAG_CAN_SEEK           = 8,  // the "seek bar"
            FLAG_DYNAMIC_DURATION   = 16,
            FLAG_SECURE             = 32, // Secure codec is required.
            FLAG_PROTECTED          = 64, // The screen needs to be protected (screenshot is disabled).
        };
    // ... (code omitted)
    };
    

    Data sources generally fall into three categories. NuPlayer::setDataSourceAsync() inspects the source to decide which type it is, and each type needs its own parsing module; a condensed view of how NuPlayer then consumes the posted message is sketched after the three source types below.

    void NuPlayer::setDataSourceAsync(
            const sp<IMediaHTTPService> &httpService,
            const char *url,
            const KeyedVector<String8, String8> *headers) {
        sp<AMessage> msg = new AMessage(kWhatSetDataSource, this);
        size_t len = strlen(url);
        sp<AMessage> notify = new AMessage(kWhatSourceNotify, this);
        sp<Source> source;
        // determine the type of the data source
        if (IsHTTPLiveURL(url)) {
            // HTTP live streaming (HLS)
            source = new HTTPLiveSource(notify, httpService, url, headers);
            ALOGV("setDataSourceAsync HTTPLiveSource %s", url);
            mDataSourceType = DATA_SOURCE_TYPE_HTTP_LIVE;
        } else if (!strncasecmp(url, "rtsp://", 7)) {
            // RTSP live streaming
            source = new RTSPSource(
                    notify, httpService, url, headers, mUIDValid, mUID);
            ALOGV("setDataSourceAsync RTSPSource %s", url);
            mDataSourceType = DATA_SOURCE_TYPE_RTSP;
        } else if ((!strncasecmp(url, "http://", 7)
                    || !strncasecmp(url, "https://", 8))
                        && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
                        || strstr(url, ".sdp?"))) {
            // live streaming via SDP over HTTP/HTTPS (also handled by RTSPSource)
            source = new RTSPSource(
                    notify, httpService, url, headers, mUIDValid, mUID, true);
            ALOGV("setDataSourceAsync RTSPSource http/https/.sdp %s", url);
            mDataSourceType = DATA_SOURCE_TYPE_RTSP;
        } else {
            // local video or other generic URLs
            ALOGV("setDataSourceAsync GenericSource %s", url);
            sp<GenericSource> genericSource =
                    new GenericSource(notify, mUIDValid, mUID, mMediaClock);
            status_t err = genericSource->setDataSource(httpService, url, headers);
            if (err == OK) {
                source = genericSource;
            } else {
                ALOGE("Failed to set data source!");
            }
            // regardless of success/failure
            mDataSourceType = DATA_SOURCE_TYPE_GENERIC_URL;
        }
        msg->setObject("source", source);
        msg->post();
    }
    
    • GenericSource: represents a local (generic) data source

      struct NuPlayer::GenericSource : public NuPlayer::Source,
                                       public MediaBufferObserver // Modular DRM
      {
          GenericSource(const sp<AMessage> &notify, bool uidValid, uid_t uid,
                        const sp<MediaClock> &mediaClock);
          status_t setDataSource(
                  const sp<IMediaHTTPService> &httpService,
                  const char *url,
                  const KeyedVector<String8, String8> *headers);
          status_t setDataSource(int fd, int64_t offset, int64_t length);
          status_t setDataSource(const sp<DataSource>& dataSource);
          virtual status_t getBufferingSettings(
                  BufferingSettings* buffering /* nonnull */) override;
          virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
          virtual void prepareAsync();
          virtual void start();
          virtual void stop();
          virtual void pause();
          virtual void resume();
          virtual void disconnect();
          virtual status_t feedMoreTSData();
          virtual sp<MetaData> getFileFormatMeta() const;
          virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
          virtual status_t getDuration(int64_t *durationUs);
          virtual size_t getTrackCount() const;
          virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
          virtual ssize_t getSelectedTrack(media_track_type type) const;
          virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
          virtual status_t seekTo(
              int64_t seekTimeUs,
              MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
      // ... (code omitted)
      };
      
    • HTTPLiveSource: represents a network data source, typically streaming media (HLS)

      struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
          HTTPLiveSource(
                  const sp<AMessage> &notify,
                  const sp<IMediaHTTPService> &httpService,
                  const char *url,
                  const KeyedVector<String8, String8> *headers);
          virtual status_t getBufferingSettings(
                  BufferingSettings* buffering /* nonnull */) override;
          virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
          virtual void prepareAsync();
          virtual void start();
          virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
          virtual sp<MetaData> getFormatMeta(bool audio);
          virtual sp<AMessage> getFormat(bool audio);
          virtual status_t feedMoreTSData();
          virtual status_t getDuration(int64_t *durationUs);
          virtual size_t getTrackCount() const;
          virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
          virtual ssize_t getSelectedTrack(media_track_type /* type */) const;
          virtual status_t selectTrack(size_t trackIndex, bool select, int64_t timeUs);
          virtual status_t seekTo(
                  int64_t seekTimeUs,
                  MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
      // ... (code omitted)
      };
      
    • RTSPSource: represents a live-streaming (RTSP) data source

      struct NuPlayer::RTSPSource : public NuPlayer::Source {
          RTSPSource(
                  const sp<AMessage> &notify,
                  const sp<IMediaHTTPService> &httpService,
                  const char *url,
                  const KeyedVector<String8, String8> *headers,
                  bool uidValid = false,
                  uid_t uid = 0,
                  bool isSDP = false);
          virtual status_t getBufferingSettings(
                  BufferingSettings* buffering /* nonnull */) override;
          virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
          virtual void prepareAsync();
          virtual void start();
          virtual void stop();
          virtual status_t feedMoreTSData();
          virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit);
          virtual status_t getDuration(int64_t *durationUs);
          virtual status_t seekTo(
                  int64_t seekTimeUs,
                  MediaPlayerSeekMode mode = MediaPlayerSeekMode::SEEK_PREVIOUS_SYNC) override;
          void onMessageReceived(const sp<AMessage> &msg);
      // ... (code omitted)
      };
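
    Returning to setDataSourceAsync() above: once the message carrying the chosen Source has been posted, the looper thread picks it up in NuPlayer::onMessageReceived(). The handler, paraphrased from AOSP (details differ between Android versions), looks roughly like this:

    // condensed view of the kWhatSetDataSource case in NuPlayer::onMessageReceived()
    case kWhatSetDataSource:
    {
        CHECK(mSource == NULL);
        status_t err = OK;
        sp<RefBase> obj;
        // the Source attached in setDataSourceAsync() via msg->setObject("source", source)
        CHECK(msg->findObject("source", &obj));
        if (obj != NULL) {
            mSource = static_cast<Source *>(obj.get());
        } else {
            err = UNKNOWN_ERROR;
        }
        // report completion back to NuPlayerDriver, which unblocks the caller
        CHECK(mDriver != NULL);
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifySetDataSourceCompleted(err);
        }
        break;
    }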
      
  • Decoder

    The decoder, whose role is similar to FFmpeg's avcodec. It wraps the interfaces used for AVC, AAC, etc. decoding and performs the actual decoding through ACodec (covering both OMX hardware decoding and software decoding). It is initialized from NuPlayer::onMessageReceived(); a rough sketch of how the decoder is then configured follows the code below.

    void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    // ... (code omitted)
        case kWhatPause:
            {
                onPause();
                mPausedByClient = true;
                break;
            }
            case kWhatSourceNotify:
            {
                // the Decoder is created from here
                onSourceNotify(msg);
                break;
            }
            case kWhatClosedCaptionNotify:
            {
                onClosedCaptionNotify(msg);
                break;
            }
    // ... (code omitted)
    }
    ---------------------------------------------------------------
    void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
    // ... (code omitted)
        switch (what) {
            case Source::kWhatInstantiateSecureDecoders:
            {
                if (mSource == NULL) {
                    // This is a stale notification from a source that was
                    // asynchronously preparing when the client called reset().
                    // We handled the reset, the source is gone.
                    break;
                }
                sp<AMessage> reply;
                CHECK(msg->findMessage("reply", &reply));
                // the decoders are created here
                status_t err = onInstantiateSecureDecoders();
                reply->setInt32("err", err);
                reply->post();
                break;
            }
    // ... (code omitted)
    }
    ---------------------------------------------------------------
    status_t NuPlayer::onInstantiateSecureDecoders() {
        status_t err;
        if (!(mSourceFlags & Source::FLAG_SECURE)) {
            return BAD_TYPE;
        }
        if (mRenderer != NULL) {
            ALOGE("renderer should not be set when instantiating secure decoders");
            return UNKNOWN_ERROR;
        }
        // TRICKY: We rely on mRenderer being null, so that decoder does not start requesting
        // data on instantiation.
        if (mSurface != NULL) {
            // create the video decoder
            err = instantiateDecoder(false, &mVideoDecoder);
            if (err != OK) {
                return err;
            }
        }
        if (mAudioSink != NULL) {
            // create the audio decoder
            err = instantiateDecoder(true, &mAudioDecoder);
            if (err != OK) {
                return err;
            }
        }
        return OK;
    }
    ---------------------------------------------------------------
    status_t NuPlayer::instantiateDecoder(
            bool audio, sp<DecoderBase> *decoder, bool checkAudioModeChange) {
    // ... (code omitted)
        if (audio) {
            // create the audio decoder
            sp<AMessage> notify = new AMessage(kWhatAudioNotify, this);
            ++mAudioDecoderGeneration;
            notify->setInt32("generation", mAudioDecoderGeneration);
            if (checkAudioModeChange) {
                determineAudioModeChange(format);
            }
            if (mOffloadAudio) {
                mSource->setOffloadAudio(true /* offload */);
                const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL);
                format->setInt32("has-video", hasVideo);
                *decoder = AVNuFactory::get()->createPassThruDecoder(notify, mSource, mRenderer);
                ALOGV("instantiateDecoder audio DecoderPassThrough hasVideo: %d", hasVideo);
            } else {
                AVNuUtils::get()->setCodecOutputFormat(format);
                mSource->setOffloadAudio(false /* offload */);
                *decoder = AVNuFactory::get()->createDecoder(notify, mSource, mPID, mUID, mRenderer);
                ALOGV("instantiateDecoder audio Decoder");
            }
            mAudioDecoderError = false;
        } else {
            // create the video decoder
            sp<AMessage> notify = new AMessage(kWhatVideoNotify, this);
            ++mVideoDecoderGeneration;
            notify->setInt32("generation", mVideoDecoderGeneration);
            *decoder = new Decoder(
                    notify, mSource, mPID, mUID, mRenderer, mSurface, mCCDecoder);
            mVideoDecoderError = false;
            // enable FRC if high-quality AV sync is requested, even if not
            // directly queuing to display, as this will even improve textureview
            // playback.
            {
                if (property_get_bool("persist.sys.media.avsync", false)) {
                    format->setInt32("auto-frc", 1);
                }
            }
        }
        (*decoder)->init();
    // ... (code omitted)
    }
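
    For completeness, here is a rough sketch of what happens after (*decoder)->init(): once the decoder receives the track format, it creates a MediaCodec for the track's MIME type, and MediaCodec in turn binds to an ACodec (OMX hardware codec or software codec). This is paraphrased from NuPlayerDecoder's onConfigure(); exact signatures and error handling vary across versions:

    // rough sketch of NuPlayer::Decoder::onConfigure() (paraphrased, not verbatim)
    void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
        AString mime;
        CHECK(format->findString("mime", &mime));
        // MediaCodec selects a concrete codec (OMX hardware or software) for this MIME type
        mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */);
        if (mCodec == NULL) {
            handleError(UNKNOWN_ERROR);
            return;
        }
        // for video, the output surface is handed to the codec so that decoded frames
        // can later be rendered with renderOutputBufferAndRelease()
        status_t err = mCodec->configure(format, mSurface, NULL /* crypto */, 0 /* flags */);
        if (err == OK) {
            err = mCodec->start();
        }
        if (err != OK) {
            handleError(err);
        }
    }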
    
  • Renderer

    The renderer, whose role is comparable to that of the sound-card and graphics-card drivers: it is mainly responsible for audio/video rendering and A/V synchronization, and it works together with the NativeWindow. It is created in NuPlayer::onStart().

    void NuPlayer::onStart(int64_t startPositionUs, MediaPlayerSeekMode mode) {
    // ... (code omitted)
        sp<AMessage> notify = new AMessage(kWhatRendererNotify, this);
        ++mRendererGeneration;
        notify->setInt32("generation", mRendererGeneration);
        // create the renderer
        mRenderer = AVNuFactory::get()->createRenderer(mAudioSink, mMediaClock, notify, flags);
        mRendererLooper = new ALooper;
        mRendererLooper->setName("NuPlayerRenderer");
        mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        mRendererLooper->registerHandler(mRenderer);
    // ... (code omitted)
    }
    

    Decoded output buffers are handed to the renderer in NuPlayer::Decoder::handleAnOutputBuffer(); how the renderer queues them is sketched right after the code.

    bool NuPlayer::Decoder::handleAnOutputBuffer(
            size_t index,
            size_t offset,
            size_t size,
            int64_t timeUs,
            int32_t flags) {
    // ... (code omitted)
        if (mRenderer != NULL) {
            // send the buffer to the renderer
            mRenderer->queueBuffer(mIsAudio, buffer, reply);
            if (eos && !isDiscontinuityPending()) {
                mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM);
            }
        }
    // ... (code omitted)
    }
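
    On the renderer side, queueBuffer() itself is asynchronous as well: it posts a kWhatQueueBuffer message, and the handler appends the buffer to the audio or video queue before scheduling a drain. The sketch below is a condensed paraphrase; the exact buffer type, message fields, and helper names (e.g. getQueueGeneration, postDrainAudioQueue_l) differ between Android versions:

    // condensed paraphrase of the queueing path in NuPlayer::Renderer
    void NuPlayer::Renderer::queueBuffer(
            bool audio, const sp<ABuffer> &buffer, const sp<AMessage> &notifyConsumed) {
        sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
        msg->setInt32("queueGeneration", getQueueGeneration(audio));
        msg->setInt32("audio", static_cast<int32_t>(audio));
        msg->setBuffer("buffer", buffer);
        msg->setMessage("notifyConsumed", notifyConsumed);
        msg->post();
    }

    void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
        // ...unpack the fields set above...
        QueueEntry entry;
        entry.mBuffer = buffer;
        entry.mNotifyConsumed = notifyConsumed;
        // append to the audio or video queue, then schedule draining
        if (audio) {
            mAudioQueue.push_back(entry);
            postDrainAudioQueue_l();
        } else {
            mVideoQueue.push_back(entry);
            postDrainVideoQueue();
        }
    }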
    

    On the output side, NuPlayer::Renderer::postDrainVideoQueue() schedules the draining of the video queue; a condensed sketch of the render/drop decision made in onDrainVideoQueue() closes this section.

    void NuPlayer::Renderer::postDrainVideoQueue() {
    // ... (code omitted)
        sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
        msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
    // ... (code omitted)
    }
    --------------------------------------------------------
    case kWhatDrainVideoQueue:
            {
                int32_t generation;
                CHECK(msg->findInt32("drainGeneration", &generation));
                if (generation != getDrainGeneration(false /* audio */)) {
                    break;
                }
                mDrainVideoQueuePending = false;
                // drain the contents of the video buffer queue
                onDrainVideoQueue();
                postDrainVideoQueue();
                break;
            }
    --------------------------------------------------------
    void NuPlayer::Renderer::onDrainVideoQueue() {
        // ... (code omitted)
        mVideoSampleReceived = true;
        if (!mPaused) {
            if (!mVideoRenderingStarted) {
                mVideoRenderingStarted = true;
                // notify upstream that video rendering has started
                notifyVideoRenderingStart();
            }
            Mutex::Autolock autoLock(mLock);
            notifyIfMediaRenderingStarted_l();
        }
    }
    --------------------------------------------------------
    void NuPlayer::Renderer::notifyVideoRenderingStart() {
        sp<AMessage> notify = mNotify->dup();
        notify->setInt32("what", kWhatVideoRenderingStart);
        notify->post();
    }
    
    void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
        // ... (code omitted)
        case kWhatRendererNotify:
            // handle messages sent back by the Renderer
            {
                int32_t requesterGeneration = mRendererGeneration - 1;
                CHECK(msg->findInt32("generation", &requesterGeneration));
                if (requesterGeneration != mRendererGeneration) {
                    ALOGV("got message from old renderer, generation(%d:%d)",
                            requesterGeneration, mRendererGeneration);
                    return;
                }
                int32_t what;
                CHECK(msg->findInt32("what", &what));
                if (what == Renderer::kWhatEOS) {
                    // ... (code omitted)
                } else if (what == Renderer::kWhatFlushComplete) {
                    // ... (code omitted)
                } else if (what == Renderer::kWhatVideoRenderingStart) {
                    // the kWhatVideoRenderingStart message is handled here
                    notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
                } else if (what == Renderer::kWhatMediaRenderingStart) {
                    ALOGV("media rendering started");
                    notifyListener(MEDIA_STARTED, 0, 0);
                } else if (!mPaused && what == Renderer::kWhatVideoPrerollComplete) {
                    // If NuPlayer is paused too, don't resume renderer. The pause may be called by
                    // client, wait for client to resume NuPlayer
                    ALOGI("NOTE: Video preroll complete.. resume renderer..");
                    mRenderer->resume();
                } else if (what == Renderer::kWhatAudioTearDown) {
                    // ... (code omitted)
                }
                break;
            }
        }
    }
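
    Finally, to close the loop on A/V synchronization: when onDrainVideoQueue() runs, the renderer compares the frame's media timestamp against the MediaClock (normally driven by the audio position) and tells the decoder whether to render or drop the frame. The fragment below is a condensed paraphrase of that decision; the ~40 ms threshold and the field names are taken from AOSP but vary by version:

    // condensed paraphrase of the render/drop decision inside NuPlayer::Renderer::onDrainVideoQueue()
    QueueEntry *entry = &*mVideoQueue.begin();

    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    int64_t nowUs = ALooper::GetNowUs();
    // MediaClock maps the media timestamp to a real (system) time at the current playback rate
    int64_t realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
    bool tooLate = !mPaused && (nowUs - realTimeUs) > 40000;   // more than ~40 ms late => drop

    // the decision travels back to the decoder inside the "consumed" reply message
    entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000LL);
    entry->mNotifyConsumed->setInt32("render", !tooLate);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());

    // NuPlayer::Decoder::onRenderBuffer() then honors the flag:
    //   if (render) mCodec->renderOutputBufferAndRelease(bufferIx, timestampNs);
    //   else        mCodec->releaseOutputBuffer(bufferIx);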