Preparation (prepare)
The prepare flow is as follows: the first few methods in the call chain need little explanation, since they do nothing more than call down to the next method or take a lock. We will jump straight to ijkmp_prepare_async_l():
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    assert(mp);

    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

    // setDataSource must be called before prepare; after setDataSource the state becomes MP_STATE_INITIALIZED
    assert(mp->data_source);

    // Change the state to MP_STATE_ASYNC_PREPARING and notify the outside world
    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);

    // Start the message queue
    msg_queue_start(&mp->ffplayer->msg_queue);

    // released in msg_loop
    ijkmp_inc_ref(mp);
    // Create the consumer thread of the message-queue model
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
    // msg_thread is detached inside msg_loop
    // TODO: 9 release weak_thiz if pthread_create() failed;

    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }

    return 0;
}
The first chunk of code is the usual state validation: if the current state is neither INITIALIZED nor STOPPED, the check fails (a sketch of the MPST_RET_IF_EQ macro follows the list below). This tells us two things:
- prepare must be called after setDataSource.
- After stop, prepare can be called again to re-enter the normal playback flow.
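For reference, MPST_RET_IF_EQ is essentially an early-return guard: when the current state matches one of the listed states, the function immediately returns an invalid-state error. The sketch below reflects how the macro is defined in ijkplayer.c; exact details may vary between versions.

// Sketch of the state-guard macro (based on ijkplayer.c; verify against your version).
#define MPST_RET_IF_EQ_INT(real, expected, errcode) \
    do { \
        if ((real) == (expected)) \
            return (errcode); \
    } while (0)

#define MPST_RET_IF_EQ(real, expected) \
    MPST_RET_IF_EQ_INT(real, expected, EIJK_INVALID_STATE)

So every state listed above, except the commented-out INITIALIZED and STOPPED, makes ijkmp_prepare_async_l() bail out with EIJK_INVALID_STATE.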
Once validation passes, the current state is set to MP_STATE_ASYNC_PREPARING, the state change is posted to the message queue as a message to notify the outside world, and the message queue is started. A consumer thread for the message-queue model is then created; this thread ultimately runs the message_loop callback that was passed in when the IjkMediaPlayer was created.
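To make the producer/consumer picture concrete, here is a simplified sketch (not the literal ijkplayer source) of what the consumer side looks like: the msg thread ends up in the message_loop callback, which keeps pulling messages off the queue and forwarding them to the platform layer (on Android, via a JNI post_event call). The names ijkmp_get_msg and ijkmp_dec_ref_p follow ijkplayer's API; the dispatch logic is abbreviated.

// Simplified consumer-loop sketch of the message-queue model.
static int message_loop_sketch(void *arg)
{
    IjkMediaPlayer *mp = (IjkMediaPlayer *)arg;
    while (1) {
        AVMessage msg;
        int retval = ijkmp_get_msg(mp, &msg, 1);   // blocks until a message arrives
        if (retval < 0)                            // queue aborted: player is shutting down
            break;
        if (retval == 0)                           // nothing to handle, keep waiting
            continue;
        // Forward msg.what / msg.arg1 / msg.arg2 to the application layer,
        // e.g. FFP_MSG_PREPARED eventually triggers the onPrepared callback in Java.
    }
    ijkmp_dec_ref_p(&mp);   // balances the ijkmp_inc_ref() done before the thread was created
    return 0;
}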
Finally, ffp_prepare_async_l() is called. Let's look at its code:
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    assert(!ffp->is);
    assert(file_name);

    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }

    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }

    av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
    ffp_show_version_str(ffp, "ijkplayer",      ijk_version_info());
    ffp_show_version_str(ffp, "FFmpeg",         av_version_info());
    ffp_show_version_int(ffp, "libavutil",      avutil_version());
    ffp_show_version_int(ffp, "libavcodec",     avcodec_version());
    ffp_show_version_int(ffp, "libavformat",    avformat_version());
    ffp_show_version_int(ffp, "libswscale",     swscale_version());
    ffp_show_version_int(ffp, "libswresample",  swresample_version());
    av_log(NULL, AV_LOG_INFO, "===== options =====\n");
    ffp_show_dict(ffp, "player-opts", ffp->player_opts);
    ffp_show_dict(ffp, "format-opts", ffp->format_opts);
    ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
    ffp_show_dict(ffp, "sws-opts   ", ffp->sws_dict);
    ffp_show_dict(ffp, "swr-opts   ", ffp->swr_opts);
    av_log(NULL, AV_LOG_INFO, "===================\n");

    av_opt_set_dict(ffp, &ffp->player_opts);
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }

#if CONFIG_AVFILTER
    if (ffp->vfilter0) {
        GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
        ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
    }
#endif

    // Open the stream
    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }

    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}
This method mainly does three things:
- Assigns the externally configured options to the corresponding fields of ffp.
- Opens the audio output, which on Android means AudioTrack or OpenSL ES.
- Opens the stream.
From this we can see that the stream is actually opened during the prepare phase, not during start.
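As a side note on the first point: the dictionaries dumped above (player-opts, format-opts, codec-opts, ...) are filled by the application layer before prepare is called. The snippet below is an illustration only, not the actual ijkplayer call chain (which goes through the platform glue and option-setting helpers); "probesize" is a standard FFmpeg demuxer option and "start-on-prepared" is an ijkplayer player option.

// Illustration: how options typically land in the dictionaries that
// ffp_prepare_async_l() prints and applies.
av_dict_set(&ffp->format_opts, "probesize", "1024000", 0);      // later handed to avformat_open_input()
av_dict_set(&ffp->player_opts, "start-on-prepared", "0", 0);    // applied to FFPlayer via av_opt_set_dict()

format_opts is consumed when read_thread opens the input, while player_opts is applied directly to the FFPlayer struct through its AVOption table by the av_opt_set_dict() call we just saw.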
Next, let's look at stream_open():
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
    assert(!ffp->is);
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    is->filename = av_strdup(filename);
    if (!is->filename)
        goto fail;
    is->iformat = iformat;
    is->ytop    = 0;
    is->xleft   = 0;

    // Initialize the decoded-frame queues for video, subtitle and audio
    if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
        goto fail;
    if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
        goto fail;
    if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
        goto fail;

    // Initialize the compressed-packet queues for video, audio and subtitle
    if (packet_queue_init(&is->videoq) < 0 ||
        packet_queue_init(&is->audioq) < 0 ||
        packet_queue_init(&is->subtitleq) < 0)
        goto fail;

    if (!(is->continue_read_thread = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        goto fail;
    }

    // Initialize the clocks
    init_clock(&is->vidclk, &is->videoq.serial);
    init_clock(&is->audclk, &is->audioq.serial);
    init_clock(&is->extclk, &is->extclk.serial);
    is->audio_clock_serial = -1;
    is->audio_volume = SDL_MIX_MAXVOLUME;
    is->muted = 0;
    is->av_sync_type = ffp->av_sync_type;

    is->play_mutex = SDL_CreateMutex();
    ffp->is = is;
    is->pause_req = !ffp->start_on_prepared;

    // Start the video rendering (refresh) thread
    is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
    if (!is->video_refresh_tid) {
        av_freep(&ffp->is);
        return NULL;
    }

    // Start the stream-reading thread
    is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
    if (!is->read_tid) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError());
fail:
        is->abort_request = true;
        if (is->video_refresh_tid)
            SDL_WaitThread(is->video_refresh_tid, NULL);
        stream_close(ffp);
        return NULL;
    }
    return is;
}
The most important work done by this method is:
- Initializing the jitter buffers: the frame queues (pictq / subpq / sampq) for decoded data and the packet queues (videoq / audioq / subtitleq) for demuxed data.
- Initializing the clocks (vidclk / audclk / extclk) that later drive audio/video synchronization; see the sketch after this list.
- Starting two threads: read_thread handles reading the stream, and video_refresh_thread handles video rendering. Both are core threads of ijkplayer, and we will analyze each of them separately in other articles.
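The clock design is inherited from ffplay: each Clock stores the last presented pts together with the system time at which it was set, so the current playback position can be extrapolated at any moment instead of being updated on every frame. The sketch below illustrates the idea; field names follow ffplay, and the real definitions live in ijkplayer's ff_ffplay sources, so treat this as an approximation rather than the exact struct.

#include <math.h>                  // NAN
#include "libavutil/time.h"        // av_gettime_relative()

typedef struct Clock {
    double  pts;            // clock base: last presented timestamp
    double  pts_drift;      // pts minus the system time when it was set
    double  last_updated;
    double  speed;
    int     serial;         // matches the owning packet queue's serial
    int     paused;
    int    *queue_serial;   // pointer to that packet queue's current serial
} Clock;

// Extrapolate the current clock value; after a seek/flush the serials no longer
// match and the clock reports NAN until it is re-synced.
static double get_clock_sketch(Clock *c)
{
    if (*c->queue_serial != c->serial)
        return NAN;
    if (c->paused)
        return c->pts;
    double time = av_gettime_relative() / 1000000.0;
    return c->pts_drift + time - (time - c->last_updated) * (1.0 - c->speed);
}

This is why init_clock() is handed a pointer to the corresponding packet queue's serial: whenever a seek flushes a queue and bumps its serial, the associated clock automatically becomes invalid until new data arrives.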