#include "video.h"
#include "packet.h"
#include "frame.h"
#include "player.h"

// Copy metadata of a decoded frame into the next writable slot of the video
// frame queue and move the frame data itself in (ownership transfer).
// Returns 0 on success, -1 if no writable slot is available (queue aborted).
static int queue_picture(player_stat_t *is, AVFrame *src_frame, double pts, double duration, int64_t pos)
{
    frame_t *vp = NULL;
    if (!(vp = frame_queue_peek_writable(&is->video_frm_queue)))
    {
        return -1;
    }
    vp->sar = src_frame->sample_aspect_ratio;
    vp->uploaded = 0;
    vp->width = src_frame->width;
    vp->height = src_frame->height;
    vp->format = src_frame->format;
    vp->pts = pts;
    vp->duration = duration;
    vp->pos = pos;
    //vp->serial = serial;
    //set_default_window_size(vp->width, vp->height, vp->sar);
    // Move the AVFrame into the queue slot (transfers buffer references;
    // src_frame is left empty/unreferenced afterwards).
    av_frame_move_ref(vp->frame, src_frame);
    // Update queue count and advance the write index.
    frame_queue_push(&is->video_frm_queue);
    return 0;
}

// Pull packets from the packet queue and decode them into one video frame.
// Returns 1 when a frame was decoded into `frame`, 0 when the decoder has
// been fully flushed (EOF), -1 on queue error/abort.
// NOTE(review): if the outer loop exits because abort_flag was set, the
// function falls off the end without a return statement — the caller reads
// an indeterminate value; an explicit `return -1;` should be added here.
static int video_decode_frame(AVCodecContext *p_codec_ctx, packet_queue_t *p_pkt_queue, AVFrame *frame, CMediaHostApi* hostapi)
{
    int ret = -1;
    while (0 == p_pkt_queue->abort_flag)
    {
        AVPacket pkt = {0};
        av_init_packet(&pkt);
        while (0 == p_pkt_queue->abort_flag)
        {
            // 3. Receive a frame from the decoder.
            // 3.1 One video packet holds one video frame.
            // The decoder buffers a number of packets before decoded frames
            // come out; frames are emitted in pts order, e.g. I B B P B B P.
            // frame->pkt_pos is the file offset of the packet this frame came
            // from — same value as pkt.pos.
            ret = avcodec_receive_frame(p_codec_ctx, frame);
            if (ret < 0)
            {
                if (ret == AVERROR_EOF)
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "video avcodec_receive_frame(): the decoder has been fully flushed.");
                    avcodec_flush_buffers(p_codec_ctx);
                    return 0;
                }
                else if (ret == AVERROR(EAGAIN))
                {
                    // Decoder needs more input before it can output a frame.
                    //hostapi->Debug(MEDIA_LOG_DEBUG,"video avcodec_receive_frame(): output is not available in this state - " "user must try to send new input");
                    break;
                }
                else
                {
                    // NOTE(review): `continue` retries avcodec_receive_frame()
                    // without feeding new input; a persistent decode error
                    // would spin here. ffplay breaks out to fetch the next
                    // packet instead — confirm intended behaviour.
                    hostapi->Debug(MEDIA_LOG_DEBUG, "video avcodec_receive_frame(): other errors.");
                    continue;
                }
            }
            else
            {
                // Prefer the best-effort timestamp FFmpeg estimates for us.
                frame->pts = frame->best_effort_timestamp;
                //frame->pts = frame->pkt_dts;
                return 1;   // Successfully decoded one video frame.
            }
        }
        // 1. Take one packet from the queue (blocking).
        if (packet_queue_get(p_pkt_queue, &pkt, true, hostapi) < 0)
        {
            return -1;
        }
        if (pkt.data == NULL)
        {
            // NULL-data packet is the flush marker: reset the decoder's
            // internal state / flush its internal buffers.
            avcodec_flush_buffers(p_codec_ctx);
        }
        else
        {
            // 2. Send the packet to the decoder.
            // Packets are sent in increasing dts order, e.g. I P B B P B B.
            // pkt.pos identifies this packet's byte offset in the file.
            int isend_ret = -1;
            isend_ret = avcodec_send_packet(p_codec_ctx, &pkt);
            if (0 != isend_ret)
            {
                if (AVERROR(EAGAIN) == isend_ret)
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "receive_frame and send_packet both returned EAGAIN, which is an API violation.");
                }
                else if (AVERROR_EOF == isend_ret)
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "the decoder has been flushed, and no new packets can be sent to it");
                }
                else if (AVERROR(EINVAL) == isend_ret)
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "codec not opened, it is an encoder, or requires flush");
                }
                else if (AVERROR(ENOMEM) == isend_ret)
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "failed to add packet to internal queue, or similar");
                }
                else
                {
                    hostapi->Debug(MEDIA_LOG_DEBUG, "legitimate decoding errors and avcodec_send_packet result is %d.", isend_ret);
                }
            }
            av_packet_unref(&pkt);
        }
    }
}

// Video decode thread: decode video packets into frames and push them onto
// the picture (frame) queue until the user stops or decoding finishes.
static int video_decode_thread(void *arg)
{
    player_stat_t *is = (player_stat_t *)arg;
    AVFrame *p_frame = av_frame_alloc();
    double pts = 0.0;
    double duration = 0.0;
    int ret=0;
    int got_picture = 0;
    if (p_frame == NULL)
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "av_frame_alloc() for p_frame failed.");
        return AVERROR(ENOMEM);
    }
    while (false == is->buser_stop)
    {
        AVRational tb = is->m_pvideo_stream[is->m_icurrent_index]->time_base;
        AVRational frame_rate = av_guess_frame_rate(is->m_pfmt_ctx[is->m_icurrent_index], is->m_pvideo_stream[is->m_icurrent_index], NULL);
        got_picture = video_decode_frame(is->m_pvcodec_ctx[is->m_icurrent_index], &is->video_pkt_queue, p_frame, is->rvc_hostapi);
        if (got_picture < 0)
        {
            goto exit;
        }
        // Nominal display duration of this frame = 1/frame_rate (inverted
        // rational), or 0 when the frame rate is unknown.
        AVRational tbdata = { frame_rate.den, frame_rate.num };
        duration = (frame_rate.num && frame_rate.den ? av_q2d(tbdata) : 0);
        //duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
        // Presentation timestamp of this frame in seconds (NAN if unknown).
        pts = (p_frame->pts == AV_NOPTS_VALUE) ? NAN : p_frame->pts * av_q2d(tb);
        // Push the current frame onto the frame queue.
        ret = queue_picture(is, p_frame, pts, duration, p_frame->pkt_pos);
        av_frame_unref(p_frame);
        if (ret < 0)
        {
            goto exit;
        }
        else
        {
            //is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "queue_picture success!");
        }
    }
exit:
    av_frame_free(&p_frame);
    is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video decode thread exit, thread id is %u, and user_stop flag is %s.", SDL_ThreadID(), is->buser_stop ? "true" : "false");
    is->m_bvideo_decode_finished = true;
    return 0;
}

// Correct `delay` based on the difference between the video clock and the
// master (audio) clock, so video either catches up with or waits for audio.
// Input `delay` is the previous frame's display duration (how long to wait
// after the previous frame before showing the current one); the returned
// value is that delay after synchronisation correction.
static double compute_target_delay(double delay, player_stat_t *is)
{
    double sync_threshold, diff = 0;
    /* update delay to follow master synchronisation source */
    /* if video is slave, we try to correct big delays by duplicating or deleting a frame */
    // Difference between the video clock and the sync (audio) clock. A clock
    // value is the last frame's pts plus the wall time elapsed since it was set.
    diff = get_clock(&is->video_clk) - get_clock(&is->audio_clk);
    // `delay` is the previous frame's duration: the nominal gap between the
    // current (pending) frame and the previous one.
    // `diff` is video clock minus sync clock.
    /* skip or repeat frame. We take into account the delay to compute the threshold. I still don't know if it is the best guess */
    // Clamp the sync threshold into [AV_SYNC_THRESHOLD_MIN, AV_SYNC_THRESHOLD_MAX]:
    // below the min use the min, above the max use the max, else use `delay`.
    sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
    if (!isnan(diff))
    {
        if (diff <= -sync_threshold)        // Video lags audio beyond the threshold.
            delay = FFMAX(0, delay + diff); // If delay+diff<0 show immediately (catch up), else shorten to delay+diff.
        else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD) // Video ahead and previous frame was very long.
            delay = delay + diff;           // Only add the difference (AV_SYNC_FRAMEDUP_THRESHOLD case).
        else if (diff >= sync_threshold)    // Video ahead of audio beyond the threshold.
            delay = 2 * delay;              // Slow video down by doubling the delay.
    }
    //is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video: delay=%0.3f A-V=%f", delay, -diff);
    return delay;
}

// Nominal display duration of `vp`: pts gap to the next frame when both
// frames belong to the same playback serial, falling back to vp->duration
// for invalid gaps; 0.0 across a serial (seek) boundary.
static double vp_duration(player_stat_t *is, frame_t *vp, frame_t *nextvp)
{
    if (vp->serial == nextvp->serial)
    {
        double duration = nextvp->pts - vp->pts;
        if (isnan(duration) || duration <= 0)
            return vp->duration;
        else
            return duration;
    }
    else
    {
        return 0.0;
    }
}

// Update the video clock to the pts of the frame about to be displayed.
static void update_video_pts(player_stat_t *is, double pts, int64_t pos, int serial)
{
    /* update current video pts */
    set_clock(&is->video_clk, pts, serial);  // Update the video clock.
    //-sync_clock_to_slave(&is->extclk, &is->vidclk);  // Sync external clock to video clock.
}

// Convert the last displayed frame to YUV420P and render it via SDL.
static void video_display(player_stat_t *is)
{
    frame_t *vp = NULL;
    vp = frame_queue_peek_last(&is->video_frm_queue);
    if (0 == vp->frame->height || 0 == vp->frame->width)
    {
        return;
    }
    // Image conversion: p_frm_raw->data ==> p_frm_yuv->data.
    // Converts a contiguous region of rows of the source image into the
    // corresponding region of the destination image; rows must be contiguous.
    // plane: e.g. YUV has Y, U, V planes; RGB has R, G, B planes.
    // slice: a contiguous run of image rows, ordered top-to-bottom or bottom-to-top.
    // stride/pitch: bytes per image row; Stride = BytesPerPixel * Width + Padding (mind alignment).
    // AVFrame.data[]: each element points to the corresponding plane.
    // AVFrame.linesize[]: each element is the byte stride of that plane.
    sws_scale(is->m_pimg_convert_ctx[is->m_icurrent_index],      // sws context
              (const uint8_t *const *)vp->frame->data,           // src slice
              vp->frame->linesize,                               // src stride
              0,                                                 // src slice y
              is->m_pvcodec_ctx[is->m_icurrent_index]->height,   // src slice height
              is->m_pfrm_yuv[is->m_icurrent_index]->data,        // dst planes
              is->m_pfrm_yuv[is->m_icurrent_index]->linesize     // dst strides
              );
    //SDL_Event rvcevent;
    //SDL_WaitEvent(&rvcevent);
    //if(REFRESH_EVENT == rvcevent.type){
    //SDL_ShowWindow(is->sdl_video.window);
    // Update the SDL texture with the new YUV pixel data.
    SDL_UpdateYUVTexture(is->sdl_video.texture,                        // sdl texture
                         &is->sdl_video.rect,                          // sdl rect
                         is->m_pfrm_yuv[is->m_icurrent_index]->data[0],      // y plane
                         is->m_pfrm_yuv[is->m_icurrent_index]->linesize[0],  // y pitch
                         is->m_pfrm_yuv[is->m_icurrent_index]->data[1],      // u plane
                         is->m_pfrm_yuv[is->m_icurrent_index]->linesize[1],  // u pitch
                         is->m_pfrm_yuv[is->m_icurrent_index]->data[2],      // v plane
                         is->m_pfrm_yuv[is->m_icurrent_index]->linesize[2]   // v pitch
                         );
    // Clear the current render target with the draw colour.
    SDL_RenderClear(is->sdl_video.renderer);
    // Copy the texture onto the current render target.
    SDL_RenderCopy(is->sdl_video.renderer,   // sdl renderer
                   is->sdl_video.texture,    // sdl texture
                   NULL,                     // src rect, if NULL copy texture
                   &is->sdl_video.rect       // dst rect
                   );
    // Present the rendered frame on screen.
    SDL_RenderPresent(is->sdl_video.renderer);
    SDL_Delay(1);
    //}
}

/* called to display each frame */
// Decide, based on the A/V clocks, whether to display the pending frame now,
// keep waiting (updating *remaining_time), or drop late frames.
static void video_refresh(void *opaque, double *remaining_time)
{
    player_stat_t *is = (player_stat_t *)opaque;
    double time;
    static bool first_frame = true;

retry:
    if (frame_queue_nb_remaining(&is->video_frm_queue) == 0)  // All frames displayed.
    {
        // nothing to do, no picture to display in the queue
        //is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "%s:%d, nothing to do, no picture to display in the queue.", __FUNCTION__, __LINE__);
        av_usleep(100*1000);
        return;
    }
    double last_duration, duration, delay;
    frame_t *vp, *lastvp;
    /* dequeue the picture */
    lastvp = frame_queue_peek_last(&is->video_frm_queue);  // Previous frame: the one last displayed.
    vp = frame_queue_peek(&is->video_frm_queue);           // Current frame: the one pending display.
    // On the very first frame, initialise frame_timer to the current time.
    // (In ffplay this also handles a new playback serial after a seek.)
    if (first_frame)
    {
        is->frame_timer = av_gettime_relative() / 1000000.0;
        first_frame = false;
    }
    // Paused: keep re-displaying the previous frame.
    if (is->m_ipaused)
    {
        goto display;
    }
    /* compute nominal last_duration */
    last_duration = vp_duration(is, lastvp, vp);       // Previous frame's duration: vp->pts - lastvp->pts.
    delay = compute_target_delay(last_duration, is);   // Correct the delay using the video/audio clock difference.
    time= av_gettime_relative()/1000000.0;
    // If the scheduled display time (frame_timer + delay) is still in the
    // future, it is too early to show this frame.
    if (time < is->frame_timer + delay)
    {
        // Not yet time: shrink remaining_time to the gap until display time
        // and return without displaying.
        *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
        return;
    }
    // Advance frame_timer.
    is->frame_timer += delay;
    // Resync frame_timer: if it lags the wall clock by more than the maximum
    // sync threshold, snap it to the current time.
    if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
    {
        is->frame_timer = time;
    }
    SDL_LockMutex(is->video_frm_queue.frame_mutex);
    if (!isnan(vp->pts))
    {
        update_video_pts(is, vp->pts, vp->pos, vp->serial);  // Update the video clock with this frame's pts.
    }
    SDL_UnlockMutex(is->video_frm_queue.frame_mutex);
    // Decide whether to drop frames that can no longer be shown on time.
    if (frame_queue_nb_remaining(&is->video_frm_queue) > 1)  // More than one pending frame (never drop the only one).
    {
        frame_t *nextvp = frame_queue_peek_next(&is->video_frm_queue);  // The frame after the current one.
        duration = vp_duration(is, vp, nextvp);  // Current frame's duration = nextvp->pts - vp->pts.
        // The current frame is already late: the next frame's display time
        // (frame_timer + duration) has passed — drop and retry.
        if (time > is->frame_timer + duration)
        {
            frame_queue_next(&is->video_frm_queue);  // Release lastvp; read index moves from lastvp to vp.
            goto retry;
        }
    }
    // Release the frame at the read index (lastvp, or vp if frames were
    // dropped) so vp (or nextvp) becomes the frame to display.
    frame_queue_next(&is->video_frm_queue);

display:
    video_display(is);  // Render the current frame (nextvp if frames were dropped).
}

// Window flags for the playback window; Windows additionally uses
// SKIP_TASKBAR and SHOWN. (`eType` is currently unused.)
static uint32_t get_video_playing_wind_flag(m_eWindType_t eType)
{
    uint32_t uFlag = SDL_WINDOW_BORDERLESS | SDL_WINDOW_OPENGL | SDL_WINDOW_ALWAYS_ON_TOP | SDL_WINDOW_SKIP_TASKBAR | SDL_WINDOW_POPUP_MENU | SDL_WINDOW_SHOWN;
#ifndef _WIN32
    uFlag = SDL_WINDOW_OPENGL|SDL_WINDOW_BORDERLESS|SDL_WINDOW_ALWAYS_ON_TOP|SDL_WINDOW_POPUP_MENU;
#endif
    return uFlag;
}

// Video playback thread: create the SDL window/renderer/texture, then loop
// calling video_refresh() until the user stops or decoding finishes.
static int video_playing_thread(void *arg)
{
    player_stat_t *is = (player_stat_t *)arg;
    double remaining_time = 0.0;
    uint32_t uWindFlag = get_video_playing_wind_flag(is->m_eWindType);
    // 1. Create the SDL window (SDL 2.0 supports multiple windows).
    // SDL_Window is the on-screen video window, like SDL_Surface in SDL 1.x.
    is->sdl_video.window = SDL_CreateWindow("player",
                               is->iDisplayCx,
                               is->iDisplayCy,
                               is->sdl_video.rect.w,
                               is->sdl_video.rect.h,
                               uWindFlag
                               );
    if (is->sdl_video.window == NULL)
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateWindow() failed: %s.", SDL_GetError());
        return -1;
    }
    else
    {
        //SDL_HideWindow(is->sdl_video.window);
#ifdef _WIN32
        // Pin the native window topmost via the Win32 HWND obtained from SDL.
        SDL_SysWMinfo info;
        HWND hwnd;
        SDL_VERSION(&info.version);
        if (SDL_GetWindowWMInfo(is->sdl_video.window, &info))
        {
            hwnd = info.info.win.window;
            SetWindowPos(hwnd, HWND_TOPMOST, is->iDisplayCx, is->iDisplayCy, is->sdl_video.rect.w, is->sdl_video.rect.h, SWP_NOMOVE | SWP_NOSIZE);
        }
        else
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "SDL_GetWindowWMInfo failed.");
        }
#else
#endif // _WIN32
    }
    int cx = 0, cy = 0;
    SDL_GetWindowPosition(is->sdl_video.window, &cx, &cy);
    // 2. Create the SDL_Renderer. On non-Windows platforms prefer the
    // "software" render driver if present; otherwise use the default (-1).
    int iNum = SDL_GetNumRenderDrivers();
    int iRenderindex = -1;
    for (int index = 0; index < iNum; index++)
    {
        SDL_RendererInfo info = {0};
        SDL_GetRenderDriverInfo(index, &info);
#ifdef _WIN32
#else
        if (strstr(info.name, "software"))
        {
            iRenderindex = index;
        }
#endif // _WIN32
    }
    //SDL_RendererFlags
    is->sdl_video.renderer = SDL_CreateRenderer(is->sdl_video.window, iRenderindex, SDL_RENDERER_TARGETTEXTURE | SDL_RENDERER_PRESENTVSYNC | SDL_RENDERER_ACCELERATED);
    if (NULL == is->sdl_video.renderer)
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateRenderer() failed: %s", SDL_GetError());
        return -1;
    }
    SDL_RendererInfo RenderInfo;
    SDL_GetRendererInfo(is->sdl_video.renderer, &RenderInfo);
    // 3. Create the SDL_Texture.
    // One SDL_Texture holds one YUV frame, like SDL_Overlay in SDL 1.x.
    is->sdl_video.texture = SDL_CreateTexture(is->sdl_video.renderer,
                                              SDL_PIXELFORMAT_IYUV,
                                              SDL_TEXTUREACCESS_STATIC,
                                              is->sdl_video.rect.w,
                                              is->sdl_video.rect.h
                                              );
    if (NULL == is->sdl_video.texture)
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_CreateTexture() failed: %s", SDL_GetError());
        return -1;
    }
    SDL_ShowWindow(is->sdl_video.window);
    while ((false == is->buser_stop) && (false == is->m_bvideo_decode_finished))
    {
        if (remaining_time > 0.0)
        {
            av_usleep((unsigned)(remaining_time * 1000000.0));
        }
        remaining_time = REFRESH_RATE;
        // Show the current frame now, or push the deadline into remaining_time.
        video_refresh(is, &remaining_time);
        //remaining_time += 0.020;
        SDL_Event event;
        while(SDL_PollEvent(&event))
        {
            switch(event.type)
            {
                case SDL_QUIT:
                    break;
            }
        }
    }
    is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "video playing thread exit, thread id is %u, and user_stop flag is %s.", SDL_ThreadID(), is->buser_stop ? "true" : "false");
    return 0;
}

// Allocate per-file YUV conversion buffers and sws contexts, then start the
// playback thread. Returns 0 on success, negative on failure.
// NOTE(review): the playback thread is created inside the per-index loop, so
// with m_uFilesCount > 1 several threads are spawned and m_video_playing_tid
// is overwritten each iteration — confirm this is intended.
static int open_video_playing(void* arg)
{
    player_stat_t* is = (player_stat_t*)arg;
    int iret = -1;
    int buf_size = 0;
    uint8_t* buffer = NULL;
    //SDL_Surface* IconSurface = NULL;
    for (size_t index = 0; index < is->m_uFilesCount; index++)
    {
        is->m_pfrm_yuv[index] = av_frame_alloc();
        if (NULL == is->m_pfrm_yuv[index])
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_frame_alloc() for p_frm_raw failed");
            return iret;
        }
        int iplay_video_width = 0;
        if (NULL != is->m_pvcodec_ctx[index])
        {
            iplay_video_width = is->m_pvcodec_ctx[index]->width;
        }
        int iplay_video_height = 0;
        if (NULL != is->m_pvcodec_ctx[index])
        {
            iplay_video_height = is->m_pvcodec_ctx[index]->height;
        }
        // Full-screen or caller-specified window types override the codec's
        // native dimensions with the configured display size.
        if (eFullScreen_Type == is->m_eWindType || eSpecified_Type == is->m_eWindType)
        {
            iplay_video_width = is->iDisplayWidth;
            iplay_video_height = is->iDisplayHeight;
        }
        // Manually allocate the buffer backing AVFrame.data[], which will
        // hold the destination frame of sws_scale().
        buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
                                            iplay_video_width,
                                            iplay_video_height,
                                            1
                                            );
        // `buffer` becomes the pixel data store of p_frm_yuv.
        buffer = (uint8_t*)av_malloc(buf_size);
        if (NULL == buffer)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_malloc() for buffer failed!");
            return iret;
        }
        is->m_pvideo_buffer[index] = buffer;
        // Point m_pfrm_yuv->data / ->linesize into the allocated buffer.
        iret = av_image_fill_arrays(is->m_pfrm_yuv[index]->data,      // dst data[]
                                    is->m_pfrm_yuv[index]->linesize,  // dst linesize[]
                                    is->m_pvideo_buffer[index],       // src buffer
                                    AV_PIX_FMT_YUV420P,               // pixel format
                                    iplay_video_width,                // width
                                    iplay_video_height,               // height
                                    1                                 // align
                                    );
        if (iret < 0)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "av_image_fill_arrays() failed %d", iret);
            return iret;
        }
        // A2. Initialise the sws context for the later image conversion.
        // The 6th argument is an FFmpeg pixel format (cf. note B3):
        // FFmpeg's AV_PIX_FMT_YUV420P corresponds to SDL's SDL_PIXELFORMAT_IYUV.
        // If the decoded format were not SDL-displayable, conversion would be
        // mandatory; for simplicity everything is converted to
        // AV_PIX_FMT_YUV420P ==> SDL_PIXELFORMAT_IYUV.
        is->m_pimg_convert_ctx[index] = sws_getContext(is->m_pvcodec_ctx[index]->width,    // src width
                                                       is->m_pvcodec_ctx[index]->height,   // src height
                                                       is->m_pvcodec_ctx[index]->pix_fmt,  // src format
                                                       iplay_video_width,                  // dst width
                                                       iplay_video_height,                 // dst height
                                                       AV_PIX_FMT_YUV420P,                 // dst format
                                                       SWS_BICUBIC,                        // flags
                                                       NULL,                               // src filter
                                                       NULL,                               // dst filter
                                                       NULL                                // param
                                                       );
        if (NULL == is->m_pimg_convert_ctx[index])
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "sws_getContext() failed.");
            return iret;
        }
        // Fill in the SDL_Rect used for texture creation and rendering.
        is->sdl_video.rect.x = 0;
        is->sdl_video.rect.y = 0;
        is->sdl_video.rect.w = iplay_video_width;
        is->sdl_video.rect.h = iplay_video_height;
        is->m_video_playing_tid = SDL_CreateThread(video_playing_thread, "video playing thread", is);
        if (NULL == is->m_video_playing_tid)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_Create video playing thread failed: %s.", SDL_GetError());
            return iret;
        }
        else
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "create %s success, and thread id is %u.", SDL_GetThreadName(is->m_video_playing_tid), SDL_GetThreadID(is->m_video_playing_tid));
            iret = 0;
        }
    }
    return iret;
}
// Build a decoding AVCodecContext for each file's video stream and, once all
// contexts are open, spawn the video decode thread.
// Returns 0 on success, negative on any failure (find/alloc/open errors).
static int open_video_stream(player_stat_t *is)
{
    AVCodecParameters* p_codec_par = NULL;
    AVCodec* p_codec = NULL;
    AVCodecContext* p_codec_ctx = NULL;
    int iret = -1;

    for (size_t index = 0; index < is->m_uFilesCount; index++)
    {
        AVStream* p_stream = is->m_pvideo_stream[index];

        // 1. Build the decoder AVCodecContext for this video stream.
        // 1.1 Get the stream's codec parameters.
        p_codec_par = p_stream->codecpar;

        // 1.2 Find a decoder for the stream's codec id.
        p_codec = avcodec_find_decoder(p_codec_par->codec_id);
        if (p_codec == NULL)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "can not find codec!");
            return iret;
        }

        // 1.3 Build the AVCodecContext.
        // 1.3.1 Allocate the context with defaults for this decoder.
        p_codec_ctx = avcodec_alloc_context3(p_codec);
        if (p_codec_ctx == NULL)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_alloc_context3() failed");
            return iret;
        }

        // 1.3.2 Copy the stream parameters into the context.
        // On failure, avcodec_free_context() alone is the correct cleanup:
        // it closes the codec if needed, and the previously-used
        // avcodec_close() was both redundant and invalid here (the context
        // was never opened); it is also deprecated in current FFmpeg.
        iret = avcodec_parameters_to_context(p_codec_ctx, p_codec_par);
        if (iret < 0)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_parameters_to_context() failed");
            avcodec_free_context(&p_codec_ctx);
            return iret;
        }

        // 1.3.3 Open the decoder; the context is now fully initialised.
        iret = avcodec_open2(p_codec_ctx, p_codec, NULL);
        if (iret < 0)
        {
            is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "avcodec_open2() failed %d", iret);
            avcodec_free_context(&p_codec_ctx);
            return iret;
        }
        is->m_pvcodec_ctx[index] = p_codec_ctx;
    }

    // 2. Create the video decode thread.
    is->m_video_decode_tid = SDL_CreateThread(video_decode_thread, "video decode thread", is);
    if (NULL == is->m_video_decode_tid)
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_ERROR, "SDL_Create video decode thread failed: %s.", SDL_GetError());
        return iret;
    }
    else
    {
        is->rvc_hostapi->Debug(MEDIA_LOG_DEBUG, "create %s success, and thread id is %u.", SDL_GetThreadName(is->m_video_decode_tid), SDL_GetThreadID(is->m_video_decode_tid));
        iret = 0;
    }
    return iret;
}

// Public entry point: open the video decoder(s); on success, set up the
// playback pipeline (buffers, sws contexts, playback thread).
// Returns 0 on success, negative on failure.
int open_video(player_stat_t *is)
{
    int iret = -1;
    if (0 == open_video_stream(is))
    {
        iret = open_video_playing(is);
    }
    return iret;
}