video.cpp 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546
  1. #include "video.h"
  2. #include "packet.h"
  3. #include "frame.h"
  4. #include "player.h"
  5. static int queue_picture(player_stat_t *is, AVFrame *src_frame, double pts, double duration, int64_t pos)
  6. {
  7. frame_t *vp;
  8. if (!(vp = frame_queue_peek_writable(&is->video_frm_queue)))
  9. return -1;
  10. vp->sar = src_frame->sample_aspect_ratio;
  11. vp->uploaded = 0;
  12. vp->width = src_frame->width;
  13. vp->height = src_frame->height;
  14. vp->format = src_frame->format;
  15. vp->pts = pts;
  16. vp->duration = duration;
  17. vp->pos = pos;
  18. //vp->serial = serial;
  19. //set_default_window_size(vp->width, vp->height, vp->sar);
  20. // 将AVFrame拷入队列相应位置
  21. av_frame_move_ref(vp->frame, src_frame);
  22. // 更新队列计数及写索引
  23. frame_queue_push(&is->video_frm_queue);
  24. return 0;
  25. }
// Pull packets from the packet queue and run them through the decoder until
// one frame is produced.
// Returns 1 when a decoded frame is available in `frame`, 0 when the decoder
// has been fully flushed (EOF), or -1 when the packet queue signals abort.
static int video_decode_frame(AVCodecContext *p_codec_ctx, packet_queue_t *p_pkt_queue, AVFrame *frame, play_logfun rvclog)
{
    int ret;
    while (1)
    {
        AVPacket pkt;
        while (1)
        {
            // 3. Receive a frame from the decoder.
            //    The decoder buffers a number of packets before the first frame
            //    comes out; frames are emitted in pts order (e.g. IBBPBBP).
            //    frame->pkt_pos is the byte offset of the matching packet in
            //    the input file (same value as pkt.pos).
            ret = avcodec_receive_frame(p_codec_ctx, frame);
            if (ret < 0)
            {
                if (ret == AVERROR_EOF)
                {
                    rvclog("video avcodec_receive_frame(): the decoder has been fully flushed.");
                    avcodec_flush_buffers(p_codec_ctx);
                    return 0;
                }
                else if (ret == AVERROR(EAGAIN))
                {
                    // Decoder needs more input: fall through to fetch a packet.
                    break;
                }
                else
                {
                    // NOTE(review): any other decode error retries
                    // avcodec_receive_frame() immediately; a persistent error
                    // would spin here — consider breaking out instead.
                    rvclog("video avcodec_receive_frame(): other errors.");
                    continue;
                }
            }
            else
            {
                frame->pts = frame->best_effort_timestamp;
                //frame->pts = frame->pkt_dts;
                return 1; // Successfully decoded one frame.
            }
        }
        // 1. Take one packet from the queue (blocking); < 0 means abort.
        if (packet_queue_get(p_pkt_queue, &pkt, true) < 0)
        {
            return -1;
        }
        if (pkt.data == NULL)
        {
            // A NULL-data packet is the flush sentinel: reset decoder state
            // and drain its internal buffers.
            avcodec_flush_buffers(p_codec_ctx);
        }
        else
        {
            // 2. Feed the packet to the decoder. Packets arrive in dts order
            //    (e.g. IPBBPBB); pkt.pos is the packet's file offset.
            if (avcodec_send_packet(p_codec_ctx, &pkt) == AVERROR(EAGAIN))
            {
                // NOTE(review): the packet is unreffed below and therefore
                // dropped in this case; ffplay re-queues it — confirm intent.
                rvclog("receive_frame and send_packet both returned EAGAIN, which is an API violation.");
            }
            av_packet_unref(&pkt);
        }
    }
}
  96. // 将视频包解码得到视频帧,然后写入picture队列
  97. static int video_decode_thread(void *arg)
  98. {
  99. player_stat_t *is = (player_stat_t *)arg;
  100. AVFrame *p_frame = av_frame_alloc();
  101. double pts;
  102. double duration;
  103. int ret;
  104. int got_picture;
  105. AVRational tb = is->p_video_stream->time_base;
  106. AVRational frame_rate = av_guess_frame_rate(is->p_fmt_ctx, is->p_video_stream, NULL);
  107. if (p_frame == NULL)
  108. {
  109. //av_log(NULL, AV_LOG_ERROR, "av_frame_alloc() for p_frame failed\n");
  110. is->rvc_log("av_frame_alloc() for p_frame failed.");
  111. return AVERROR(ENOMEM);
  112. }
  113. is->rvc_log("video_decode_thread is->abort_request == %d.", is->abort_request);
  114. while (0 == is->abort_request)
  115. {
  116. got_picture = video_decode_frame(is->p_vcodec_ctx, &is->video_pkt_queue, p_frame, is->rvc_log);
  117. if (got_picture < 0)
  118. {
  119. goto exit;
  120. }
  121. AVRational tbdata{ frame_rate.den, frame_rate.num };
  122. duration = (frame_rate.num && frame_rate.den ? av_q2d(tbdata) : 0); // 当前帧播放时长
  123. //duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0); // 当前帧播放时长
  124. pts = (p_frame->pts == AV_NOPTS_VALUE) ? NAN : p_frame->pts * av_q2d(tb); // 当前帧显示时间戳
  125. ret = queue_picture(is, p_frame, pts, duration, p_frame->pkt_pos); // 将当前帧压入frame_queue
  126. av_frame_unref(p_frame);
  127. if (ret < 0)
  128. {
  129. goto exit;
  130. }
  131. }
  132. exit:
  133. av_frame_free(&p_frame);
  134. return 0;
  135. }
  136. // 根据视频时钟与同步时钟(如音频时钟)的差值,校正delay值,使视频时钟追赶或等待同步时钟
  137. // 输入参数delay是上一帧播放时长,即上一帧播放后应延时多长时间后再播放当前帧,通过调节此值来调节当前帧播放快慢
  138. // 返回值delay是将输入参数delay经校正后得到的值
  139. static double compute_target_delay(double delay, player_stat_t *is)
  140. {
  141. double sync_threshold, diff = 0;
  142. /* update delay to follow master synchronisation source */
  143. /* if video is slave, we try to correct big delays by
  144. duplicating or deleting a frame */
  145. // 视频时钟与同步时钟(如音频时钟)的差异,时钟值是上一帧pts值(实为:上一帧pts + 上一帧至今流逝的时间差)
  146. diff = get_clock(&is->video_clk) - get_clock(&is->audio_clk);
  147. // delay是上一帧播放时长:当前帧(待播放的帧)播放时间与上一帧播放时间差理论值
  148. // diff是视频时钟与同步时钟的差值
  149. /* skip or repeat frame. We take into account the
  150. delay to compute the threshold. I still don't know
  151. if it is the best guess */
  152. // 若delay < AV_SYNC_THRESHOLD_MIN,则同步域值为AV_SYNC_THRESHOLD_MIN
  153. // 若delay > AV_SYNC_THRESHOLD_MAX,则同步域值为AV_SYNC_THRESHOLD_MAX
  154. // 若AV_SYNC_THRESHOLD_MIN < delay < AV_SYNC_THRESHOLD_MAX,则同步域值为delay
  155. sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
  156. if (!isnan(diff))
  157. {
  158. if (diff <= -sync_threshold) // 视频时钟落后于同步时钟,且超过同步域值
  159. delay = FFMAX(0, delay + diff); // 当前帧播放时刻落后于同步时钟(delay+diff<0)则delay=0(视频追赶,立即播放),否则delay=delay+diff
  160. else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD) // 视频时钟超前于同步时钟,且超过同步域值,但上一帧播放时长超长
  161. delay = delay + diff; // 仅仅校正为delay=delay+diff,主要是AV_SYNC_FRAMEDUP_THRESHOLD参数的作用
  162. else if (diff >= sync_threshold) // 视频时钟超前于同步时钟,且超过同步域值
  163. delay = 2 * delay; // 视频播放要放慢脚步,delay扩大至2倍
  164. }
  165. //av_log(NULL, AV_LOG_TRACE, "video: delay=%0.3f A-V=%f\n", delay, -diff);
  166. //is->rvc_log("video: delay=%0.3f A-V=%f\n", delay, -diff);
  167. return delay;
  168. }
  169. static double vp_duration(player_stat_t *is, frame_t *vp, frame_t *nextvp) {
  170. if (vp->serial == nextvp->serial)
  171. {
  172. double duration = nextvp->pts - vp->pts;
  173. if (isnan(duration) || duration <= 0)
  174. return vp->duration;
  175. else
  176. return duration;
  177. } else {
  178. return 0.0;
  179. }
  180. }
  181. static void update_video_pts(player_stat_t *is, double pts, int64_t pos, int serial) {
  182. /* update current video pts */
  183. set_clock(&is->video_clk, pts, serial); // 更新vidclock
  184. //-sync_clock_to_slave(&is->extclk, &is->vidclk); // 将extclock同步到vidclock
  185. }
/* Render the current frame: convert it to YUV420P via sws_scale, upload the
 * planes to the SDL texture, and present the renderer. */
static void video_display(player_stat_t *is)
{
    frame_t *vp;
    vp = frame_queue_peek_last(&is->video_frm_queue);
    // Pixel-format conversion: vp->frame->data ==> is->p_frm_yuv->data.
    // sws_scale processes a contiguous band of rows (a "slice"):
    //   plane:  e.g. Y/U/V planes for YUV, R/G/B planes for RGB
    //   slice:  contiguous run of image rows, top-to-bottom or bottom-to-top
    //   stride: bytes per image row = BytesPerPixel*Width + padding (aligned)
    //   AVFrame.data[]:     one pointer per plane
    //   AVFrame.linesize[]: bytes per row of the corresponding plane
    sws_scale(is->img_convert_ctx,                      // sws context
              (const uint8_t *const *)vp->frame->data,  // src slice
              vp->frame->linesize,                      // src stride
              0,                                        // src slice y
              is->p_vcodec_ctx->height,                 // src slice height
              is->p_frm_yuv->data,                      // dst planes
              is->p_frm_yuv->linesize                   // dst strides
              );
    // Upload the freshly converted YUV pixel data into the SDL texture.
    SDL_UpdateYUVTexture(is->sdl_video.texture,         // sdl texture
                         &is->sdl_video.rect,           // sdl rect
                         is->p_frm_yuv->data[0],        // y plane
                         is->p_frm_yuv->linesize[0],    // y pitch
                         is->p_frm_yuv->data[1],        // u plane
                         is->p_frm_yuv->linesize[1],    // u pitch
                         is->p_frm_yuv->data[2],        // v plane
                         is->p_frm_yuv->linesize[2]     // v pitch
                         );
    // Clear the current render target with the draw color.
    SDL_RenderClear(is->sdl_video.renderer);
    // Copy the texture to the render target.
    SDL_RenderCopy(is->sdl_video.renderer,  // sdl renderer
                   is->sdl_video.texture,   // sdl texture
                   NULL,                    // src rect; NULL = whole texture
                   &is->sdl_video.rect      // dst rect
                   );
    // Present: push the composed frame to the screen.
    SDL_RenderPresent(is->sdl_video.renderer);
}
/* Called periodically to display each frame. Decides — based on the video
 * clock vs. the master clock — whether to show the next queued frame now,
 * wait (*remaining_time is reduced accordingly), or drop late frames.
 * NOTE(review): the static first_frame flag means this supports a single
 * player instance per process — confirm that is intended. */
static void video_refresh(void *opaque, double *remaining_time)
{
    player_stat_t *is = (player_stat_t *)opaque;
    double time;
    static bool first_frame = true;
retry:
    if (frame_queue_nb_remaining(&is->video_frm_queue) == 0) // all frames shown
    {
        // nothing to do, no picture to display in the queue
        return;
    }
    double last_duration, duration, delay;
    frame_t *vp, *lastvp;
    /* dequeue the picture */
    lastvp = frame_queue_peek_last(&is->video_frm_queue); // previous frame: the one last displayed
    vp = frame_queue_peek(&is->video_frm_queue);          // current frame: next to display
    // On the very first frame, start the frame timer at the current time.
    if (first_frame)
    {
        is->frame_timer = av_gettime_relative() / 1000000.0;
        first_frame = false;
    }
    // Paused: keep re-displaying the previous frame.
    if (is->paused)
        goto display;
    /* compute nominal last_duration */
    last_duration = vp_duration(is, lastvp, vp);     // previous frame duration: vp->pts - lastvp->pts
    delay = compute_target_delay(last_duration, is); // corrected by the video/master clock difference
    time= av_gettime_relative()/1000000.0;
    // Display time of the current frame (frame_timer + delay) not reached yet.
    if (time < is->frame_timer + delay) {
        // Not due yet: shrink remaining_time to the gap until the due time...
        *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
        // ...and return without displaying.
        return;
    }
    // Advance frame_timer by the delay just consumed.
    is->frame_timer += delay;
    // If frame_timer has fallen too far behind the wall clock (beyond the
    // max sync threshold), resynchronize it to the current time.
    if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
    {
        is->frame_timer = time;
    }
    SDL_LockMutex(is->video_frm_queue.mutex);
    if (!isnan(vp->pts))
    {
        update_video_pts(is, vp->pts, vp->pos, vp->serial); // update video clock: pts + serial
    }
    SDL_UnlockMutex(is->video_frm_queue.mutex);
    // Frame dropping: consider discarding frames that missed their slot.
    if (frame_queue_nb_remaining(&is->video_frm_queue) > 1) // >1 undisplayed frames (never drop the only frame)
    {
        frame_t *nextvp = frame_queue_peek_next(&is->video_frm_queue); // frame after the current one
        duration = vp_duration(is, vp, nextvp); // current frame duration = nextvp->pts - vp->pts
        // Current frame vp is already late: the NEXT frame's display time
        // (frame_timer + duration) is in the past.
        if (time > is->frame_timer + duration)
        {
            frame_queue_next(&is->video_frm_queue); // drop lastvp: read index moves from lastvp to vp
            goto retry;
        }
    }
    // Advance the read index: from lastvp to vp (or, after drops, vp to nextvp).
    frame_queue_next(&is->video_frm_queue);
display:
    video_display(is); // show the current frame (nextvp if frames were dropped)
}
  293. static int video_playing_thread(void *arg)
  294. {
  295. player_stat_t *is = (player_stat_t *)arg;
  296. double remaining_time = 0.0;
  297. is->rvc_log("video_playing_thread is->abort_request == %d.", is->abort_request);
  298. while (0 == is->abort_request)
  299. {
  300. if (remaining_time > 0.0)
  301. {
  302. av_usleep((unsigned)(remaining_time * 1000000.0));
  303. }
  304. remaining_time = REFRESH_RATE;
  305. // 立即显示当前帧,或延时remaining_time后再显示
  306. video_refresh(is, &remaining_time);
  307. }
  308. return 0;
  309. }
  310. static int open_video_playing(void *arg)
  311. {
  312. player_stat_t *is = (player_stat_t *)arg;
  313. int ret;
  314. int buf_size;
  315. uint8_t* buffer = NULL;
  316. SDL_Surface* IconSurface = NULL;
  317. is->p_frm_yuv = av_frame_alloc();
  318. if (is->p_frm_yuv == NULL)
  319. {
  320. is->rvc_log("av_frame_alloc() for p_frm_raw failed\n");
  321. return -1;
  322. }
  323. // 为AVFrame.*data[]手工分配缓冲区,用于存储sws_scale()中目的帧视频数据
  324. buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
  325. is->p_vcodec_ctx->width,
  326. is->p_vcodec_ctx->height,
  327. 1
  328. );
  329. // buffer将作为p_frm_yuv的视频数据缓冲区
  330. buffer = (uint8_t *)av_malloc(buf_size);
  331. if (buffer == NULL)
  332. {
  333. is->rvc_log("av_malloc() for buffer failed\n");
  334. return -1;
  335. }
  336. // 使用给定参数设定p_frm_yuv->data和p_frm_yuv->linesize
  337. ret = av_image_fill_arrays(is->p_frm_yuv->data, // dst data[]
  338. is->p_frm_yuv->linesize, // dst linesize[]
  339. buffer, // src buffer
  340. AV_PIX_FMT_YUV420P, // pixel format
  341. is->p_vcodec_ctx->width, // width
  342. is->p_vcodec_ctx->height,// height
  343. 1 // align
  344. );
  345. if (ret < 0)
  346. {
  347. is->rvc_log("av_image_fill_arrays() failed %d\n", ret);
  348. return -1;;
  349. }
  350. // A2. 初始化SWS context,用于后续图像转换
  351. // 此处第6个参数使用的是FFmpeg中的像素格式,对比参考注释B3
  352. // FFmpeg中的像素格式AV_PIX_FMT_YUV420P对应SDL中的像素格式SDL_PIXELFORMAT_IYUV
  353. // 如果解码后得到图像的不被SDL支持,不进行图像转换的话,SDL是无法正常显示图像的
  354. // 如果解码后得到图像的能被SDL支持,则不必进行图像转换
  355. // 这里为了编码简便,统一转换为SDL支持的格式AV_PIX_FMT_YUV420P==>SDL_PIXELFORMAT_IYUV
  356. is->img_convert_ctx = sws_getContext(is->p_vcodec_ctx->width, // src width
  357. is->p_vcodec_ctx->height, // src height
  358. is->p_vcodec_ctx->pix_fmt, // src format
  359. is->p_vcodec_ctx->width, // dst width
  360. is->p_vcodec_ctx->height, // dst height
  361. AV_PIX_FMT_YUV420P, // dst format
  362. SWS_BICUBIC, // flags
  363. NULL, // src filter
  364. NULL, // dst filter
  365. NULL // param
  366. );
  367. if (is->img_convert_ctx == NULL)
  368. {
  369. is->rvc_log("sws_getContext() failed\n");
  370. return -1;
  371. }
  372. // SDL_Rect赋值
  373. is->sdl_video.rect.x = 0;
  374. is->sdl_video.rect.y = 0;
  375. is->sdl_video.rect.w = is->p_vcodec_ctx->width;
  376. is->sdl_video.rect.h = is->p_vcodec_ctx->height;
  377. // 1. 创建SDL窗口,SDL 2.0支持多窗口
  378. // SDL_Window即运行程序后弹出的视频窗口,同SDL 1.x中的SDL_Surface
  379. is->sdl_video.window = SDL_CreateWindow(NULL,
  380. SDL_WINDOWPOS_UNDEFINED,// 不关心窗口X坐标
  381. SDL_WINDOWPOS_UNDEFINED,// 不关心窗口Y坐标
  382. is->sdl_video.rect.w,
  383. is->sdl_video.rect.h,
  384. SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE
  385. );
  386. if (is->sdl_video.window == NULL)
  387. {
  388. is->rvc_log("SDL_CreateWindow() failed: %s.", SDL_GetError());
  389. return -1;
  390. }
  391. IconSurface = SDL_LoadBMP(is->piconpath);
  392. if (NULL == IconSurface)
  393. {
  394. is->rvc_log("SDL_LoadBMP(%s) failed: %s\n", is->piconpath, SDL_GetError());
  395. }
  396. else
  397. {
  398. SDL_SetWindowIcon(is->sdl_video.window, IconSurface);
  399. SDL_FreeSurface(IconSurface);
  400. }
  401. // 2. 创建SDL_Renderer
  402. // SDL_Renderer:渲染器
  403. is->sdl_video.renderer = SDL_CreateRenderer(is->sdl_video.window, -1, 0);
  404. if (is->sdl_video.renderer == NULL)
  405. {
  406. is->rvc_log("SDL_CreateRenderer() failed: %s\n", SDL_GetError());
  407. return -1;
  408. }
  409. // 3. 创建SDL_Texture
  410. // 一个SDL_Texture对应一帧YUV数据,同SDL 1.x中的SDL_Overlay
  411. is->sdl_video.texture = SDL_CreateTexture(is->sdl_video.renderer,
  412. SDL_PIXELFORMAT_IYUV,
  413. SDL_TEXTUREACCESS_STREAMING,
  414. is->sdl_video.rect.w,
  415. is->sdl_video.rect.h
  416. );
  417. if (is->sdl_video.texture == NULL)
  418. {
  419. is->rvc_log("SDL_CreateTexture() failed: %s\n", SDL_GetError());
  420. return -1;
  421. }
  422. SDL_CreateThread(video_playing_thread, "video playing thread", is);
  423. return 0;
  424. }
  425. static int open_video_stream(player_stat_t *is)
  426. {
  427. AVCodecParameters* p_codec_par = NULL;
  428. AVCodec* p_codec = NULL;
  429. AVCodecContext* p_codec_ctx = NULL;
  430. AVStream *p_stream = is->p_video_stream;
  431. int ret;
  432. // 1. 为视频流构建解码器AVCodecContext
  433. // 1.1 获取解码器参数AVCodecParameters
  434. p_codec_par = p_stream->codecpar;
  435. // 1.2 获取解码器
  436. p_codec = avcodec_find_decoder(p_codec_par->codec_id);
  437. if (p_codec == NULL)
  438. {
  439. is->rvc_log("Cann't find codec!\n");
  440. return -1;
  441. }
  442. // 1.3 构建解码器AVCodecContext
  443. // 1.3.1 p_codec_ctx初始化:分配结构体,使用p_codec初始化相应成员为默认值
  444. p_codec_ctx = avcodec_alloc_context3(p_codec);
  445. if (p_codec_ctx == NULL)
  446. {
  447. is->rvc_log("avcodec_alloc_context3() failed\n");
  448. return -1;
  449. }
  450. // 1.3.2 p_codec_ctx初始化:p_codec_par ==> p_codec_ctx,初始化相应成员
  451. ret = avcodec_parameters_to_context(p_codec_ctx, p_codec_par);
  452. if (ret < 0)
  453. {
  454. is->rvc_log("avcodec_parameters_to_context() failed\n");
  455. return -1;
  456. }
  457. // 1.3.3 p_codec_ctx初始化:使用p_codec初始化p_codec_ctx,初始化完成
  458. ret = avcodec_open2(p_codec_ctx, p_codec, NULL);
  459. if (ret < 0)
  460. {
  461. is->rvc_log("avcodec_open2() failed %d\n", ret);
  462. return -1;
  463. }
  464. is->p_vcodec_ctx = p_codec_ctx;
  465. // 2. 创建视频解码线程
  466. SDL_CreateThread(video_decode_thread, "video decode thread", is);
  467. return 0;
  468. }
  469. int open_video(player_stat_t *is)
  470. {
  471. open_video_stream(is);
  472. open_video_playing(is);
  473. return 0;
  474. }