-
Notifications
You must be signed in to change notification settings - Fork 6
/
Copy pathvideo.c
552 lines (481 loc) · 21.8 KB
/
video.c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
#include "video.h"
#include "packet.h"
#include "frame.h"
#include "player.h"
// Append one decoded video frame to the picture (frame) queue.
// Blocks in frame_queue_peek_writable() until a slot is free; returns -1 if
// the queue was aborted, 0 on success. Ownership of src_frame's data is
// transferred into the queue slot (src_frame is left empty).
static int queue_picture(player_stat_t *is, AVFrame *src_frame, double pts, double duration, int64_t pos)
{
    frame_t *slot = frame_queue_peek_writable(&is->video_frm_queue);
    if (slot == NULL)
        return -1;

    // Copy per-frame metadata into the slot.
    slot->sar      = src_frame->sample_aspect_ratio;
    slot->uploaded = 0;
    slot->width    = src_frame->width;
    slot->height   = src_frame->height;
    slot->format   = src_frame->format;
    slot->pts      = pts;
    slot->duration = duration;
    slot->pos      = pos;

    // Move (not copy) the frame data into the queue slot.
    av_frame_move_ref(slot->frame, src_frame);
    // Publish the slot: bump the queue count and write index.
    frame_queue_push(&is->video_frm_queue);
    return 0;
}
// Pull one decoded frame out of the decoder, feeding it packets from the
// queue as needed (send/receive decoupled API).
// Returns 1 when a frame was produced, 0 when the decoder has been fully
// flushed (EOF), -1 on abort request or queue error.
static int video_decode_frame(AVCodecContext *p_codec_ctx, packet_queue_t *p_pkt_queue, AVFrame *frame)
{
    int ret;

    while (1)
    {
        AVPacket pkt;

        // 3. Drain the decoder first: keep receiving until it asks for input.
        //    The decoder buffers a number of packets before the first frame
        //    comes out; output frames arrive in pts (presentation) order,
        //    e.g. IBBPBBP. frame->pkt_pos mirrors the source pkt.pos.
        while (1)
        {
            if (p_pkt_queue->abort_request)
            {
                av_log(NULL, AV_LOG_DEBUG, "video_decode_frame receive quit\n");
                return -1;
            }

            ret = avcodec_receive_frame(p_codec_ctx, frame);
            if (ret >= 0)
            {
                frame->pts = frame->best_effort_timestamp;
                return 1;       // got one decoded video frame
            }
            if (ret == AVERROR_EOF)
            {
                av_log(NULL, AV_LOG_INFO, "video avcodec_receive_frame(): the decoder has been fully flushed\n");
                avcodec_flush_buffers(p_codec_ctx);
                return 0;
            }
            if (ret == AVERROR(EAGAIN))
            {
                // Decoder needs more input before it can emit a frame.
                break;
            }
            // Any other error: do NOT retry receive in a tight loop — the
            // original `continue` here could spin forever on a persistent
            // decoder error. Fall through and feed the decoder a new packet
            // instead (same recovery strategy as ffplay).
            av_log(NULL, AV_LOG_ERROR, "video avcodec_receive_frame(): error %d\n", ret);
            break;
        }

        // 1. Take one packet from the queue (blocking).
        if (packet_queue_get(p_pkt_queue, &pkt, true) < 0)
        {
            av_log(NULL, AV_LOG_DEBUG, "get packet error\n");
            return -1;
        }

        if (pkt.data == NULL)
        {
            // A null packet is a flush request: reset the decoder's internal
            // state / drop its internal buffers.
            avcodec_flush_buffers(p_codec_ctx);
        }
        else
        {
            // 2. Send the packet to the decoder.
            //    Packets are sent in dts (decode) order, e.g. IPBBPBB;
            //    pkt.pos is the packet's byte offset in the input file.
            if (avcodec_send_packet(p_codec_ctx, &pkt) == AVERROR(EAGAIN))
            {
                av_log(NULL, AV_LOG_ERROR, "receive_frame and send_packet both returned EAGAIN, which is an API violation.\n");
            }
            av_packet_unref(&pkt);
        }
    }
}
// Video decode thread: repeatedly decodes packets into frames and pushes
// them onto the picture queue, until abort or a queue error.
static int video_decode_thread(void *arg)
{
    player_stat_t *is = (player_stat_t *)arg;
    AVRational tb = is->p_video_stream->time_base;
    AVRational frame_rate = av_guess_frame_rate(is->p_fmt_ctx, is->p_video_stream, NULL);
    AVFrame *p_frame = av_frame_alloc();

    if (p_frame == NULL)
    {
        av_log(NULL, AV_LOG_ERROR, "av_frame_alloc() for p_frame failed\n");
        return AVERROR(ENOMEM);
    }

    for (;;)
    {
        int got = video_decode_frame(is->p_vcodec_ctx, &is->video_pkt_queue, p_frame);
        if (got < 0)
            break;

        // Nominal display duration of one frame, from the stream frame rate.
        double duration = (frame_rate.num && frame_rate.den)
                        ? av_q2d((AVRational){frame_rate.den, frame_rate.num})
                        : 0;
        // Presentation timestamp in seconds (NAN when unknown).
        double pts = (p_frame->pts == AV_NOPTS_VALUE) ? NAN : p_frame->pts * av_q2d(tb);

        // Hand the frame to the picture queue, then drop our reference.
        int ret = queue_picture(is, p_frame, pts, duration, p_frame->pkt_pos);
        av_frame_unref(p_frame);
        if (ret < 0)
            break;
    }

    av_frame_free(&p_frame);
    return 0;
}
// Correct `delay` (the previous frame's nominal display duration) so that the
// video clock chases or waits for the master (audio) clock.
// The returned value is how long to wait after the previous frame before
// showing the current one: shrunk when video lags, stretched when it leads.
static double compute_target_delay(double delay, player_stat_t *is)
{
    // diff > 0: video clock is ahead of the master clock; diff < 0: behind.
    // Each clock value is effectively "last frame pts + time elapsed since".
    double diff = get_clock(&is->video_clk) - get_clock(&is->audio_clk);

    // Sync threshold: `delay` clamped into
    // [AV_SYNC_THRESHOLD_MIN, AV_SYNC_THRESHOLD_MAX].
    double sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));

    /* skip or repeat frame. We take into account the
       delay to compute the threshold. I still don't know
       if it is the best guess */
    if (!isnan(diff))
    {
        if (diff <= -sync_threshold)
        {
            // Video is behind by more than the threshold: catch up.
            // If delay+diff < 0 play immediately (delay = 0).
            delay = FFMAX(0, delay + diff);
        }
        else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD)
        {
            // Video is ahead AND the previous frame was abnormally long:
            // only add the drift instead of doubling.
            delay = delay + diff;
        }
        else if (diff >= sync_threshold)
        {
            // Video is ahead by more than the threshold: slow down (2x delay).
            delay = 2 * delay;
        }
        // Within the threshold: keep delay unchanged.
    }

    av_log(NULL, AV_LOG_TRACE, "video: delay=%0.3f A-V=%f\n", delay, -diff);
    return delay;
}
// Effective display duration of frame `vp`, given the following frame
// `nextvp`: the pts delta when usable, otherwise vp's nominal duration.
// Frames from different playback serials (i.e. across a seek) have no
// meaningful spacing, so 0 is returned.
static double vp_duration(player_stat_t *is, frame_t *vp, frame_t *nextvp) {
    if (vp->serial != nextvp->serial)
        return 0.0;

    double d = nextvp->pts - vp->pts;
    // Fall back to the nominal duration when the delta is NaN or <= 0.
    return (isnan(d) || d <= 0) ? vp->duration : d;
}
// Advance the video clock to the pts of the frame about to be displayed.
// `pos` is currently unused; kept for interface compatibility with callers.
static void update_video_pts(player_stat_t *is, double pts, int64_t pos, int serial) {
    set_clock(&is->video_clk, pts, serial);
}
// Render the most recently dequeued frame: convert it to YUV420P with
// sws_scale(), upload it into the SDL texture, and present it centered in
// the window.
static void video_display(player_stat_t *is)
{
    frame_t *frm = frame_queue_peek_last(&is->video_frm_queue);

    // Pixel conversion: frm->frame->data ==> is->p_frm_yuv->data.
    // Terminology:
    //   plane  - one component array (Y/U/V for YUV, R/G/B for RGB)
    //   slice  - a run of consecutive image rows (top-down or bottom-up)
    //   stride - bytes per image row, including alignment padding
    // AVFrame.data[i] points at plane i; AVFrame.linesize[i] is its stride.
    sws_scale(is->img_convert_ctx,                      // sws context
              (const uint8_t *const *)frm->frame->data, // src planes
              frm->frame->linesize,                     // src strides
              0,                                        // src slice y
              is->p_vcodec_ctx->height,                 // src slice height
              is->p_frm_yuv->data,                      // dst planes
              is->p_frm_yuv->linesize                   // dst strides
              );

    // Center the video inside the window.
    is->sdl_video.rect.x = (is->sdl_video.window_width  - is->sdl_video.width)  / 2;
    is->sdl_video.rect.y = (is->sdl_video.window_height - is->sdl_video.height) / 2;
    is->sdl_video.rect.w = is->sdl_video.width;
    is->sdl_video.rect.h = is->sdl_video.height;

    // Upload the fresh YUV planes into the streaming texture.
    SDL_UpdateYUVTexture(is->sdl_video.texture,
                         NULL,
                         is->p_frm_yuv->data[0],     // Y plane
                         is->p_frm_yuv->linesize[0], // Y pitch
                         is->p_frm_yuv->data[1],     // U plane
                         is->p_frm_yuv->linesize[1], // U pitch
                         is->p_frm_yuv->data[2],     // V plane
                         is->p_frm_yuv->linesize[2]  // V pitch
                         );

    // Clear the render target, copy the texture into the target rectangle,
    // then present the result on screen.
    SDL_RenderClear(is->sdl_video.renderer);
    SDL_RenderCopy(is->sdl_video.renderer,
                   is->sdl_video.texture,
                   NULL,                    // src rect (NULL = whole texture)
                   &is->sdl_video.rect      // dst rect
                   );
    SDL_RenderPresent(is->sdl_video.renderer);
}
/* Called (roughly) once per refresh tick to display each frame.
 * Decides, from the video clock vs. the master (audio) clock, whether the
 * frame at the head of the queue should be shown now, shown later (in which
 * case *remaining_time is shortened to the wait), or dropped entirely. */
static void video_refresh(void *opaque, double *remaining_time)
{
    player_stat_t *is = (player_stat_t *)opaque;
    double time;
    static bool first_frame = true;     // true until the first frame anchors frame_timer

retry:
    if (frame_queue_nb_remaining(&is->video_frm_queue) == 0)    // every queued frame already shown
    {
        // nothing to do, no picture to display in the queue
        return;
    }

    double last_duration, duration, delay;
    frame_t *vp, *lastvp;

    /* dequeue the picture */
    lastvp = frame_queue_peek_last(&is->video_frm_queue);   // previous frame: the one last displayed
    vp = frame_queue_peek(&is->video_frm_queue);            // current frame: the one to display next

    // Anchor frame_timer to the current wall clock before the first frame.
    // (In ffplay this also happens when the playback serial changes, e.g.
    // after a seek starts a new sequence.)
    if (first_frame)
    {
        is->frame_timer = av_gettime_relative() / 1000000.0;
        first_frame = false;
    }

    // Paused: keep re-displaying the previous frame.
    if (is->paused)
        goto display;

    /* compute nominal last_duration */
    last_duration = vp_duration(is, lastvp, vp);        // previous frame's duration: vp->pts - lastvp->pts
    delay = compute_target_delay(last_duration, is);    // corrected by the video/master clock difference

    time= av_gettime_relative()/1000000.0;
    // Display moment (frame_timer + delay) still in the future:
    if (time < is->frame_timer + delay) {
        // Not time yet — shorten remaining_time to the wait until then
        *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
        // and return without displaying.
        return;
    }

    // Advance frame_timer by the corrected delay.
    is->frame_timer += delay;
    // Resync: if frame_timer fell too far behind the system clock (beyond the
    // maximum sync threshold), snap it to the current time.
    if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
    {
        is->frame_timer = time;
    }

    // Update the video clock to this frame's pts (under the queue mutex).
    SDL_LockMutex(is->video_frm_queue.mutex);
    if (!isnan(vp->pts))
    {
        update_video_pts(is, vp->pts, vp->pos, vp->serial); // set video clock pts/time
    }
    SDL_UnlockMutex(is->video_frm_queue.mutex);

    // Frame dropping: only considered when more than one undisplayed frame
    // remains (never drop the only frame we have).
    if (frame_queue_nb_remaining(&is->video_frm_queue) > 1)
    {
        frame_t *nextvp = frame_queue_peek_next(&is->video_frm_queue);  // next frame to be displayed after vp
        duration = vp_duration(is, vp, nextvp);     // vp's display duration = nextvp->pts - vp->pts
        // vp missed its slot: the NEXT frame's display moment
        // (frame_timer + duration) is already in the past.
        if (time > is->frame_timer + duration)
        {
            frame_queue_next(&is->video_frm_queue); // discard lastvp: read index moves from lastvp to vp
            goto retry;                             // re-evaluate with vp as the "previous" frame
        }
    }

    // Consume one queue slot: read index moves from lastvp to vp (or, if
    // frames were dropped above, from vp to nextvp).
    frame_queue_next(&is->video_frm_queue);

display:
    video_display(is);  // render the current frame (nextvp if frames were dropped)
}
// Video playback thread: sleeps for the time suggested by the previous
// refresh, then asks video_refresh() to display the next frame (or to push
// the wakeup further out). Exits when abort_request is set.
static int video_playing_thread(void *arg)
{
    player_stat_t *is = (player_stat_t *)arg;
    double remaining_time = 0.0;

    for (;;)
    {
        if (remaining_time > 0.0)
            av_usleep((unsigned)(remaining_time * 1000000.0));

        remaining_time = REFRESH_RATE;
        // Show the current frame now, or have remaining_time shortened so we
        // wake up exactly at its display moment.
        video_refresh(is, &remaining_time);

        if (is->abort_request)
        {
            av_log(NULL, AV_LOG_DEBUG, "playing thread receive quit\n");
            break;
        }
    }
    return 0;
}
static int open_video_playing(void *arg)
{
player_stat_t *is = (player_stat_t *)arg;
int ret;
int buf_size;
uint8_t* buffer = NULL;
is->p_frm_yuv = av_frame_alloc();
if (is->p_frm_yuv == NULL)
{
av_log(NULL, AV_LOG_FATAL, "av_frame_alloc() for p_frm_raw failed\n");
return -1;
}
// 为AVFrame.*data[]手工分配缓冲区,用于存储sws_scale()中目的帧视频数据
buf_size = av_image_get_buffer_size(AV_PIX_FMT_YUV420P,
is->p_vcodec_ctx->width,
is->p_vcodec_ctx->height,
1
);
// buffer将作为p_frm_yuv的视频数据缓冲区
buffer = (uint8_t *)av_malloc(buf_size);
if (buffer == NULL)
{
av_log(NULL, AV_LOG_FATAL, "av_malloc() for buffer failed\n");
return -1;
}
// 使用给定参数设定p_frm_yuv->data和p_frm_yuv->linesize
ret = av_image_fill_arrays(is->p_frm_yuv->data, // dst data[]
is->p_frm_yuv->linesize, // dst linesize[]
buffer, // src buffer
AV_PIX_FMT_YUV420P, // pixel format
is->p_vcodec_ctx->width, // width
is->p_vcodec_ctx->height,// height
1 // align
);
if (ret < 0)
{
av_log(NULL, AV_LOG_FATAL, "av_image_fill_arrays() failed %d\n", ret);
return -1;;
}
// A2. 初始化SWS context,用于后续图像转换
// 此处第6个参数使用的是FFmpeg中的像素格式,对比参考注释B3
// FFmpeg中的像素格式AV_PIX_FMT_YUV420P对应SDL中的像素格式SDL_PIXELFORMAT_IYUV
// 如果解码后得到图像的不被SDL支持,不进行图像转换的话,SDL是无法正常显示图像的
// 如果解码后得到图像的能被SDL支持,则不必进行图像转换
// 这里为了编码简便,统一转换为SDL支持的格式AV_PIX_FMT_YUV420P==>SDL_PIXELFORMAT_IYUV
is->img_convert_ctx = sws_getContext(is->p_vcodec_ctx->width, // src width
is->p_vcodec_ctx->height, // src height
is->p_vcodec_ctx->pix_fmt, // src format
is->p_vcodec_ctx->width, // dst width
is->p_vcodec_ctx->height, // dst height
AV_PIX_FMT_YUV420P, // dst format
SWS_BICUBIC, // flags
NULL, // src filter
NULL, // dst filter
NULL // param
);
if (is->img_convert_ctx == NULL)
{
av_log(NULL, AV_LOG_FATAL, "sws_getContext() failed\n");
return -1;
}
// SDL_Rect赋值
is->sdl_video.rect.x = 0;
is->sdl_video.rect.y = 0;
is->sdl_video.rect.w = is->p_vcodec_ctx->width;
is->sdl_video.rect.h = is->p_vcodec_ctx->height;
// 1. 创建SDL窗口,SDL 2.0支持多窗口
// SDL_Window即运行程序后弹出的视频窗口,同SDL 1.x中的SDL_Surface
is->sdl_video.window = SDL_CreateWindow("simple ffplayer",
SDL_WINDOWPOS_UNDEFINED,// 不关心窗口X坐标
SDL_WINDOWPOS_UNDEFINED,// 不关心窗口Y坐标
is->sdl_video.rect.w,
is->sdl_video.rect.h,
SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE
);
if (is->sdl_video.window == NULL)
{
av_log(NULL, AV_LOG_FATAL, "SDL_CreateWindow() failed: %s\n", SDL_GetError());
return -1;
}
// 2. 创建SDL_Renderer
// SDL_Renderer:渲染器
is->sdl_video.renderer = SDL_CreateRenderer(is->sdl_video.window, -1, 0);
if (is->sdl_video.renderer == NULL)
{
av_log(NULL, AV_LOG_FATAL, "SDL_CreateRenderer() failed: %s\n", SDL_GetError());
return -1;
}
// 3. 创建SDL_Texture
// 一个SDL_Texture对应一帧YUV数据,同SDL 1.x中的SDL_Overlay
is->sdl_video.texture = SDL_CreateTexture(is->sdl_video.renderer,
SDL_PIXELFORMAT_IYUV,
SDL_TEXTUREACCESS_STREAMING,
is->sdl_video.rect.w,
is->sdl_video.rect.h
);
if (is->sdl_video.texture == NULL)
{
av_log(NULL, AV_LOG_FATAL, "SDL_CreateTexture() failed: %s\n", SDL_GetError());
return -1;
}
is->video_ply_tid = SDL_CreateThread(video_playing_thread, "video playing thread", is);
return 0;
}
// Build the video decoder from the stream's codec parameters, start the
// decode thread, and record the default display size.
// Returns 0 on success, -1 on failure.
// Fix vs. original: p_codec_ctx was leaked when avcodec_parameters_to_context()
// or avcodec_open2() failed; SDL_CreateThread() result was never checked;
// "Cann't" typo corrected.
static int open_video_stream(player_stat_t *is)
{
    AVCodecParameters *p_codec_par = NULL;
    AVCodec *p_codec = NULL;
    AVCodecContext *p_codec_ctx = NULL;
    AVStream *p_stream = is->p_video_stream;
    int ret;

    // 1. Build the AVCodecContext for the video stream.
    // 1.1 Grab the decoder parameters carried by the stream.
    p_codec_par = p_stream->codecpar;

    // 1.2 Find a decoder for this codec id.
    p_codec = avcodec_find_decoder(p_codec_par->codec_id);
    if (p_codec == NULL)
    {
        av_log(NULL, AV_LOG_FATAL, "Can't find codec!\n");
        return -1;
    }

    // 1.3.1 Allocate the context, members defaulted for p_codec.
    p_codec_ctx = avcodec_alloc_context3(p_codec);
    if (p_codec_ctx == NULL)
    {
        av_log(NULL, AV_LOG_FATAL, "avcodec_alloc_context3() failed\n");
        return -1;
    }

    // 1.3.2 Copy stream parameters into the context.
    ret = avcodec_parameters_to_context(p_codec_ctx, p_codec_par);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_FATAL, "avcodec_parameters_to_context() failed\n");
        avcodec_free_context(&p_codec_ctx);
        return -1;
    }

    // 1.3.3 Open the decoder; the context is now fully initialized.
    ret = avcodec_open2(p_codec_ctx, p_codec, NULL);
    if (ret < 0)
    {
        av_log(NULL, AV_LOG_FATAL, "avcodec_open2() failed %d\n", ret);
        avcodec_free_context(&p_codec_ctx);
        return -1;
    }
    is->p_vcodec_ctx = p_codec_ctx;

    // 2. Start the video decode thread.
    is->video_dec_tid = SDL_CreateThread(video_decode_thread, "video decode thread", is);
    if (is->video_dec_tid == NULL)
    {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread() failed: %s\n", SDL_GetError());
        return -1;
    }

    // 3. Record the default display size (the coded picture size).
    is->sdl_video.width = p_codec_par->width;
    is->sdl_video.height = p_codec_par->height;
    is->sdl_video.height_width_ratio = (double)(1.0 * is->sdl_video.height) / is->sdl_video.width;
    is->sdl_video.window_width = p_codec_par->width;
    is->sdl_video.window_height = p_codec_par->height;
    return 0;
}
// Open the whole video pipeline: first the decoder and its decode thread,
// then the scaler/SDL output and the playback thread.
// Returns 0 on success, or the first negative error encountered.
int open_video(player_stat_t *is)
{
    int ret;

    if ((ret = open_video_stream(is)) < 0)
        return ret;

    if ((ret = open_video_playing(is)) < 0)
        return ret;

    return 0;
}