Capturing a Video Stream with FFmpeg and Playing It Back in C

1. The video plays, but playback is too fast

Capturing and playing a video stream in C combines FFmpeg's decoding with a graphics library's rendering: in essence, frames are decoded one by one and each frame is rendered at the right time interval. The following implementation uses FFmpeg + SDL2:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <SDL2/SDL.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>  /* av_image_alloc */

#define REFRESH_EVENT (SDL_USEREVENT + 1)

SDL_Window* window;
SDL_Renderer* renderer;
SDL_Texture* texture;
AVFormatContext* fmt_ctx;
AVCodecContext* codec_ctx;
int video_stream_index = -1;

// Playback thread: demux, decode, convert to RGB, and pace the frames
int video_play_thread(void* arg) {
    AVPacket* pkt = av_packet_alloc();
    AVFrame* frame = av_frame_alloc();
    struct SwsContext* sws_ctx = NULL;

    while (av_read_frame(fmt_ctx, pkt) >= 0) {
        if (pkt->stream_index == video_stream_index) {
            if (avcodec_send_packet(codec_ctx, pkt) == 0) {
                while (avcodec_receive_frame(codec_ctx, frame) == 0) {
                    // Lazily create the YUV -> RGB converter
                    if (!sws_ctx) {
                        sws_ctx = sws_getContext(
                            codec_ctx->width, codec_ctx->height, codec_ctx->pix_fmt,
                            codec_ctx->width, codec_ctx->height, AV_PIX_FMT_RGB24,
                            SWS_BILINEAR, NULL, NULL, NULL);
                    }

                    AVFrame* rgb_frame = av_frame_alloc();
                    av_image_alloc(rgb_frame->data, rgb_frame->linesize,
                                   codec_ctx->width, codec_ctx->height, AV_PIX_FMT_RGB24, 1);
                    sws_scale(sws_ctx, (const uint8_t* const*)frame->data, frame->linesize,
                              0, frame->height, rgb_frame->data, rgb_frame->linesize);

                    // Upload the RGB frame to the texture
                    SDL_UpdateTexture(texture, NULL, rgb_frame->data[0], rgb_frame->linesize[0]);

                    // Ask the main thread to redraw
                    SDL_Event event;
                    event.type = REFRESH_EVENT;
                    SDL_PushEvent(&event);

                    // Per-frame delay taken from codec_ctx->time_base.
                    // BUG: for a decoder this is generally NOT the frame interval
                    // (it is usually left at its default), so the delay comes out
                    // near zero -- this is why playback runs too fast. Section 2
                    // replaces it with pts-based scheduling.
                    int delay = (codec_ctx->time_base.num * 1000) / codec_ctx->time_base.den;
                    SDL_Delay(delay);
                    av_freep(&rgb_frame->data[0]);
                    av_frame_free(&rgb_frame);
                }
            }
        }
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
    av_frame_free(&frame);
    if (sws_ctx) sws_freeContext(sws_ctx);
    return 0;
}

#undef main  /* SDL2 redefines main() on some platforms */
int main(int argc, char* argv[]) {
    // Initialize SDL
    if (SDL_Init(SDL_INIT_VIDEO) < 0) {
        fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }
    window = SDL_CreateWindow("Video Player", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                              640, 480, SDL_WINDOW_SHOWN);
    if (!window) {
        fprintf(stderr, "SDL: could not create window - exiting: %s\n", SDL_GetError());
        return -1;
    }
    renderer = SDL_CreateRenderer(window, -1, 0);
    if (!renderer) {
        fprintf(stderr, "SDL: could not create renderer - exiting: %s\n", SDL_GetError());
        return -1;
    }

    // Open the input (av_register_all() is no longer needed in FFmpeg 4.0+)
    if (avformat_open_input(&fmt_ctx, "test.mp4", NULL, NULL) < 0) {
        fprintf(stderr, "Could not open source file %s\n", "test.mp4");
        return -1;
    }
    if (avformat_find_stream_info(fmt_ctx, NULL) < 0) {
        fprintf(stderr, "Could not find stream information\n");
        return -1;
    }

    // Find the first video stream
    for (unsigned int i = 0; i < fmt_ctx->nb_streams; i++) {
        if (fmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }
    if (video_stream_index < 0) {
        fprintf(stderr, "Could not find a video stream\n");
        return -1;
    }

    // Set up the decoder
    AVCodecParameters* codecpar = fmt_ctx->streams[video_stream_index]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        return -1;
    }
    codec_ctx = avcodec_alloc_context3(codec);
    if (!codec_ctx) {
        fprintf(stderr, "Could not allocate video codec context\n");
        return -1;
    }
    if (avcodec_parameters_to_context(codec_ctx, codecpar) < 0) {
        fprintf(stderr, "Could not copy codec parameters to codec context\n");
        return -1;
    }
    if (avcodec_open2(codec_ctx, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        return -1;
    }

    // Create an RGB texture matching the video dimensions
    texture = SDL_CreateTexture(renderer, SDL_PIXELFORMAT_RGB24, SDL_TEXTUREACCESS_STREAMING,
                                codec_ctx->width, codec_ctx->height);

    // Start the playback thread
    SDL_CreateThread(video_play_thread, "video_thread", NULL);

    // Event loop: redraw on REFRESH_EVENT, exit on SDL_QUIT
    SDL_Event event;
    while (1) {
        SDL_WaitEvent(&event);
        if (event.type == REFRESH_EVENT) {
            SDL_RenderClear(renderer);
            SDL_RenderCopy(renderer, texture, NULL, NULL);
            SDL_RenderPresent(renderer);
        } else if (event.type == SDL_QUIT) {
            break;
        }
    }

    // Release resources
    avformat_close_input(&fmt_ctx);
    avcodec_free_context(&codec_ctx);
    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();

    return 0;
}
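
Why this version runs too fast: after avcodec_parameters_to_context(), codec_ctx->time_base is generally not the frame interval, so the computed delay is near zero and frames appear as fast as they decode. A more reliable per-frame interval can be derived from the stream itself, for example (a minimal sketch; assumes a roughly constant frame rate):

// Sketch: derive the display interval from the stream's guessed frame rate.
AVRational fr = av_guess_frame_rate(fmt_ctx, fmt_ctx->streams[video_stream_index], NULL);
int delay_ms = (fr.num > 0) ? (1000 * fr.den / fr.num) : 40;  // fall back to ~25 fps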

Build command:

gcc -o client ./src/client.c -lavformat -lavcodec -lavutil -lswscale -lswresample -lSDL2 -lm -pthread
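
If pkg-config files for FFmpeg and SDL2 are installed (the module names below are the usual ones, but may vary by distribution), the flags can also be generated automatically:

gcc -o client ./src/client.c $(pkg-config --cflags --libs libavformat libavcodec libavutil libswscale libswresample sdl2) -lm -pthread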

How it works:

Playback really is frame-by-frame, but two conditions must hold:

  • decoding speed > frame rate       // frames must be ready on time
  • render interval = frame interval  // motion looks smooth (at 25 fps, that is 1000/25 = 40 ms per frame)

Core pipeline:

graph LR
A[Video file] --> B[FFmpeg demuxing]
B --> C[H.264/H.265 packets]
C --> D[Decoder]
D --> E[Raw YUV420P frames]
E --> F[Convert to RGB]
F --> G[SDL texture update]
G --> H[Render to screen]
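
In API terms, the demux-and-decode stages of this pipeline reduce to FFmpeg's send/receive pattern (a minimal sketch using the variables from the code above; error handling omitted):

AVPacket *pkt = av_packet_alloc();
AVFrame  *frm = av_frame_alloc();
while (av_read_frame(fmt_ctx, pkt) >= 0) {            // demux one packet
    if (pkt->stream_index == video_stream_index &&
        avcodec_send_packet(codec_ctx, pkt) == 0) {   // feed the decoder
        while (avcodec_receive_frame(codec_ctx, frm) == 0) {
            /* convert to RGB and render the frame here */
        }
    }
    av_packet_unref(pkt);
}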

2. Fixing the too-fast playback

The fix is to pace rendering by each frame's presentation timestamp (pts) rather than by a fixed guess: convert the pts into wall-clock microseconds via the stream's time_base, then wait until that moment before presenting. The revised player below also renders YUV directly through an SDL YV12 texture, skipping the RGB conversion:

#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>  /* av_image_get_buffer_size, av_image_fill_arrays */
#include <libavutil/time.h>      /* av_gettime */
#include <SDL2/SDL.h>

#undef main
int main(int argc, char* argv[]) {
    if (argc < 2) {
        printf("Usage: %s <video_file>\n", argv[0]);
        return -1;
    }

    const char* video_file = argv[1];

    // Initialize FFmpeg networking (needed for URLs; harmless for local files)
    avformat_network_init();

    AVFormatContext* format_context = avformat_alloc_context();
    if (avformat_open_input(&format_context, video_file, NULL, NULL) != 0) {
        printf("Could not open video file.\n");
        return -1;
    }

    if (avformat_find_stream_info(format_context, NULL) < 0) {
        printf("Could not find stream information.\n");
        return -1;
    }

    int video_stream_index = -1;
    for (unsigned int i = 0; i < format_context->nb_streams; i++) {
        if (format_context->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break;
        }
    }
    if (video_stream_index == -1) {
        printf("Could not find a video stream.\n");
        return -1;
    }

    AVCodecParameters* codec_params = format_context->streams[video_stream_index]->codecpar;
    const AVCodec* codec = avcodec_find_decoder(codec_params->codec_id);
    if (codec == NULL) {
        printf("Unsupported codec.\n");
        return -1;
    }

    AVCodecContext* codec_context = avcodec_alloc_context3(codec);
    if (avcodec_parameters_to_context(codec_context, codec_params) < 0) {
        printf("Could not copy codec parameters to codec context.\n");
        return -1;
    }

    if (avcodec_open2(codec_context, codec, NULL) < 0) {
        printf("Could not open codec.\n");
        return -1;
    }

    // Initialize SDL
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_TIMER) < 0) {
        printf("Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }

    SDL_Window* window = SDL_CreateWindow(
        "Video Player",
        SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
        codec_context->width, codec_context->height,
        SDL_WINDOW_OPENGL);
    if (!window) {
        printf("Could not create SDL window - %s\n", SDL_GetError());
        return -1;
    }

    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
    SDL_Texture* texture = SDL_CreateTexture(
        renderer, SDL_PIXELFORMAT_YV12, SDL_TEXTUREACCESS_STREAMING,
        codec_context->width, codec_context->height);

    AVFrame* frame = av_frame_alloc();
    AVFrame* frameYUV = av_frame_alloc();
    uint8_t* buffer = (uint8_t*)av_malloc(
        av_image_get_buffer_size(AV_PIX_FMT_YUV420P, codec_context->width, codec_context->height, 1));
    av_image_fill_arrays(frameYUV->data, frameYUV->linesize, buffer,
                         AV_PIX_FMT_YUV420P, codec_context->width, codec_context->height, 1);

    struct SwsContext* sws_ctx = sws_getContext(
        codec_context->width, codec_context->height, codec_context->pix_fmt,
        codec_context->width, codec_context->height, AV_PIX_FMT_YUV420P,
        SWS_BILINEAR, NULL, NULL, NULL);

    AVPacket packet;
    // Microseconds per pts tick: the stream time_base in seconds, scaled by AV_TIME_BASE.
    double us_per_pts = av_q2d(format_context->streams[video_stream_index]->time_base) * AV_TIME_BASE;
    int64_t start_time = av_gettime();  // playback start, wall clock, in microseconds
    int quit = 0;
    while (!quit && av_read_frame(format_context, &packet) >= 0) {
        // Keep the window responsive: handle close requests between packets
        SDL_Event event;
        while (SDL_PollEvent(&event)) {
            if (event.type == SDL_QUIT) quit = 1;
        }
        if (packet.stream_index == video_stream_index) {
            if (avcodec_send_packet(codec_context, &packet) == 0) {
                while (avcodec_receive_frame(codec_context, frame) == 0) {
                    sws_scale(sws_ctx, (const uint8_t* const*)frame->data, frame->linesize,
                              0, codec_context->height, frameYUV->data, frameYUV->linesize);

                    SDL_UpdateYUVTexture(texture, NULL,
                                         frameYUV->data[0], frameYUV->linesize[0],
                                         frameYUV->data[1], frameYUV->linesize[1],
                                         frameYUV->data[2], frameYUV->linesize[2]);

                    SDL_RenderClear(renderer);
                    SDL_RenderCopy(renderer, texture, NULL, NULL);
                    SDL_RenderPresent(renderer);

                    // pts-based pacing: wait until this frame's presentation time
                    int64_t current_time = av_gettime();
                    int64_t frame_time = (int64_t)(frame->pts * us_per_pts);  // pts -> microseconds
                    int64_t delay = frame_time - (current_time - start_time);
                    if (delay > 0) {
                        SDL_Delay((Uint32)(delay / 1000));  // SDL_Delay takes milliseconds
                    }
                }
            }
        }
        av_packet_unref(&packet);
    }

    // Clean up
    av_free(buffer);
    av_frame_free(&frameYUV);
    av_frame_free(&frame);
    sws_freeContext(sws_ctx);
    SDL_DestroyTexture(texture);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
    avcodec_free_context(&codec_context);
    avformat_close_input(&format_context);

    return 0;
}
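
One detail both versions omit: draining the decoder at end of file. FFmpeg buffers frames internally, so the last few frames only come out after entering draining mode. A minimal sketch, using the names from the second example:

// Flush: a NULL packet puts the decoder into draining mode;
// then receive until AVERROR_EOF to get the buffered frames.
avcodec_send_packet(codec_context, NULL);
while (avcodec_receive_frame(codec_context, frame) == 0) {
    /* convert, upload, and present the frame exactly as in the main loop */
}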