1. First, allocate a buffer to hold the YUV data:
uint8_t *video_dst_data[4];
int video_dst_linesize[4];
int video_dst_bufsize;
// av_image_alloc fills video_dst_data/video_dst_linesize and returns the buffer size in bytes
video_dst_bufsize = av_image_alloc(video_dst_data, video_dst_linesize,
                                   dec_ctx->width, dec_ctx->height, pixelFormat, 1);
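av_image_alloc returns the total size of the allocated image in bytes, or a negative AVERROR code on failure, so it is worth guarding before entering the read loop. A minimal check, assuming the enclosing function can simply bail out on error:

if (video_dst_bufsize < 0) {
    LOGE("Could not allocate raw video buffer\n");
    return;   // hypothetical error path; adapt to how this function reports failure
}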
2. Read packets from the input in a loop, decode each video packet, push the decoded frame through the filter graph, and write the filtered YUV frame to the output file:
// Read one packet at a time from the input
while (av_read_frame(fmt_ctx, &avPacket) >= 0) {
    // Does this packet belong to the video stream?
    if (avPacket.stream_index == videoStream->index) {
        LOGE("AVStream is VideoStream");
        got_frame = 0;
        // Decode the packet into a raw frame
        ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, &avPacket);
        if (ret < 0) {
            LOGE("Error decoding video\n ret=%d", ret);
            break;
        }
        if (got_frame) {
            // Feed the decoded frame into the filter graph
            if (av_buffersrc_add_frame_flags(buffersrc_ctx, frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
                LOGE("Error while feeding the filter_graph\n");
                break;
            }
            // Pull the filtered frame back out of the filter graph
            ret = av_buffersink_get_frame(buffersink_ctx, filt_frame);
            if (ret >= 0) {
                if (filt_frame->format == AV_PIX_FMT_YUV420P) {
                    LOGE("video format is yuv420p");
                    /* write the YUV data to the .yuv file */
                    av_image_copy(video_dst_data, video_dst_linesize,
                                  (const uint8_t **) (filt_frame->data), filt_frame->linesize,
                                  pixelFormat, filt_frame->width, filt_frame->height);
                    fwrite(video_dst_data[0], 1, video_dst_bufsize, file_fd);
                }
                av_frame_unref(filt_frame);
            }
        }
    } else if (avPacket.stream_index == audioStream->index) { // audio stream
        LOGE("AVStream is AudioStream");
        got_frame = 0;
        encode_audio(env, &avPacket, frame);
    }
    av_frame_unref(frame);
    av_packet_unref(&avPacket);
}
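Once av_read_frame stops returning packets the loop ends, and the resources used above should be released. A minimal teardown sketch, assuming the graph built earlier is stored in a variable named filter_graph and file_fd is the FILE* opened for the .yuv output:

fclose(file_fd);                      // flush and close the .yuv output file
av_freep(&video_dst_data[0]);         // frees the whole buffer allocated in step 1
av_frame_free(&frame);
av_frame_free(&filt_frame);
avfilter_graph_free(&filter_graph);   // assumed name; also releases buffersrc_ctx/buffersink_ctx
avcodec_close(dec_ctx);               // or avcodec_free_context(&dec_ctx) if it was allocated with avcodec_alloc_context3
avformat_close_input(&fmt_ctx);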
3. Because the file contains raw YUV data with no container or header, it must be played with a dedicated YUV player such as yuvplayer.exe.
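For the same reason, any player needs to be told the pixel format and frame size explicitly. For example, ffplay can render the file like this (the resolution and file name below are placeholders; use the decoder's actual width/height and your own output path):

ffplay -f rawvideo -pixel_format yuv420p -video_size 1280x720 output.yuv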