#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <stdio.h>
#include <SDL2/SDL.h>
#include <time.h>

#define SFM_REFRESH_EVENT (SDL_USEREVENT + 1)

int thread_exit = 0;
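// Overview: demux the input file with libavformat, decode video frames with
// libavcodec, convert them to YUV420P with libswscale, and render them with
// SDL2. A worker thread posts SFM_REFRESH_EVENT roughly every 40 ms to pace
// decoding and rendering.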
// Refresh thread: post a refresh event roughly every 40 ms (~25 fps)
int sfp_refresh_thread(void *opaque)
{
    SDL_Event event;
    while (thread_exit == 0) {
        event.type = SFM_REFRESH_EVENT;
        SDL_PushEvent(&event);
        // Wait 40 ms
        SDL_Delay(40);
    }
    return 0;
}

int main(int argc, char* argv[])
{
    AVFormatContext *pFormatCtx;    // format context
    int i, videoindex;
    AVCodecContext *pCodecCtx;      // codec context
    AVCodec *pCodec;                // codec
    int screen_w = 0, screen_h = 0;

    SDL_Window *screen;
    SDL_Renderer *sdlRenderer;
    SDL_Texture *sdlTexture;
    SDL_Rect sdlRect;
    SDL_Thread *video_tid;
    SDL_Event event;

    av_register_all();              // register all formats and codecs
    avformat_network_init();        // required before opening network streams
    pFormatCtx = avformat_alloc_context();  // allocate the format context

    if (avformat_open_input(&pFormatCtx, argv[1], NULL, NULL) != 0)  // open the media file
    {
        printf("open file error\n");
        return -1;
    }

    AVDictionary *pOptions = NULL;
    if (avformat_find_stream_info(pFormatCtx, &pOptions) < 0)  // read stream info; arg 0: format context, arg 1: options
    {
        return -1;
    }

    av_dump_format(pFormatCtx, 0, argv[1], 0);  // debug helper: dump basic info about the audio/video streams

    // Print the media duration
    if (pFormatCtx->duration != AV_NOPTS_VALUE)
    {
        int hours, mins, secs, us;
        int64_t duration = pFormatCtx->duration + 5000;
        secs = duration / AV_TIME_BASE;
        us = duration % AV_TIME_BASE;
        mins = secs / 60;
        secs %= 60;
        hours = mins / 60;
        mins %= 60;
        printf("%02d:%02d:%02d.%02d\n", hours, mins, secs, (100 * us) / AV_TIME_BASE);
    }

    i = 0;
    int videostream = -1;
    printf("pFormatCtx->nb_streams=%d\n", pFormatCtx->nb_streams);
    for (i = 0; i < pFormatCtx->nb_streams; i++)  // walk every stream and find the first video stream
    {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            videostream = i;
            break;
        }
    }
printf("videostream=%d\n", videostream); if (- == videostream)
{
printf("error no video stream\n");
return;
} pCodecCtx = pFormatCtx->streams[videostream]->codec;//codec上下文指定到格式上下文中的codec pCodec = avcodec_find_decoder( pCodecCtx->codec_id );//找到一个codec,必须先调用av_register_all() if(NULL == pCodec)
{
printf("couldn't find the decode\n");
return -;
} if( avcodec_open2(pCodecCtx, pCodec, NULL) < )//初始化一个视音频编解码器的AVCodecContext
{
printf("open decode error\n");
return -;
} AVFrame *pFrame,*pFrameYUV;//Frame结构体
    pFrame = av_frame_alloc();      // raw decoded frame
    pFrameYUV = av_frame_alloc();   // YUV frame for display

    uint8_t *out_buffer;
    int num = avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    printf("num=%d\n", num);
    out_buffer = (uint8_t *)av_malloc(num * sizeof(uint8_t));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);  // bind out_buffer to pFrameYUV's data pointers

    AVPacket packet;                // packet structure
    int ret = -1;
    i = 0;

    struct SwsContext *img_convert_ctx = NULL;  // image format conversion context
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUV420P,
                                     SWS_BICUBIC, NULL, NULL, NULL);  // initialize the swscale conversion context

    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        printf("Could not initialize SDL - %s\n", SDL_GetError());
        return -1;
    }

    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    // SDL 2.0 supports multiple windows
    screen = SDL_CreateWindow("Simplest ffmpeg player's Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                              screen_w, screen_h, SDL_WINDOW_OPENGL);
    if (!screen)
    {
        printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
        return -1;
    }

    sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
    sdlTexture = SDL_CreateTexture(sdlRenderer, SDL_PIXELFORMAT_IYUV, SDL_TEXTUREACCESS_STREAMING,
                                   pCodecCtx->width, pCodecCtx->height);
    sdlRect.x = 0;
    sdlRect.y = 0;
    sdlRect.w = screen_w;
    sdlRect.h = screen_h;

    int f1 = 0;                     // packets read
    int f2 = 0;                     // video frames rendered
    int got_picture = -1;

    video_tid = SDL_CreateThread(sfp_refresh_thread, NULL, NULL);

    time_t t;
    time(&t);
    printf("begin :%s\n", ctime(&t));

    // Event loop: each SFM_REFRESH_EVENT reads, decodes and renders one packet
    while (1)
    {
        SDL_WaitEvent(&event);
        if (event.type == SFM_REFRESH_EVENT)
        {
            if (av_read_frame(pFormatCtx, &packet) >= 0)  // read the next packet from the stream
            {
                f1++;
                if (packet.stream_index == videostream)  // video packet
                {
                    ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, &packet);  // decode: AVPacket in, AVFrame out
                    if (ret < 0)
                    {
                        printf("decode error\n");
                        return -1;
                    }
                    if (got_picture)
                    {
                        // convert the decoded frame to YUV420P and display it
                        sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                                  0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
                        SDL_UpdateYUVTexture(sdlTexture, &sdlRect,
                                             pFrameYUV->data[0], pFrameYUV->linesize[0],
                                             pFrameYUV->data[1], pFrameYUV->linesize[1],
                                             pFrameYUV->data[2], pFrameYUV->linesize[2]);
                        SDL_RenderClear(sdlRenderer);
                        SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
                        SDL_RenderPresent(sdlRenderer);
                        //SDL_Delay(40);
                        f2++;
                    }
                }
                av_free_packet(&packet);
            }
            else
            {
                // end of stream: signal the refresh thread to exit and leave the loop
                thread_exit = 1;
                break;
            }
        }
    }

    time(&t);
printf("begin :%s\n", ctime(&t)); SDL_Quit(); sws_freeContext(img_convert_ctx); free(out_buffer);
av_free(pFrameYUV); // Free the YUV frame
av_free(pFrame); // Close the codec
avcodec_close(pCodecCtx); // Close the video file
avformat_close_input(&pFormatCtx); printf("f1=%d\n", f1);
printf("f2=%d\n", f2); return ;
}
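/*
 * Build sketch (one possible command line, assuming pkg-config entries for
 * FFmpeg and SDL2 are installed; file name and flags are only an example):
 *
 *   gcc player.c -o player $(pkg-config --cflags --libs libavformat libavcodec libavutil libswscale sdl2)
 *
 * Note: av_register_all, avcodec_decode_video2 and the avpicture_* helpers
 * target the older FFmpeg 2.x/3.x API and are deprecated or removed in newer releases.
 */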