File Player Source Code Using FFMPEG and SDL
// FilePlayer (ffmpeg + SDL)
#include <windows.h>
#include <stdio.h>

extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "sdl/SDL.h"
#include "sdl/SDL_thread.h"
};

int main(int argc, char* args[])
{
    AVFormatContext *pFormatCtx;
    AVCodecContext  *pCodecCtx;
    AVCodec         *pCodec;

    // file path
    char FilePath[] = "test.mp4";

    // initialize FFMPEG
    av_register_all();
    avformat_network_init();
    pFormatCtx = avformat_alloc_context();

    if (avformat_open_input(&pFormatCtx, FilePath, NULL, NULL) != 0)
    {
        printf("Couldn't open input stream.\n");
        return -1;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
    {
        printf("Couldn't find stream information.\n");
        return -1;
    }

    // find the video stream index
    int nVideoIndex = -1;
    for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++)
    {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            nVideoIndex = i;
            break;
        }
    }
    if (nVideoIndex == -1)
    {
        printf("Didn't find a video stream.\n");
        return -1;
    }

    // find and open the decoder
    pCodecCtx = pFormatCtx->streams[nVideoIndex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (pCodec == NULL)
    {
        printf("Codec not found.\n");
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
    {
        printf("Couldn't open codec.\n");
        return -1;
    }

    // allocate frames and the YUV420P output buffer
    AVFrame *pFrame, *pFrameYUV;
    pFrame    = avcodec_alloc_frame();
    pFrameYUV = avcodec_alloc_frame();
    uint8_t *out_buffer = (uint8_t*)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
    avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    // initialize SDL
    if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER))
    {
        printf("Couldn't initialize SDL [%s]\n", SDL_GetError());
        return -1;
    }

    int screen_w = 0, screen_h = 0;
    SDL_Surface *screen;
    screen_w = pCodecCtx->width;
    screen_h = pCodecCtx->height;
    //screen = SDL_SetVideoMode(screen_w, screen_h, 0, 0);
    screen = SDL_SetVideoMode(screen_w, screen_h, 0, SDL_RESIZABLE);
    if (!screen)
    {
        printf("SDL could not set video mode [%s]\n", SDL_GetError());
        return -1;
    }

    SDL_Overlay *bmp;
    bmp = SDL_CreateYUVOverlay(pCodecCtx->width, pCodecCtx->height, SDL_YV12_OVERLAY, screen);

    SDL_Rect rect;
    rect.x = 0;
    rect.y = 0;
    rect.w = screen_w;
    rect.h = screen_h;

    // dump file information
    printf("File Info\n");
    av_dump_format(pFormatCtx, 0, FilePath, 0);

    AVPacket *packet = (AVPacket*)av_malloc(sizeof(AVPacket));
    struct SwsContext *img_convert_ctx;
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P,
                                     SWS_BICUBIC, NULL, NULL, NULL);

    int ret = 0;
    int got_picture;
    while (av_read_frame(pFormatCtx, packet) >= 0)
    {
        if (packet->stream_index == nVideoIndex)
        {
            // decode one video packet
            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
            if (ret < 0)
            {
                printf("Decode Err\n");
                return -1;
            }
            if (got_picture)
            {
                // convert the decoded picture to YUV420P
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize,
                          0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);

                // hand the planes to the overlay (YV12 swaps the U and V planes)
                SDL_LockYUVOverlay(bmp);
                bmp->pixels[0]  = pFrameYUV->data[0];
                bmp->pixels[2]  = pFrameYUV->data[1];
                bmp->pixels[1]  = pFrameYUV->data[2];
                bmp->pitches[0] = pFrameYUV->linesize[0];
                bmp->pitches[2] = pFrameYUV->linesize[1];
                bmp->pitches[1] = pFrameYUV->linesize[2];
                SDL_UnlockYUVOverlay(bmp);

                SDL_DisplayYUVOverlay(bmp, &rect);
                SDL_Delay(40);
            }
        }
        av_free_packet(packet);
    }

    // release resources
    sws_freeContext(img_convert_ctx);
    SDL_Quit();
    av_free(out_buffer);
    av_free(pFrameYUV);
    av_free(pFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);

    return 0;
}
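
The player above is written against older FFmpeg (pre-3.x) and SDL 1.2 APIs: avcodec_alloc_frame(), avpicture_fill(), avcodec_decode_video2(), av_free_packet() and SDL overlays are all deprecated or removed in later releases. Below is a minimal sketch of how the same read-and-decode loop looks with the send/receive decoding API introduced in FFmpeg 3.1. The function name DecodeLoop and the assumption that pFormatCtx, pCodecCtx and nVideoIndex were set up as in the player above are mine, not part of FFmpeg; the display step is left as a comment where sws_scale()/SDL would go.

// Sketch only: the same decode loop on FFmpeg 3.1+ using
// avcodec_send_packet()/avcodec_receive_frame().
// DecodeLoop and its parameters mirror the player above and are assumptions.
extern "C" {
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
}

static int DecodeLoop(AVFormatContext *pFormatCtx, AVCodecContext *pCodecCtx, int nVideoIndex)
{
    AVPacket packet;
    AVFrame *pFrame = av_frame_alloc();      // replaces avcodec_alloc_frame()
    if (!pFrame)
        return -1;

    while (av_read_frame(pFormatCtx, &packet) >= 0)
    {
        if (packet.stream_index == nVideoIndex)
        {
            // feed one compressed packet to the decoder
            if (avcodec_send_packet(pCodecCtx, &packet) < 0)
            {
                av_packet_unref(&packet);
                break;
            }
            // drain every picture the decoder has ready for this packet
            while (avcodec_receive_frame(pCodecCtx, pFrame) == 0)
            {
                // pFrame now holds a decoded picture; convert and display it
                // with sws_scale() and SDL exactly as in the player above
            }
        }
        av_packet_unref(&packet);            // replaces av_free_packet()
    }

    av_frame_free(&pFrame);
    return 0;
}

Note that one packet can now yield zero or several frames, which is why the inner avcodec_receive_frame() loop replaces the single got_picture flag used with avcodec_decode_video2().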