SDL2 draws mp4 files parsed by ffmpeg

Article directory

  • 1.FFMPEG uses the command line to convert mp4 to yuv420
  • 2.ffmpeg parses mp4 into yuv data
    • 2.1 Core API:
  • 3.SDL2 performs yuv drawing to the screen
    • 3.1 Core API
  • 4. Complete code
  • 5. Effect display
  • 6. SDL2 event handling supplement
    • 6.1 Handling method 01
    • 6.2 Handling method 02

This project adopts the producer-consumer model. The producer thread uses ffmpeg to parse the mp4 data into YUV frames; the consumer thread uses SDL2 to consume the parsed YUV frames and draw them to the screen. Unfinished parts: 1. parsing the audio data and synchronizing it with the video; 2. adding a UI with pause/play buttons and support for seeking forward and backward.
Reference materials and projects for learning audio and video:
playdemo_github
Thor’s csdn blog

1.FFMPEG uses the command line to convert mp4 to yuv420

ffmpeg -i input.mp4 -c:v rawvideo -pix_fmt yuv420p output.yuv

2.ffmpeg parses mp4 into yuv data

2.1 Core API:

  • av_read_frame: Read a frame of data
  • avcodec_send_packet: Send the packet to the decoder
  • avcodec_receive_frame: Take the data packet from the decoder
  • sws_scale: Format conversion, convert decoded frame data into yuv data, stored in data[0], data[1], data[2]
void readFrame()
{<!-- -->
AVPacket* avPacket = av_packet_alloc();
AVFrame* frame = av_frame_alloc();
\t
FILE* fp = fopen("F:/VS_Project/ffmpeg_demo/yuv.data","wb + ");
while (av_read_frame(formatContext, avPacket) >= 0 & amp; & amp; fp)
{<!-- -->
if (avPacket->stream_index == videoStreamIndex)
{<!-- -->
if (avcodec_send_packet(codecContext, avPacket) < 0) {<!-- -->
std::cerr << "Failed to send packet to decoder" << std::endl;
break;
}
/*Decode*/
int ret = avcodec_receive_frame(codecContext, frame);
printf("ret:%d\\
", ret);
if (ret >= 0)
{<!-- -->
ret = sws_scale(swsContext, frame->data, frame->linesize, 0, codecContext->height, yuvFrame->data, yuvFrame->linesize);
printf("sws_scale ret=%d\\
", ret);
std::lock_guard<std::mutex>lck(mtx);
isFinished = false;
memcpy(yuvBuf, yuvFrame->data[0], yuvFrame->width * yuvFrame->height);
memcpy(yuvBuf + yuvFrame->width * yuvFrame->height, yuvFrame->data[1], yuvFrame->width * yuvFrame->height / 4);
memcpy(yuvBuf + yuvFrame->width * yuvFrame->height*5/4, yuvFrame->data[2], yuvFrame->width * yuvFrame->height / 4);
isFinished = true;
condvar.notify_one();
//Save the y component
//fwrite(yuvFrame->data[0], 1, yuvFrame->width * yuvFrame->height, fp);
//Save uv component
//fwrite(yuvFrame->data[1], 1, yuvFrame->width * yuvFrame->height/4, fp);
//fwrite(yuvFrame->data[2], 1, yuvFrame->width * yuvFrame->height / 4, fp);
}
\t\t\t
}
}
fclose(fp);
av_frame_unref(yuvFrame);
av_packet_free( & amp;avPacket);
av_frame_unref(frame);
}

3.SDL2 draws yuv to the screen

3.1 Core API

  • SDL_Init
  • SDL_CreateWindow
  • SDL_CreateRenderer
  • SDL_CreateTexture
  • SDL_UpdateTexture
  • SDL_RenderCopy
  • SDL_RenderPresent
  • SDL_Delay: control frame rate
int sdl_display()
{<!-- -->
if (SDL_Init(SDL_INIT_VIDEO)) {<!-- -->
printf("sdl init failed\\
");
return -1;
}
SDL_Window* window = SDL_CreateWindow("sdl_demo", 200, 200, codecContext->width, codecContext->height, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
if (!window) {<!-- -->
SDL_Quit();
return -1;
}
SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
if (!renderer)
{<!-- -->
SDL_DestroyWindow(window);
SDL_Quit();
return -1;
}
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
SDL_RenderClear(renderer);
Uint32 pixformat = SDL_PIXELFORMAT_IYUV;
SDL_Texture* sdlTexture = SDL_CreateTexture(renderer, pixformat, SDL_TEXTUREACCESS_STREAMING, codecContext->width, codecContext->height);
//FILE* fp = fopen("F:/VS_Project/ffmpeg_demo/yuv.data", "rb + ");
\t
\t
while (1) {<!-- -->
//int ret = fread(yuvBuf, 1, yuvlen, fp);
//if (ret <= 0) {<!-- -->
// break;
//}
std::unique_lock<std::mutex>lck(mtx);
if (condvar.wait_for(lck, std::chrono::seconds(1), [] {<!-- -->
return isFinished;}))
{<!-- -->
isFinished = false;
SDL_UpdateTexture(sdlTexture, NULL, yuvBuf, codecContext->width);
SDL_RenderCopy(renderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(renderer);
//Control frame rate 25fps
SDL_Delay(40);
}
else {<!-- -->
printf("sdl thread exit!\\
");
break;
}
}
SDL_Quit();
return 0;
}

4. Complete code

-Using two threads, producer-consumer model

#include 
#include 
#include 
#include 
#include 
#include 
#include 
#include 

#include 
#include 
extern "C" {
#include 
#include 
#include 
#include 

#include 
}
#undef main
#pragma warning(disable:4996)
AVFormatContext* formatContext = nullptr;
AVCodecContext* codecContext = nullptr;
SwsContext* swsContext = nullptr;
int videoStreamIndex = -1;
AVFrame* yuvFrame;
unsigned char* yuvBuf;
bool isReady = false;
bool isFinished = false;

std::mutex mtx;
std::condition_variable condvar;
void readFrame()
{<!-- -->
AVPacket* avPacket = av_packet_alloc();
AVFrame* frame = av_frame_alloc();
\t
FILE* fp = fopen("F:/VS_Project/ffmpeg_demo/yuv.data","wb + ");
while (av_read_frame(formatContext, avPacket) >= 0 & amp; & amp; fp)
{<!-- -->
if (avPacket->stream_index == videoStreamIndex)
{<!-- -->
if (avcodec_send_packet(codecContext, avPacket) < 0) {<!-- -->
std::cerr << "Failed to send packet to decoder" << std::endl;
break;
}
/*Decode*/
int ret = avcodec_receive_frame(codecContext, frame);
printf("ret:%d\\
", ret);
if (ret >= 0)
{<!-- -->
ret = sws_scale(swsContext, frame->data, frame->linesize, 0, codecContext->height, yuvFrame->data, yuvFrame->linesize);
printf("sws_scale ret=%d\\
", ret);
std::lock_guard<std::mutex>lck(mtx);
isFinished = false;
memcpy(yuvBuf, yuvFrame->data[0], yuvFrame->width * yuvFrame->height);
memcpy(yuvBuf + yuvFrame->width * yuvFrame->height, yuvFrame->data[1], yuvFrame->width * yuvFrame->height / 4);
memcpy(yuvBuf + yuvFrame->width * yuvFrame->height*5/4, yuvFrame->data[2], yuvFrame->width * yuvFrame->height / 4);
isFinished = true;
condvar.notify_one();
//Save the y component
//fwrite(yuvFrame->data[0], 1, yuvFrame->width * yuvFrame->height, fp);
//Save uv component
//fwrite(yuvFrame->data[1], 1, yuvFrame->width * yuvFrame->height/4, fp);
//fwrite(yuvFrame->data[2], 1, yuvFrame->width * yuvFrame->height / 4, fp);
}
\t\t\t
}
}
fclose(fp);
av_frame_unref(yuvFrame);
av_packet_free( & amp;avPacket);
av_frame_unref(frame);
}

int sdl_display()
{<!-- -->
if (SDL_Init(SDL_INIT_VIDEO)) {<!-- -->
printf("sdl init failed\\
");
return -1;
}
SDL_Window* window = SDL_CreateWindow("sdl_demo", 200, 200, codecContext->width, codecContext->height, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
if (!window) {<!-- -->
SDL_Quit();
return -1;
}
SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
if (!renderer)
{<!-- -->
SDL_DestroyWindow(window);
SDL_Quit();
return -1;
}
SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
SDL_RenderClear(renderer);
Uint32 pixformat = SDL_PIXELFORMAT_IYUV;
SDL_Texture* sdlTexture = SDL_CreateTexture(renderer, pixformat, SDL_TEXTUREACCESS_STREAMING, codecContext->width, codecContext->height);
//FILE* fp = fopen("F:/VS_Project/ffmpeg_demo/yuv.data", "rb + ");
\t
\t
while (1) {<!-- -->
//int ret = fread(yuvBuf, 1, yuvlen, fp);
//if (ret <= 0) {<!-- -->
// break;
//}
std::unique_lock<std::mutex>lck(mtx);
if (condvar.wait_for(lck, std::chrono::seconds(1), [] {<!-- -->
return isFinished;}))
{<!-- -->
isFinished = false;
SDL_UpdateTexture(sdlTexture, NULL, yuvBuf, codecContext->width);
SDL_RenderCopy(renderer, sdlTexture, NULL, NULL);
SDL_RenderPresent(renderer);
//Control frame rate 25fps
SDL_Delay(40);
}
else {<!-- -->
printf("sdl thread exit!\\
");
break;
}
}
SDL_Quit();
return 0;
}

/*ffmpeg -i input.mp4 -c:v rawvideo -pix_fmt yuv420p output.yuv*/
int main(int argc, char* argv[])
{
/*if (argc != 2) {
std::cerr << "File name not specified" << std::endl;
return -1;
}*/
std::string filename = "F:/VS_Project/ffmpeg_demo/1.mkv";
if (avformat_open_input( & amp;formatContext, filename.c_str(), nullptr, nullptr) != 0)
{
std::cerr << "Unable to open file" << std::endl;
return -1;
}
if (avformat_find_stream_info(formatContext, nullptr) < 0) {
std::cerr << "Cannot find video stream" << std::endl;
return -1;
}
for (int i = 0; i < formatContext->nb_streams; i + + )
{
enum AVMediaType type = AVMEDIA_TYPE_VIDEO;
AVStream* st = formatContext->streams[i];
AVCodecParameters* codecpar = st->codecpar;
if (codecpar->codec_type == type)
{
\t\t\t
videoStreamIndex = i;
const AVCodec* codec = avcodec_find_decoder(codecpar->codec_id);
codecContext = avcodec_alloc_context3(codec);
avcodec_parameters_to_context(codecContext, codecpar);
avcodec_open2(codecContext, codec, nullptr);
swsContext = sws_getContext(codecContext->width, codecContext->height, codecContext->pix_fmt,
codecContext->width, codecContext->height, AV_PIX_FMT_YUV420P,
SWS_BILINEAR, nullptr, nullptr, nullptr);
std::cout << "w:" << codecpar->width << std::endl;
std::cout << "h:" << codecpar->height << std::endl;
}
}
yuvFrame = av_frame_alloc();
yuvFrame->width = codecContext->width;
yuvFrame->height = codecContext->height;
yuvFrame->format = AV_PIX_FMT_YUV420P;

int yuvlen = codecContext->width * codecContext->height * 3 / 2;
yuvBuf = new unsigned char[yuvlen];
int ret = av_frame_get_buffer(yuvFrame, 0);
if (ret < 0) {
printf("Failed to allocate buffer\\
");
return -1;
}
//sdl_init();
std::thread th1(readFrame);
std::thread th2(sdl_display);
th1.join();
th2.join();
delete[]yuvBuf;
return 0;
}

5. Effect display

6. SDL2 event handling supplement

6.1 Processing method-01

Start a refresh_video thread to generate a custom update video event: REFRESH_EVENT, and update it every 40ms. Continuously wait for the arrival of events in the while loop: SDL_WaitEvent, and update one frame of the picture when the event is received. At the same time, when a window change event is detected, SDL_GetWindowSize adjusts the window.

int refresh_video(void *opaque){<!-- -->
thread_exit=0;
while (thread_exit==0) {<!-- -->
SDL_Event event;
event.type = REFRESH_EVENT;
SDL_PushEvent( & amp;event);
SDL_Delay(40);
}
thread_exit=0;
//Break
SDL_Event event;
event.type = BREAK_EVENT;
SDL_PushEvent( & amp;event);
return 0;
}
// Main-thread event loop: one frame is read and rendered per REFRESH_EVENT.
// (buffer, fp, pixel_w/pixel_h/bpp, sdlTexture, sdlRenderer, sdlRect,
// screen, screen_w/screen_h come from the surrounding quoted project.)
SDL_Thread *refresh_thread = SDL_CreateThread(refresh_video, NULL, NULL);
SDL_Event event;
while (1) {
	// Block until the next event (REFRESH_EVENT arrives every 40 ms)
	SDL_WaitEvent(&event);
	if (event.type == REFRESH_EVENT) {
		if (fread(buffer, 1, pixel_w*pixel_h*bpp/8, fp) != pixel_w*pixel_h*bpp/8) {
			// Short read = end of file: rewind and loop the clip
			fseek(fp, 0, SEEK_SET);
			fread(buffer, 1, pixel_w*pixel_h*bpp/8, fp);
		}

		SDL_UpdateTexture(sdlTexture, NULL, buffer, pixel_w);

		// FIX: track the current window size so resizing scales the video
		sdlRect.x = 0;
		sdlRect.y = 0;
		sdlRect.w = screen_w;
		sdlRect.h = screen_h;

		SDL_RenderClear(sdlRenderer);
		SDL_RenderCopy(sdlRenderer, sdlTexture, NULL, &sdlRect);
		SDL_RenderPresent(sdlRenderer);
	} else if (event.type == SDL_WINDOWEVENT) {
		// Window changed (e.g. resized): cache the new dimensions
		SDL_GetWindowSize(screen, &screen_w, &screen_h);
	} else if (event.type == SDL_QUIT) {
		thread_exit = 1;     // ask refresh_video to stop; it will post BREAK_EVENT
	} else if (event.type == BREAK_EVENT) {
		break;
	}
}
SDL_Quit();

6.2 Processing Method-02

Start an event loop thread, SDL_PeepEvents take out an event from the event queue, then update the event queue, and perform drawing operations.

//Playback control loop
void VideoCtl::LoopThread(VideoState *cur_stream)
{
    // Playback control loop: pull SDL events via refresh_loop_wait_event and
    // dispatch keyboard / window / quit handling until m_bPlayLoop is cleared.
    SDL_Event event;

    m_bPlayLoop = true;

    while (m_bPlayLoop)
    {
        refresh_loop_wait_event(cur_stream, &event);
        switch (event.type) {
        case SDL_KEYDOWN:
            switch (event.key.keysym.sym) {
            case SDLK_s: // S: Step to next frame
                step_to_next_frame(cur_stream);
                break;
            case SDLK_a: // A: cycle audio stream
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                break;
            case SDLK_v: // V: cycle video stream
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                break;
            case SDLK_c: // C: cycle video, audio and subtitle streams
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_VIDEO);
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_AUDIO);
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            case SDLK_t: // T: cycle subtitle stream
                stream_cycle_channel(cur_stream, AVMEDIA_TYPE_SUBTITLE);
                break;
            default:
                break;
            }
            break;
        case SDL_WINDOWEVENT:
            // Window size change event
            qDebug()<<"SDL_WINDOWEVENT "<<endl;
            switch (event.window.event) {
            case SDL_WINDOWEVENT_RESIZED:
                screen_width = cur_stream->width = event.window.data1;
                screen_height = cur_stream->height = event.window.data2;
                // fall through: a resize also requires a redraw
            case SDL_WINDOWEVENT_EXPOSED:
                cur_stream->force_refresh = 1;
            }
            break;
        case SDL_QUIT:
        case FF_QUIT_EVENT:
            do_exit(cur_stream);
            break;
        default:
            break;
        }
    }

    do_exit(m_CurStream);
    //m_CurStream = nullptr;
}