/* Extracts screenshots from video files: decoded H.264 frames are saved as JPEG images; this code passed the actual test. */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "libavcodec/avcodec.h"
#include "libavfilter/avfilter.h"
#include "libavformat/avformat.h"
#include "libavutil/avutil.h"
#include "libavutil/ffversion.h"
#include "libswresample/swresample.h"
#include "libswscale/swscale.h"
#include "libpostproc/postprocess.h"
#include "libavutil/imgutils.h"


void saveFrame(AVFrame *pFrame, int width, int height, int iFrame);
int writeJPEG(AVFrame *pFrame, int width, int height, int iIndex);

//Save the FFmpeg decoded data to a local file
/*
 * Save one decoded RGB24 frame as a binary PPM (P6) image named
 * "frame<iFrame>.ppm" in the current directory.
 *
 * pFrame  - frame whose data[0] holds packed RGB24 pixels (3 bytes/pixel);
 *           linesize[0] may include row padding, so rows are copied one at
 *           a time rather than as a single blob.
 * width   - frame width in pixels.
 * height  - frame height in pixels.
 * iFrame  - index used to build the output file name.
 *
 * Fails silently (returns without writing) if the file cannot be opened,
 * matching the original best-effort behavior.
 */
void saveFrame(AVFrame *pFrame, int width, int height, int iFrame)
{
    FILE *pFile;
    char szFilename[32];
    int y;

    /* Build the output name with a bounded format to avoid overflow. */
    snprintf(szFilename, sizeof szFilename, "frame%d.ppm", iFrame);
    pFile = fopen(szFilename, "wb");
    if (pFile == NULL)
        return;

    /* PPM header: magic number, dimensions, max channel value. */
    fprintf(pFile, "P6\n%d %d\n255\n", width, height);

    /* Copy exactly width*3 bytes per row, skipping any linesize padding. */
    for (y = 0; y < height; y++) {
        fwrite(pFrame->data[0] + y * pFrame->linesize[0], 1, (size_t)width * 3, pFile);
    }

    fclose(pFile);
}

int saveJPEG(const char *fileName)
{
    int videoStream = -1;
    AVCodecContext *pCodecCtx = NULL;
    AVFormatContext *pFormatCtx;
    AVCodec *pCodec;
    AVFrame *pFrame, *pFrameRGB;
    struct SwsContext *pSwsCtx;
    AVPacket packet;
    int PictureSize;
    uint8_t *outBuff;
    avformat_network_init();
    pFormatCtx=avformat_alloc_context();
    if(avformat_open_input( & amp;pFormatCtx, fileName, NULL, NULL)!=0){
        printf("av open input file failed!");
        return -1;
    }
    if ( avformat_find_stream_info(pFormatCtx, NULL ) < 0 ){
        printf("av find stream failed!");
        return -1;
    }
    for(int i = 0; i < pFormatCtx->nb_streams; i + + ){
        if(pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO){
            videoStream = i;
            break;
        }
    }
    if(videoStream == -1){
        printf("find video stream failed!\\
");
        return -1;
    }
    printf("video stream:%d",videoStream);
    pCodecCtx = avcodec_alloc_context3(NULL);
    if (!pCodecCtx) {
        return -1;
    };
    avcodec_parameters_to_context(pCodecCtx, pFormatCtx->streams[videoStream]->codecpar);
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec == NULL){
        printf("avcodec find decorder failed!\\
");
        return -1;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0){
        printf("avcode open failed! ");
        return -1;
    }
    pFrame = av_frame_alloc();
    pFrameRGB = av_frame_alloc();
    if(pFrame==NULL ||pFrameRGB==NULL){
        printf("avcodec alloc frame failed!\\
");
        return -1;
    }

    // Determine image size
    PictureSize = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);

    outBuff = (uint8_t*)av_malloc(PictureSize);
    if( outBuff == NULL ) {
        printf("av malloc failed!\\
");
        return -1;
    }

// av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, outBuff, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height,1);

// //Set image conversion context
// pSwsCtx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
// pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_YUVJ420P,
// SWS_BICUBIC, NULL, NULL, NULL);

    int i = 0;
    while(av_read_frame(pFormatCtx, & amp;packet) >= 0 ){
        int ret = -1;
        if(packet.stream_index == videoStream ){
            ret = avcodec_send_packet(pCodecCtx, & amp;packet);
            if(ret < 0)
                continue;
            while(ret >= 0){
                ret = avcodec_receive_frame(pCodecCtx, pFrame);
                if(ret >=0 ){
                    printf("before call writeJPEG ");
                    ret = writeJPEG(pFrame, pCodecCtx->width, pCodecCtx->height, i + + );
                    if(ret == 0)
                        break;
                }
            }
        }
        av_packet_unref( & amp;packet);
        if(ret == 0)
            break;
    }
    //sws_freeContext(pSwsCtx);

    av_free(pFrame);
    av_free(pFrameRGB);
    avcodec_close(pCodecCtx);
    avformat_close_input( & amp;pFormatCtx);
    return 0;
}

/*
 * Encode a single decoded frame to a JPEG file named "out<iIndex>.jpg"
 * using the MJPEG muxer/encoder.
 *
 * pFrame  - decoded frame; assumed to be YUV420-layout data compatible with
 *           AV_PIX_FMT_YUVJ420P -- TODO confirm against the decoder's pix_fmt.
 * width/height - frame dimensions used to configure the encoder.
 * iIndex  - index used to build the output file name.
 *
 * Returns 0 on success, -1 on failure.
 *
 * Fixes over the original: HTML-mangled `& amp;` restored, broken "\\"
 * printf escapes replaced with "\n", the receive loop no longer spins
 * forever on AVERROR(EAGAIN)/AVERROR_EOF (it `continue`d with no way for
 * more input to arrive), the encoder is drained with a NULL frame, and all
 * resources are released on every exit path via goto cleanup.
 */
int writeJPEG(AVFrame *pFrame, int width, int height, int iIndex)
{
    int rc = -1;
    char out_file[32];
    AVFormatContext *pFormatCtx = NULL;
    AVCodecContext *pCodecCtx = NULL;
    AVStream *pAVStream = NULL;
    const AVCodec *pCodec = NULL;
    AVPacket *pkt = NULL;

    snprintf(out_file, sizeof out_file, "out%d.jpg", iIndex);
    printf("index is %d\n", iIndex);

    pFormatCtx = avformat_alloc_context();
    if (!pFormatCtx)
        return -1;

    /* The MJPEG muxer emits a plain JPEG when given a single frame. */
    pFormatCtx->oformat = av_guess_format("mjpeg", NULL, NULL);
    if (!pFormatCtx->oformat) {
        printf("mjpeg format not found\n");
        goto cleanup;
    }

    if (avio_open(&pFormatCtx->pb, out_file, AVIO_FLAG_READ_WRITE) < 0) {
        printf("Couldn't open output file.");
        goto cleanup;
    }

    pAVStream = avformat_new_stream(pFormatCtx, NULL);
    if (pAVStream == NULL) {
        printf("avformat_new_stream failed\n");
        goto cleanup;
    }

    pCodec = avcodec_find_encoder(pFormatCtx->oformat->video_codec);
    if (!pCodec) {
        printf("Codec not found.");
        goto cleanup;
    }

    pCodecCtx = avcodec_alloc_context3(pCodec);
    if (!pCodecCtx)
        goto cleanup;

    pCodecCtx->codec_id = pFormatCtx->oformat->video_codec;
    pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P; /* full-range YUV expected by MJPEG */
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->compression_level = 10;
    pCodecCtx->time_base = (AVRational){1, 25};

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
        printf("Could not open codec. ");
        goto cleanup;
    }

    avcodec_parameters_from_context(pAVStream->codecpar, pCodecCtx);
    if (avformat_write_header(pFormatCtx, NULL) < 0)
        goto cleanup;

    pkt = av_packet_alloc();
    if (!pkt)
        goto cleanup;

    if (avcodec_send_frame(pCodecCtx, pFrame) < 0) {
        printf("avcodec_send_frame failed\n");
        goto cleanup;
    }
    /* Enter draining mode so the encoder flushes the frame's packet(s). */
    avcodec_send_frame(pCodecCtx, NULL);

    for (;;) {
        int ret = avcodec_receive_packet(pCodecCtx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            break; /* no more output -- do NOT loop again */
        if (ret < 0) {
            printf("Encode Error.\n");
            goto cleanup;
        }
        av_write_frame(pFormatCtx, pkt);
        av_packet_unref(pkt);
    }

    av_write_trailer(pFormatCtx);
    rc = 0;

cleanup:
    av_packet_free(&pkt);
    avcodec_free_context(&pCodecCtx);
    if (pFormatCtx) {
        if (pFormatCtx->pb)
            avio_closep(&pFormatCtx->pb);
        avformat_free_context(pFormatCtx);
    }
    return rc;
}

int main() {
    char filePath[] = "wmapro-in-wmavoice.wmv";//File address

    int videoStreamIndex = -1;//The index in the stream sequence where the video stream is located
    int ret = 0;//Default return value

    //Required variable name and initialization
    AVFormatContext *fmtCtx=NULL;
    AVPacket *pkt =NULL;
    AVCodecContext *codecCtx=NULL;
    AVCodecParameters *avCodecPara=NULL;
    const AVCodec *codec=NULL;
    AVFrame *yuvFrame = av_frame_alloc();
    AVFrame *rgbFrame = av_frame_alloc();

    do{
        //============================ Create the AVFormatContext structure =================== ==============//
        //Assign an AVFormatContext. All operations of FFMPEG must be performed through this AVFormatContext.
        fmtCtx = avformat_alloc_context();
        //==================================== Open file========== ============================//
        if ((ret=avformat_open_input( & amp;fmtCtx, filePath, NULL, NULL)) != 0) {
            printf("cannot open video file\\
");
            break;
        }

        //================================== Get video stream information========== ==========================//
        if ((ret=avformat_find_stream_info(fmtCtx, NULL)) < 0) {
            printf("cannot retrive video info\\
");
            break;
        }

        //Loop to find the stream information contained in the video until a stream of video type is found
        //Record it and save it in the videoStreamIndex variable
        for (unsigned int i = 0; i < fmtCtx->nb_streams; i + + ) {
            if (fmtCtx->streams[ i ]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
                videoStreamIndex = i;
                break;//Exit when finding the video stream
            }
        }

        //If videoStream is -1, it means that the video stream is not found
        if (videoStreamIndex == -1) {
            printf("cannot find video stream\\
");
            break;
        }

        //Print input and output information: length, bit rate, stream format, etc.
        av_dump_format(fmtCtx, 0, filePath, 0);

        //================================== Find decoder============ =======================//
        avCodecPara = fmtCtx->streams[ videoStreamIndex ]->codecpar;
        codec = avcodec_find_decoder(avCodecPara->codec_id);
        if (codec == NULL) {
            printf("cannot find decoder\\
");
            break;
        }
        //Create decoder content based on decoder parameters
        codecCtx = avcodec_alloc_context3(codec);
        avcodec_parameters_to_context(codecCtx, avCodecPara);
        if (codecCtx == NULL) {
            printf("Cannot alloc context.");
            break;
        }

        //================================ Open the decoder ============= ======================//
        if ((ret=avcodec_open2(codecCtx, codec, NULL)) < 0) { // We don’t need to know what decoder ffmpeg uses after encapsulation
            printf("cannot open decoder\\
");
            break;
        }

        //================================ Set data conversion parameters ============ ====================//
        struct SwsContext *img_ctx = sws_getContext(
            codecCtx->width, codecCtx->height, codecCtx->pix_fmt, //Source address length and width and data format
            codecCtx->width, codecCtx->height, AV_PIX_FMT_RGB32, //Destination address length, width and data format
            SWS_BICUBIC, NULL, NULL, NULL); //Algorithm type AV_PIX_FMT_YUVJ420P AV_PIX_FMT_BGR24

        //==================================== Allocate space ========== ========================//
        //One frame image data size
        int numBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB32, codecCtx->width, codecCtx->height, 1);
        unsigned char *out_buffer = (unsigned char *)av_malloc(numBytes * sizeof(unsigned char));


        //============================ Allocate AVPacket structure ================= ==============//
        int i = 0;//for frame counting
        pkt = av_packet_alloc(); //Allocate a packet
        av_new_packet(pkt, codecCtx->width * codecCtx->height); //Adjust packet data

        //The data of pFrameRGB will be automatically "associated" to the buffer in RGB format, that is, the data in pFrameRGB has changed.
        //The data in out_buffer will also change accordingly
        av_image_fill_arrays(rgbFrame->data, rgbFrame->linesize, out_buffer, AV_PIX_FMT_RGB32,
                             codecCtx->width, codecCtx->height, 1);

        //============================ Read video information================== ==============//
        while (av_read_frame(fmtCtx, pkt) >= 0) { //Read a frame of video and store the data in an AVPacket structure
            if (pkt->stream_index == videoStreamIndex){
                if (avcodec_send_packet(codecCtx, pkt) == 0){
                    while (avcodec_receive_frame(codecCtx, yuvFrame) == 0){
                        if ( + + i <= 500 & amp; & amp; i >= 455){
                            sws_scale(img_ctx,
                                      (const uint8_t* const*)yuvFrame->data,
                                      yuvFrame->linesize,
                                      0,
                                      codecCtx->height,
                                      rgbFrame->data,
                                      rgbFrame->linesize);
                            saveFrame(rgbFrame, codecCtx->width, codecCtx->height, i);
                        }
                    }
                }
            }
            av_packet_unref(pkt);//Reset the contents of pkt
        }
        printf("There are %d frames int total.\\
", i);
    }while(0);
    //============================ Release all pointers================== =============//
    av_packet_free( & amp;pkt);
    avcodec_close(codecCtx);
    avformat_close_input( & amp;fmtCtx);
    avformat_free_context(fmtCtx);
    av_frame_free( & amp;yuvFrame);
    av_frame_free( & amp;rgbFrame);

    saveJPEG("test.h264");
    return ret;
}