2

I am trying to encode a video in H.264 with libavcodec (version 3.4.6). It works when I use the software encoder "libx264", however it does not when I try to use the hardware encoder of my Intel cpu with VAAPI. Hardware encoding with ffmpeg via VAAPI works from the command line (using commands from here).

Apparently, there is no example or tutorial how to encode with VAAPI and libav*. I read through those of ffmpeg's examples that cover a related use case (hardware decoding, software encoding, muxing) and tried to adapt them accordingly.

When I setup the VAAPI encoder, avcodec_open2() returns AVERROR(EINVAL) (-22) and it prints the following error message to the console:

Mismatching AVCodecContext.pix_fmt and AVHWFramesContext.format

You can find it at the end of Encoder::setupEncoder() in my code. What am I missing?

Below is my code, which is split into three files:

encoder.h

#ifndef ENCODER_H
#define ENCODER_H
#include <cassert>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/hwcontext.h>
}

// Encodes raw video frames to H.264 and muxes them into a Matroska file.
// When constructed with hwAccel == true the "h264_vaapi" hardware encoder
// is used, otherwise the "libx264" software encoder.
class Encoder
{
public:
    // hwAccel: true -> VAAPI hardware encoding, false -> libx264 software encoding.
    Encoder(const bool hwAccel);
    // Encodes one frame; the hardware path first uploads it to a GPU surface.
    // Does not take ownership of `frame`.
    void addFrame(AVFrame* frame);
    // Drains the encoder and finalizes the output container.
    void flush();

    // Fixed output parameters shared by both encoder paths.
    static constexpr int s_width = 640;
    static constexpr int s_height = 480;
    static constexpr int s_fps = 25;
private:
    void setup();         // creates muxer, stream and encoder; writes the header
    void setupEncoder();  // configures and opens the AVCodecContext
    void encodeFrame(AVFrame* frame);  // sends a frame and writes resulting packets

    // members
    int m_frameId = 1;                          // monotonically increasing PTS counter
    const bool m_hardwareAcceleration = false;  // selected at construction time

    AVCodecContext* m_encoder = nullptr;   // owned; encoder context
    AVFormatContext* m_muxer = nullptr;    // owned; Matroska muxer
    AVStream* m_avStream = nullptr;        // owned by m_muxer
    AVBufferRef* m_device = nullptr;       // VAAPI device context (hw path only)

    AVFrame* m_hwFrame = nullptr;          // GPU-side frame used as upload target
};

#endif // ENCODER_H

encoder.cpp

#include "encoder.h"

extern "C" {

// AVCodecContext::get_format callback: select AV_PIX_FMT_VAAPI from the
// format list offered by the codec, or AV_PIX_FMT_NONE if it is absent.
static enum AVPixelFormat get_vaapi_format(AVCodecContext*, const enum AVPixelFormat *pix_fmts)
{
    for (const enum AVPixelFormat* fmt = pix_fmts; *fmt != AV_PIX_FMT_NONE; ++fmt) {
        if (*fmt == AV_PIX_FMT_VAAPI) {
            return AV_PIX_FMT_VAAPI;
        }
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

}

// Builds the full pipeline (muxer, encoder and, if requested, the VAAPI
// device) so the object is ready to accept frames immediately.
Encoder::Encoder(const bool hwAccel)
  : m_hardwareAcceleration(hwAccel)
{
    setup();
}
// Encodes one frame. In the hardware path the NV12 frame is first copied
// into the GPU-side m_hwFrame via av_hwframe_transfer_data(); the software
// path encodes the caller's frame directly.
void Encoder::addFrame(AVFrame* frame)
{
    AVFrame* frameToEncode = frame;
    if(m_hardwareAcceleration) {
        assert(frame->format == AV_PIX_FMT_NV12);
        // Fix: the upload can fail (e.g. mismatching surface size/format);
        // do not silently encode a stale or empty GPU frame.
        assert(av_hwframe_transfer_data(m_hwFrame, frame, 0) == 0);
        assert(m_hwFrame->format == AV_PIX_FMT_VAAPI);
        frameToEncode = m_hwFrame;
    }

    // Strictly increasing pts in encoder time_base units.
    frameToEncode->pts = m_frameId++;
    encodeFrame(frameToEncode);
}
// Drains frames still buffered inside the encoder (nullptr signals
// end-of-stream to avcodec_send_frame) and finalizes the container.
void Encoder::flush()
{
    encodeFrame(nullptr);
    av_write_trailer(m_muxer);
}

// Creates the Matroska muxer, the encoder and the output stream, then opens
// the output file and writes the container header.
void Encoder::setup()
{
    assert(avformat_alloc_output_context2(&m_muxer, nullptr, "matroska", nullptr) == 0);
    assert(m_muxer != nullptr);

    setupEncoder();

    m_avStream = avformat_new_stream(m_muxer, nullptr);
    assert(m_avStream != nullptr);
    m_avStream->id = m_muxer->nb_streams-1;
    m_avStream->time_base = m_encoder->time_base;

    // Some formats want stream headers to be separate.
    // NOTE(review): setupEncoder() has already called avcodec_open2() at this
    // point, so setting AV_CODEC_FLAG_GLOBAL_HEADER here comes too late to
    // influence the opened encoder — verify whether this flag needs to be set
    // before the codec is opened.
    if(m_muxer->oformat->flags & AVFMT_GLOBALHEADER)
        m_encoder->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    assert(avcodec_parameters_from_context(m_avStream->codecpar, m_encoder) == 0);
    assert(avio_open(&m_muxer->pb, m_hardwareAcceleration? "hardware.mkv": "software.mkv", AVIO_FLAG_WRITE) == 0);
    assert(avformat_write_header(m_muxer, nullptr) == 0);
}
void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);

        AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        m_encoder->hw_frames_ctx = av_buffer_ref(m_device);
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_device_ctx, m_hwFrame, 0);
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);  // <-- returns -22 (EINVAL) for hardware encoder

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
// Sends one frame (or nullptr to flush) to the encoder and writes every
// packet that becomes available to the muxer.
void Encoder::encodeFrame(AVFrame* frame)
{
    assert(avcodec_send_frame(m_encoder, frame) == 0);

    AVPacket packet;
    av_init_packet(&packet);
    // Fix: av_init_packet() does not touch data/size, and
    // avcodec_receive_packet() unrefs the packet first — they must not be
    // left uninitialized.
    packet.data = nullptr;
    packet.size = 0;
    int ret = 0;
    while(ret >= 0) {
        ret = avcodec_receive_packet(m_encoder, &packet);
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return;  // encoder needs more input / is fully drained
        }
        assert(ret >= 0);

        // Packet timestamps are in encoder units; convert to stream units.
        av_packet_rescale_ts(&packet, m_encoder->time_base, m_avStream->time_base);
        packet.stream_index = m_avStream->index;
        av_interleaved_write_frame(m_muxer, &packet);
        av_packet_unref(&packet);
    }
}

main.cpp

#include "encoder.h"

// Allocates an s_width x s_height frame in the given pixel format and fills
// it with black (all planes zeroed). Returns nullptr for unsupported formats.
AVFrame* createFrame(const int format)
{
    AVFrame* frame = av_frame_alloc();
    frame->format = format;
    frame->width  = Encoder::s_width;
    frame->height = Encoder::s_height;
    assert(av_frame_get_buffer(frame, 0) == 0);
    assert(av_frame_make_writable(frame) == 0);

    // Y plane: full resolution.
    for(int y=0; y<frame->height; y++) {
        for(int x=0; x<frame->width; x++) {
            frame->data[0][y * frame->linesize[0] + x] = 0;
        }
    }

    // Chroma is subsampled 2x2 in both supported formats.
    const int widthCbCr  = frame->width / 2;
    const int heightCbCr = frame->height / 2;

    if(format == AV_PIX_FMT_YUV420P) {
        // Planar: separate half-resolution Cb and Cr planes.
        for(int y=0; y<heightCbCr; y++) {
            for(int x=0; x<widthCbCr; x++) {
                frame->data[1][y * frame->linesize[1] + x] = 0;  // Cb
                frame->data[2][y * frame->linesize[2] + x] = 0;  // Cr
            }
        }
        return frame;
    }
    else if(format == AV_PIX_FMT_NV12) {
        // Semi-planar: data[1] holds interleaved CbCr pairs, so each of the
        // heightCbCr rows is frame->width bytes wide.
        // Fix: the original zeroed only half of each chroma row and indexed
        // with linesize[0] instead of linesize[1].
        for(int y=0; y<heightCbCr; y++) {
            for(int x=0; x<frame->width; x++) {
                frame->data[1][y * frame->linesize[1] + x] = 0;  // interleaved Cb/Cr
            }
        }
        return frame;
    }

    return nullptr;
}

int main()
{
    av_register_all();

    AVFrame* yuv420pFrame = createFrame(AV_PIX_FMT_YUV420P);
    AVFrame* nv12Frame = createFrame(AV_PIX_FMT_NV12);

    // works well
    Encoder softwareEncoder(false);
    for(int i=0; i<100; ++i)
        softwareEncoder.addFrame(yuv420pFrame);
    softwareEncoder.flush();

    // does not work
    Encoder hardwareEncoder(true);
    for(int i=0; i<100; ++i)
        hardwareEncoder.addFrame(nv12Frame);
    hardwareEncoder.flush();

    return 0;
}

Note that I intentionally left out all kinds of free() functions and destructors to keep the code short.

gretel99
  • 21
  • 1
  • 3

3 Answers

0

There is a large comment in the avcodec.h file where this (poorly reproduced) excerpt is located:

  • Mixing new and old function calls on the same AVCodecContext is not allowed,

  • and will result in undefined behavior.

  • Some codecs might require using the new API; using the old API will return

  • an error when calling it. All codecs support the new API.

This (and the surrounding content) suggests possible reasons you are seeing the error for one, and not the other.

Community
  • 1
  • 1
ryyker
  • 22,849
  • 3
  • 43
  • 87
  • Thanks for the quick reply! Is there a complete list of functions that are deprecated? The comment section at which you pointed me just mentions functions that are directly involved in the actual encoding process. My error already occurs in the _setup_ step. I forgot to mention that I use version 3.4.6 of libav*, which is currently distributed by Ubuntu 18.04 LTS. – gretel99 Jan 09 '20 at 16:08
  • @gretel99 - _Is there a complete list of functions that are deprecated?_ I do not know, but you can Google that as well as I can. Have you browsed through the rest of the comments. There are quite a few, and some of them may end up pointing you to other sources of information. That is a common process when using third party, or even open source libraries. – ryyker Jan 09 '20 at 16:18
0

I still don't know what is wrong with the code above. However, I got it working by debugging ffmpeg and imitating its behavior in my code. Instead of manually transferring the frame to GPU memory, ffmpeg makes use of its filtering framework.

For those interested, here is my code adapted to ffmpeg's filtering API (still without freeing any resources):

encoder.h

#ifndef ENCODER_H
#define ENCODER_H
#include <cassert>

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/opt.h>
#include <libavutil/hwcontext.h>

#include <libavfilter/avfilter.h>
#include <libavfilter/buffersink.h>
#include <libavfilter/buffersrc.h>
}

// H.264 encoder/muxer. The hardware (VAAPI) path uploads frames to the GPU
// through a libavfilter graph ("format=nv12,hwupload") instead of calling
// av_hwframe_transfer_data() directly.
class Encoder
{
public:
    // hwAccel: true -> VAAPI hardware encoding, false -> libx264 software encoding.
    Encoder(const bool hwAccel);
    // Encodes one frame; the hardware path filters it to a VAAPI surface first.
    void addFrame(AVFrame* frame);
    // Drains the encoder and finalizes the output container.
    void flush();

    // Fixed output parameters shared by both encoder paths.
    static constexpr int s_width = 640;
    static constexpr int s_height = 480;
    static constexpr int s_fps = 25;
private:
    void setup();          // creates muxer, stream and encoder; writes the header
    void setupEncoder();   // configures and opens the AVCodecContext
    void initFilters();    // builds the "format=nv12,hwupload" graph
    void initInputFilters(AVFilterInOut* inputs);    // connects the buffer source
    void initOutputFilters(AVFilterInOut* outputs);  // connects format filter + sink
    void filterFrame(AVFrame* inFrame, AVFrame* outFrame);  // one pass through the graph
    void encodeFrame(AVFrame* frame);  // sends a frame and writes resulting packets

    // members
    int m_frameId = 1;                          // monotonically increasing PTS counter
    const bool m_hardwareAcceleration = false;  // selected at construction time

    AVCodecContext* m_encoder = nullptr;   // owned; encoder context
    AVFormatContext* m_muxer = nullptr;    // owned; Matroska muxer
    AVStream* m_avStream = nullptr;        // owned by m_muxer
    AVBufferRef* m_device = nullptr;       // VAAPI device context (hw path only)

    AVFrame* m_hwFrame = nullptr;          // GPU-side frame produced by the filter graph

    AVFilterGraph* m_filterGraph = nullptr;    // owns the filter contexts below
    AVFilterContext* m_bufferSrc = nullptr;    // graph input (raw YUV420P frames)
    AVFilterContext* m_bufferSink = nullptr;   // graph output (VAAPI surfaces)
    AVFilterContext* m_formatFilter = nullptr; // pins the sink format to vaapi_vld
};

#endif // ENCODER_H

encoder.cpp

#include "encoder.h"

extern "C" {

// AVCodecContext::get_format callback: choose VAAPI surfaces if the codec
// offers them, otherwise report failure with AV_PIX_FMT_NONE.
static enum AVPixelFormat get_vaapi_format(AVCodecContext*, const enum AVPixelFormat *pix_fmts)
{
    const enum AVPixelFormat* candidate = pix_fmts;
    while (*candidate != AV_PIX_FMT_NONE) {
        if (*candidate == AV_PIX_FMT_VAAPI)
            return *candidate;
        ++candidate;
    }
    fprintf(stderr, "Failed to get HW surface format.\n");
    return AV_PIX_FMT_NONE;
}

}

// Builds the full pipeline (muxer, encoder, and for the hardware path the
// VAAPI device plus filter graph) so frames can be added immediately.
Encoder::Encoder(const bool hwAccel)
  : m_hardwareAcceleration(hwAccel)
{
    setup();
}
// Encodes one frame. In the hardware path the frame is pushed through the
// filter graph ("format=nv12,hwupload"), which yields a VAAPI surface in
// m_hwFrame; the software path encodes the caller's frame directly.
void Encoder::addFrame(AVFrame* frame)
{
    AVFrame* frameToEncode = frame;
    if(m_hardwareAcceleration) {
        filterFrame(frame, m_hwFrame);
        assert(m_hwFrame->format == AV_PIX_FMT_VAAPI);
        frameToEncode = m_hwFrame;
    }

    // Strictly increasing pts in encoder time_base units.
    frameToEncode->pts = m_frameId++;
    encodeFrame(frameToEncode);
}
// Drains frames still buffered in the encoder (nullptr = end-of-stream)
// and finalizes the container.
void Encoder::flush()
{
    encodeFrame(nullptr);
    av_write_trailer(m_muxer);
}

// Creates the Matroska muxer, the encoder and the output stream, then opens
// the output file and writes the container header.
void Encoder::setup()
{
    assert(avformat_alloc_output_context2(&m_muxer, nullptr, "matroska", nullptr) == 0);
    assert(m_muxer != nullptr);

    setupEncoder();

    m_avStream = avformat_new_stream(m_muxer, nullptr);
    assert(m_avStream != nullptr);
    m_avStream->id = m_muxer->nb_streams-1;
    m_avStream->time_base = m_encoder->time_base;

    // Some formats want stream headers to be separate.
    // NOTE(review): setupEncoder() has already called avcodec_open2() at this
    // point, so setting AV_CODEC_FLAG_GLOBAL_HEADER here comes too late to
    // influence the opened encoder — verify whether this flag needs to be set
    // before the codec is opened.
    if(m_muxer->oformat->flags & AVFMT_GLOBALHEADER)
        m_encoder->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    assert(avcodec_parameters_from_context(m_avStream->codecpar, m_encoder) == 0);
    assert(avio_open(&m_muxer->pb, m_hardwareAcceleration? "hardware.mkv": "software.mkv", AVIO_FLAG_WRITE) == 0);
    assert(avformat_write_header(m_muxer, nullptr) == 0);
}
// Configures and opens the encoder. For the VAAPI path the encoder's
// hw_frames_ctx is taken from the filter graph's buffersink (the graph is
// built first by initFilters()), mirroring how ffmpeg.c wires it up.
void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);
        const AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        // Build the upload graph first; its sink owns a fully initialized
        // AVHWFramesContext that the encoder can reference.
        initFilters();

        m_encoder->hw_device_ctx = nullptr;
        m_encoder->hw_frames_ctx = av_buffer_ref(av_buffersink_get_hw_frames_ctx(m_bufferSink));
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);

    if(m_hardwareAcceleration) {
        // NOTE(review): av_hwframe_ctx_alloc() returns an (uninitialized)
        // *frames* context, but hw_device_ctx expects a *device* context
        // buffer — verify this assignment; av_buffer_ref(m_device) looks
        // like the intended value. Likewise av_hwframe_get_buffer() below is
        // passed hw_device_ctx where a frames context is expected, and its
        // return value is not checked.
        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_device_ctx, m_hwFrame, 0);
    }

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
// Parses and configures the "format=nv12,hwupload" filter graph, giving
// every filter access to the VAAPI device so hwupload can allocate surfaces.
void Encoder::initFilters()
{
    AVFilterInOut* inputs = nullptr;
    AVFilterInOut* outputs = nullptr;
    m_filterGraph = avfilter_graph_alloc();
    assert(avfilter_graph_parse2(m_filterGraph, "format=nv12,hwupload", &inputs, &outputs) == 0);

    // hwupload needs a device; attach it to every filter in the graph.
    for(unsigned i=0; i<m_filterGraph->nb_filters; i++) {
        m_filterGraph->filters[i]->hw_device_ctx = av_buffer_ref(m_device);
        assert(m_filterGraph->filters[i]->hw_device_ctx != nullptr);
    }

    initInputFilters(inputs);
    initOutputFilters(outputs);

    assert(avfilter_graph_config(m_filterGraph, nullptr) == 0);
}
// Creates the "buffer" source that feeds raw YUV420P frames into the graph
// and links it to the parsed chain's open input.
void Encoder::initInputFilters(AVFilterInOut* inputs)
{
    assert(inputs != nullptr);
    assert(inputs->next == nullptr);  // the parsed chain has exactly one open input

    // Source parameters must match the frames later pushed in addFrame().
    char args[512];
    snprintf(args, sizeof(args),
            "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
            s_width, s_height, AV_PIX_FMT_YUV420P,
            1, s_fps,
            1, 1);

    assert(avfilter_graph_create_filter(&m_bufferSrc, avfilter_get_by_name("buffer"), "in",
                                        args, nullptr, m_filterGraph) == 0);
    assert(avfilter_link(m_bufferSrc, 0, inputs->filter_ctx, inputs->pad_idx) == 0);
}
// Terminates the graph with a format filter pinned to "vaapi_vld" followed
// by a buffersink, so the sink only ever emits VAAPI surfaces.
void Encoder::initOutputFilters(AVFilterInOut* outputs)
{
    assert(outputs != nullptr);
    assert(outputs->next == nullptr);  // the parsed chain has exactly one open output

    assert(avfilter_graph_create_filter(&m_bufferSink, avfilter_get_by_name("buffersink"), "out",
                                       nullptr, nullptr, m_filterGraph) == 0);
    assert(avfilter_graph_create_filter(&m_formatFilter, avfilter_get_by_name("format"), "format",
                                       "vaapi_vld", nullptr, m_filterGraph) == 0);
    assert(avfilter_link(outputs->filter_ctx, outputs->pad_idx, m_formatFilter, 0) == 0);
    assert(avfilter_link(m_formatFilter, 0, m_bufferSink, 0) == 0);
}
// Pushes inFrame through the filter graph and receives the filtered result
// in outFrame. KEEP_REF leaves the caller's ownership of inFrame intact.
void Encoder::filterFrame(AVFrame* inFrame, AVFrame* outFrame)
{
    assert(av_buffersrc_add_frame_flags(m_bufferSrc, inFrame, AV_BUFFERSRC_FLAG_KEEP_REF) == 0);
    // Fix: outFrame (m_hwFrame) is reused for every frame; release the
    // buffers from the previous call first — av_buffersink_get_frame()
    // expects a clean frame, otherwise each call leaks the prior surface.
    av_frame_unref(outFrame);
    assert(av_buffersink_get_frame(m_bufferSink, outFrame) == 0);
}
// Sends one frame (or nullptr to flush) to the encoder and writes every
// packet that becomes available to the muxer.
void Encoder::encodeFrame(AVFrame* frame)
{
    assert(avcodec_send_frame(m_encoder, frame) == 0);

    AVPacket packet;
    av_init_packet(&packet);
    // Fix: av_init_packet() does not touch data/size, and
    // avcodec_receive_packet() unrefs the packet first — they must not be
    // left uninitialized.
    packet.data = nullptr;
    packet.size = 0;
    int ret = 0;
    while(ret >= 0) {
        ret = avcodec_receive_packet(m_encoder, &packet);
        if(ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            return;  // encoder needs more input / is fully drained
        }
        assert(ret >= 0);

        // Packet timestamps are in encoder units; convert to stream units.
        av_packet_rescale_ts(&packet, m_encoder->time_base, m_avStream->time_base);
        packet.stream_index = m_avStream->index;
        av_interleaved_write_frame(m_muxer, &packet);
        av_packet_unref(&packet);
    }
}
gretel99
  • 21
  • 1
  • 3
0

Maybe a bit late, but for anyone still interested, the original code (without the filtering API) actually works; gretel99 only missed setting up the hardware frames context with av_hwframe_ctx_alloc/av_hwframe_ctx_init — see the FFmpeg example in doc/examples/vaapi_transcode.c.

Here's a fixed Encoder::setupEncoder() version with comments on what needs to be changed. It's not only the missing hwframe buffer allocation: at least for my VAAPI driver, rc_mode=CQP is required, and global_quality must be set to silence a warning.

void Encoder::setupEncoder()
{
    const char* encoderName = m_hardwareAcceleration? "h264_vaapi": "libx264";
    AVCodec* videoCodec = avcodec_find_encoder_by_name(encoderName);
    m_encoder = avcodec_alloc_context3(videoCodec);
    m_encoder->bit_rate = s_width * s_height * s_fps * 2;
    m_encoder->width = s_width;
    m_encoder->height = s_height;
    m_encoder->time_base = (AVRational){1, s_fps};
    m_encoder->framerate = (AVRational){s_fps, 1};

    m_encoder->gop_size = s_fps;  // have at least 1 I-frame per second
    m_encoder->max_b_frames = 1;
    m_encoder->pix_fmt = AV_PIX_FMT_YUV420P;

    if(m_hardwareAcceleration) {
        m_encoder->pix_fmt = AV_PIX_FMT_VAAPI;
        m_encoder->get_format = get_vaapi_format;

        assert(av_hwdevice_ctx_create(&m_device, AV_HWDEVICE_TYPE_VAAPI, "/dev/dri/renderD128", nullptr, 0) == 0);

        AVHWDeviceContext* deviceCtx = (AVHWDeviceContext*) m_device->data;
        assert(deviceCtx->type == AV_HWDEVICE_TYPE_VAAPI);

        // Fix error: Mismatching AVCodecContext.pix_fmt and AVHWFramesContext.format
        // See doc/examples/vaapi_transcode.c "set_hwframe_ctx()"
        {
            AVBufferRef *hw_frames_ref;
            AVHWFramesContext *frames_ctx = NULL;

            assert((hw_frames_ref = av_hwframe_ctx_alloc(m_device)) != nullptr);
            frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data);
            frames_ctx->format    = AV_PIX_FMT_VAAPI;
            frames_ctx->sw_format = AV_PIX_FMT_NV12;
            frames_ctx->width     = s_width;
            frames_ctx->height    = s_height;
            frames_ctx->initial_pool_size = 20;
            assert(av_hwframe_ctx_init(hw_frames_ref) == 0);
            m_encoder->hw_frames_ctx = av_buffer_ref(hw_frames_ref);
            assert(m_encoder->hw_frames_ctx != nullptr);
        }

        // Fix error: Driver does not support any RC mode compatible with selected options (supported modes: CQP).
        assert(av_opt_set(m_encoder->priv_data, "rc_mode", "CQP", AV_OPT_SEARCH_CHILDREN) == 0);

        // Fix warning, cosmetical only: No quality level set; using default (20).
        m_encoder->global_quality = 20;

        m_encoder->hw_device_ctx = av_hwframe_ctx_alloc(m_device);
        //m_encoder->hw_frames_ctx = av_buffer_ref(m_device);           // Fix: Not required, done by av_hwframe_ctx_alloc
        m_hwFrame = av_frame_alloc();
        av_hwframe_get_buffer(m_encoder->hw_frames_ctx, m_hwFrame, 0); // Fix: Must pass hw_frames_ctx, not m_encoder->hw_device_ctx
        assert(m_hwFrame != nullptr);
    }

    assert(avcodec_open2(m_encoder, videoCodec, nullptr) == 0);  // <-- returns -22 (EINVAL) for hardware encoder

    m_muxer->video_codec_id = videoCodec->id;
    m_muxer->video_codec = videoCodec;
}
nschlia
  • 1
  • 2