[FFmpeg-devel] [PATCH v5 5/5] libavfilter: VAAPI surface scaler
wm4
nfxjfg at googlemail.com
Mon Feb 1 17:14:06 CET 2016
On Sat, 30 Jan 2016 22:15:04 +0000
Mark Thompson <sw at jkqxz.net> wrote:
> ---
> configure | 3 +
> libavfilter/Makefile | 1 +
> libavfilter/allfilters.c | 1 +
> libavfilter/vf_vaapi_scale.c | 709 +++++++++++++++++++++++++++++++++++++++++++
> 4 files changed, 714 insertions(+)
> create mode 100644 libavfilter/vf_vaapi_scale.c
>
> diff --git a/configure b/configure
> index 62eca15..d59bdd5 100755
> --- a/configure
> +++ b/configure
> @@ -2917,6 +2917,7 @@ tinterlace_filter_deps="gpl"
> tinterlace_merge_test_deps="tinterlace_filter"
> tinterlace_pad_test_deps="tinterlace_filter"
> uspp_filter_deps="gpl avcodec"
> +vaapi_scale_filter_deps="vaapi_recent VAProcPipelineParameterBuffer"
> vidstabdetect_filter_deps="libvidstab"
> vidstabtransform_filter_deps="libvidstab"
> zmq_filter_deps="libzmq"
> @@ -5384,6 +5385,7 @@ check_type "va/va.h" "VAPictureParameterBufferHEVC"
> check_type "va/va.h" "VADecPictureParameterBufferVP9"
> check_type "va/va.h" "VAEncPictureParameterBufferH264"
> check_type "va/va.h" "VAEncPictureParameterBufferHEVC"
> +check_type "va/va.h" "VAProcPipelineParameterBuffer"
>
> check_type "vdpau/vdpau.h" "VdpPictureInfoHEVC"
>
> @@ -6135,6 +6137,7 @@ enabled showspectrum_filter && prepend avfilter_deps "avcodec"
> enabled smartblur_filter && prepend avfilter_deps "swscale"
> enabled spectrumsynth_filter && prepend avfilter_deps "avcodec"
> enabled subtitles_filter && prepend avfilter_deps "avformat avcodec"
> +enabled vaapi_scale_filter && prepend avfilter_deps "avcodec"
> enabled uspp_filter && prepend avfilter_deps "avcodec"
>
> enabled lavfi_indev && prepend avdevice_deps "avfilter"
> diff --git a/libavfilter/Makefile b/libavfilter/Makefile
> index b93e5f2..9019ef1 100644
> --- a/libavfilter/Makefile
> +++ b/libavfilter/Makefile
> @@ -248,6 +248,7 @@ OBJS-$(CONFIG_TRANSPOSE_FILTER) += vf_transpose.o
> OBJS-$(CONFIG_TRIM_FILTER) += trim.o
> OBJS-$(CONFIG_UNSHARP_FILTER) += vf_unsharp.o
> OBJS-$(CONFIG_USPP_FILTER) += vf_uspp.o
> +OBJS-$(CONFIG_VAAPI_SCALE_FILTER) += vf_vaapi_scale.o
> OBJS-$(CONFIG_VECTORSCOPE_FILTER) += vf_vectorscope.o
> OBJS-$(CONFIG_VFLIP_FILTER) += vf_vflip.o
> OBJS-$(CONFIG_VIDSTABDETECT_FILTER) += vidstabutils.o vf_vidstabdetect.o
> diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
> index 1d48970..c06a4ce 100644
> --- a/libavfilter/allfilters.c
> +++ b/libavfilter/allfilters.c
> @@ -268,6 +268,7 @@ void avfilter_register_all(void)
> REGISTER_FILTER(TRIM, trim, vf);
> REGISTER_FILTER(UNSHARP, unsharp, vf);
> REGISTER_FILTER(USPP, uspp, vf);
> + REGISTER_FILTER(VAAPI_SCALE, vaapi_scale, vf);
> REGISTER_FILTER(VECTORSCOPE, vectorscope, vf);
> REGISTER_FILTER(VFLIP, vflip, vf);
> REGISTER_FILTER(VIDSTABDETECT, vidstabdetect, vf);
> diff --git a/libavfilter/vf_vaapi_scale.c b/libavfilter/vf_vaapi_scale.c
> new file mode 100644
> index 0000000..25bcd4e
> --- /dev/null
> +++ b/libavfilter/vf_vaapi_scale.c
> @@ -0,0 +1,709 @@
> +/*
> + * VAAPI converter (scaling and colour conversion).
> + *
> + * Copyright (C) 2016 Mark Thompson <mrt at jkqxz.net>
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include "avfilter.h"
> +#include "formats.h"
> +#include "internal.h"
> +
> +#include "libavutil/avassert.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/pixdesc.h"
> +#include "libavcodec/vaapi_support.h"
> +
> +#include <va/va_vpp.h>
> +
> +typedef struct VAAPIScaleContext {
> + const AVClass *class;
> +
> + int pipeline_initialised;
> + int input_initialised;
> + int output_initialised;
> +
> + AVVAAPIHardwareContext *hardware_context;
> + AVVAAPIPipelineConfig pipeline_config;
> + AVVAAPIPipelineContext pipeline;
> +
> + int input_is_vaapi;
> + int output_is_vaapi;
> + AVVAAPISurfaceConfig input_config;
> + AVVAAPISurfacePool input_pool;
> + AVVAAPISurfaceConfig output_config;
> + AVVAAPISurfacePool output_pool;
> +
> + int output_width;
> + int output_height;
> +
> + struct {
> + int64_t hardware_context;
> + int output_size[2];
> +
> + int force_vaapi_in;
> + int force_vaapi_out;
> + } options;
> +
> +} VAAPIScaleContext;
> +
> +
> +static int vaapi_scale_query_formats(AVFilterContext *avctx)
> +{
> + VAAPIScaleContext *ctx = avctx->priv;
> + VAStatus vas;
> + VAConfigID config_id;
> + VASurfaceAttrib *attr_list;
> + int i, attr_count;
> + unsigned int fourcc;
> + enum AVPixelFormat pix_fmt, pix_fmt_list[16];
> + int pix_fmt_count, err;
> +
> + // Always support opaque VAAPI surfaces for both input and output.
> + pix_fmt_list[0] = AV_PIX_FMT_VAAPI;
> + pix_fmt_count = 1;
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + // Create a temporary VideoProc config to query the image formats.
> + vas = vaCreateConfig(ctx->hardware_context->display,
> + VAProfileNone, VAEntrypointVideoProc,
> + 0, 0, &config_id);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to create dummy pipeline "
> + "configuration: %d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail;
> + }
> +
> + attr_count = 0;
> + vas = vaQuerySurfaceAttributes(ctx->hardware_context->display, config_id,
> + 0, &attr_count);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to query surface attributes: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail_config;
> + }
> +
> + attr_list = av_calloc(attr_count, sizeof(VASurfaceAttrib));
> + if(!attr_list) {
> + err = AVERROR(ENOMEM);
> + goto fail_config;
> + }
> +
> + vas = vaQuerySurfaceAttributes(ctx->hardware_context->display, config_id,
> + attr_list, &attr_count);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to query surface attributes: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + av_free(attr_list);
> + err = AVERROR_EXTERNAL;
> + goto fail_config;
> + }
> +
> + for(i = 0; i < attr_count; i++) {
> + if(attr_list[i].type == VASurfaceAttribPixelFormat) {
> + fourcc = attr_list[i].value.value.i;
> + pix_fmt = av_vaapi_pix_fmt(fourcc);
> + if(pix_fmt != AV_PIX_FMT_NONE) {
> + av_log(ctx, AV_LOG_DEBUG, "Hardware supports %#x -> %s.\n",
> + fourcc, av_get_pix_fmt_name(pix_fmt));
> + pix_fmt_list[pix_fmt_count++] = pix_fmt;
> + } else {
> + av_log(ctx, AV_LOG_DEBUG, "Hardware supports unknown "
> + "format %#x.\n", fourcc);
> + }
> + }
> + }
> +
> + av_free(attr_list);
> +
> + vas = vaDestroyConfig(ctx->hardware_context->display, config_id);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to destroy dummy pipeline "
> + "configuration: %d (%s).\n", vas, vaErrorStr(vas));
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + pix_fmt_list[pix_fmt_count] = AV_PIX_FMT_NONE;
> +
> + if(avctx->inputs[0]) {
> + if(ctx->options.force_vaapi_in)
> + pix_fmt_list[1] = AV_PIX_FMT_NONE;
> +
> + err = ff_formats_ref(ff_make_format_list(pix_fmt_list),
> + &avctx->inputs[0]->out_formats);
> + if(err < 0)
> + return err;
> + }
> +
> + if(avctx->outputs[0]) {
> + if(ctx->options.force_vaapi_out)
> + pix_fmt_list[1] = AV_PIX_FMT_NONE;
> +
> + err = ff_formats_ref(ff_make_format_list(pix_fmt_list),
> + &avctx->outputs[0]->in_formats);
> + if(err < 0)
> + return err;
> + }
> +
> + return 0;
> +
> + fail_config:
> + vaDestroyConfig(ctx->hardware_context->display, config_id);
> + fail:
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> + return err;
> +}
> +
> +static int vaapi_scale_pipeline_init(VAAPIScaleContext *ctx)
> +{
> + AVVAAPIPipelineConfig *config = &ctx->pipeline_config;
> + int err;
> +
> + if(ctx->pipeline_initialised)
> + return 0;
> +
> + config->profile = VAProfileNone;
> + config->entrypoint = VAEntrypointVideoProc;
> +
> + config->attribute_count = 0;
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + err = av_vaapi_pipeline_init(&ctx->pipeline, ctx->hardware_context,
> + &ctx->pipeline_config, &ctx->output_pool);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to create video processing "
> + "pipeline: " "%d (%s).\n", err, av_err2str(err));
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + ctx->pipeline_initialised = 1;
> +
> + return err;
> +}
> +
> +static int vaapi_scale_pipeline_uninit(VAAPIScaleContext *ctx)
> +{
> + int err;
> +
> + if(!ctx->pipeline_initialised)
> + return 0;
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + err = av_vaapi_pipeline_uninit(&ctx->pipeline);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise video "
> + "processing pipeline: %d (%s).\n", err, av_err2str(err));
> + return err;
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + return 0;
> +}
> +
> +static int vaapi_scale_config_input(AVFilterLink *inlink)
> +{
> + AVFilterContext *avctx = inlink->dst;
> + VAAPIScaleContext *ctx = avctx->priv;
> + AVVAAPISurfaceConfig *config = &ctx->input_config;
> + int err;
> +
> + if(ctx->pipeline_initialised) {
> + err = vaapi_scale_pipeline_uninit(ctx);
> + if(err < 0)
> + return err;
> + }
> + if(ctx->input_initialised && !ctx->input_is_vaapi) {
> + err = av_vaapi_surface_pool_uninit(&ctx->input_pool);
> + if(err < 0)
> + return err;
> + }
> +
> + if(inlink->format == AV_PIX_FMT_VAAPI) {
> + av_log(ctx, AV_LOG_DEBUG, "Input format is VAAPI (using incoming surfaces).\n");
> + ctx->input_is_vaapi = 1;
> + ctx->input_initialised = 1;
> + return 0;
> + }
> + ctx->input_is_vaapi = 0;
> +
> + av_log(ctx, AV_LOG_DEBUG, "Input format is %s.\n",
> + av_get_pix_fmt_name(inlink->format));
> + config->rt_format = VA_RT_FORMAT_YUV420;
> + config->av_format = inlink->format;
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + err = av_vaapi_get_image_format(ctx->hardware_context,
> + inlink->format, &config->image_format);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Tried to configure with invalid input "
> + "format %s.\n", av_get_pix_fmt_name(inlink->format));
> + goto fail;
> + }
> +
> + config->width = inlink->w;
> + config->height = inlink->h;
> +
> + config->attribute_count = 0;
> +
> + err = av_vaapi_surface_pool_init(&ctx->input_pool,
> + ctx->hardware_context, config, 4);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to create input surface pool: "
> + "%d (%s).\n", err, av_err2str(err));
> + goto fail;
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + if(ctx->output_width == 0)
> + ctx->output_width = inlink->w;
> + if(ctx->output_height == 0)
> + ctx->output_height = inlink->h;
> +
> + ctx->input_initialised = 1;
> + return 0;
> +
> + fail:
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> + return err;
> +}
> +
> +static int vaapi_scale_config_output(AVFilterLink *outlink)
> +{
> + AVFilterContext *avctx = outlink->src;
> + VAAPIScaleContext *ctx = avctx->priv;
> + AVVAAPISurfaceConfig *config = &ctx->output_config;
> + int err;
> +
> + if(ctx->pipeline_initialised) {
> + err = vaapi_scale_pipeline_uninit(ctx);
> + if(err < 0)
> + return err;
> + }
> + if(ctx->output_initialised && !ctx->output_is_vaapi) {
> + err = av_vaapi_surface_pool_uninit(&ctx->output_pool);
> + if(err < 0)
> + return err;
> + }
> +
> + if(outlink->format == AV_PIX_FMT_VAAPI) {
> + // Should the opaque format here be settable somehow?
> + config->rt_format = VA_RT_FORMAT_YUV420;
> + config->av_format = AV_PIX_FMT_NV12;
> + av_log(ctx, AV_LOG_DEBUG, "Output format is %s (in VAAPI surfaces).\n",
> + av_get_pix_fmt_name(config->av_format));
> +
> + ctx->output_is_vaapi = 1;
> + } else {
> + config->rt_format = VA_RT_FORMAT_YUV420;
> + config->av_format = outlink->format;
> + av_log(ctx, AV_LOG_DEBUG, "Output format is %s.\n",
> + av_get_pix_fmt_name(config->av_format));
> +
> + ctx->output_is_vaapi = 0;
> + }
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + err = av_vaapi_get_image_format(ctx->hardware_context,
> + config->av_format, &config->image_format);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Tried to configure with invalid output "
> + "format %s.\n", av_get_pix_fmt_name(outlink->format));
> + goto fail;
> + }
> +
> + outlink->w = config->width = ctx->output_width;
> + outlink->h = config->height = ctx->output_height;
> +
> + config->attribute_count = 0;
> +
> + err = av_vaapi_surface_pool_init(&ctx->output_pool,
> + ctx->hardware_context, config, 4);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to create output surface pool: "
> + "%d (%s).\n", err, av_err2str(err));
> + goto fail;
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + ctx->output_initialised = 1;
> + return 0;
> +
> + fail:
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> + return err;
> +}
> +
> +static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
> +{
> + switch(av_cs) {
> +#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
> + CS(BT709, BT709);
> + CS(BT470BG, BT601);
> + CS(SMPTE170M, SMPTE170M);
> + CS(SMPTE240M, SMPTE240M);
> +#undef CS
> + default:
> + return VAProcColorStandardNone;
> + }
> +}
> +
> +static int vaapi_scale_filter_frame(AVFilterLink *inlink, AVFrame *input_image)
> +{
> + AVFilterContext *avctx = inlink->dst;
> + AVFilterLink *outlink = avctx->outputs[0];
> + VAAPIScaleContext *ctx = avctx->priv;
> + AVFrame *source_image = NULL, *target_image = NULL, *output_image = NULL;
> + VASurfaceID input_surface, output_surface;
> + VAProcPipelineParameterBuffer params;
> + VABufferID params_id;
> + VAStatus vas;
> + int err;
> +
> + av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
> + av_get_pix_fmt_name(input_image->format),
> + input_image->width, input_image->height, input_image->pts);
> +
> + if(!ctx->input_initialised || !ctx->output_initialised)
> + return AVERROR(EINVAL);
> + if(!ctx->pipeline_initialised) {
> + err = vaapi_scale_pipeline_init(ctx);
> + if(err < 0)
> + return err;
> + }
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + if(input_image->format == AV_PIX_FMT_VAAPI) {
> + source_image = 0;
> + input_surface = (VASurfaceID)input_image->data[3];
> +
> + } else {
> + source_image = av_frame_alloc();
> + if(!source_image) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate frame to "
> + "copy input.");
> + err = AVERROR(ENOMEM);
> + goto fail;
> + }
> +
> + source_image->format = AV_PIX_FMT_VAAPI;
> + source_image->width = input_image->width;
> + source_image->height = input_image->height;
> +
> + err = av_vaapi_surface_pool_get(&ctx->input_pool, source_image);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate input frame "
> + "from surface pool: %d (%s).\n", err, av_err2str(err));
> + goto fail;
> + }
> +
> + err = av_vaapi_copy_to_surface(source_image, input_image);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to copy to input surface: "
> + "%d (%s).\n", err, av_err2str(err));
> + goto fail;
> + }
> +
> + input_surface = (VASurfaceID)source_image->data[3];
> + }
> + av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
> + input_surface);
> +
> + target_image = av_frame_alloc();
> + if(!target_image) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame.");
> + err = AVERROR(ENOMEM);
> + goto fail;
> + }
> +
> + target_image->format = AV_PIX_FMT_VAAPI;
> + target_image->width = ctx->output_width;
> + target_image->height = ctx->output_height;
> +
> + err = av_vaapi_surface_pool_get(&ctx->output_pool, target_image);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame "
> + "from surface pool: %d (%s).\n", err, av_err2str(err));
> + goto fail;
> + }
> +
> + output_surface = (VASurfaceID)target_image->data[3];
> + av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
> + output_surface);
> +
> + memset(&params, 0, sizeof(params));
> +
> + params.surface = input_surface;
> + params.surface_region = 0;
> + params.surface_color_standard =
> + vaapi_proc_colour_standard(input_image->colorspace);
> +
> + params.output_region = 0;
> + params.output_background_color = 0xff000000;
> + params.output_color_standard = params.surface_color_standard;
> +
> + params.pipeline_flags = 0;
> + params.filter_flags = VA_FILTER_SCALING_HQ;
> +
> + vas = vaBeginPicture(ctx->hardware_context->display,
> + ctx->pipeline.context_id, output_surface);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail;
> + }
> +
> + vas = vaCreateBuffer(ctx->hardware_context->display,
> + ctx->pipeline.context_id,
> + VAProcPipelineParameterBufferType,
> + sizeof(params), 1, &params, &params_id);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail_after_begin;
> + }
> + av_log(ctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
> + params_id);
> +
> + vas = vaRenderPicture(ctx->hardware_context->display,
> + ctx->pipeline.context_id, &params_id, 1);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail_after_begin;
> + }
> +
> + vas = vaEndPicture(ctx->hardware_context->display,
> + ctx->pipeline.context_id);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail_after_render;
> + }
> +
> + // This doesn't get freed automatically for some reason.
> + vas = vaDestroyBuffer(ctx->hardware_context->display, params_id);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail;
> + }
> +
> + if(ctx->output_is_vaapi) {
> + output_image = target_image;
> +
> + } else {
> + vas = vaSyncSurface(ctx->hardware_context->display, output_surface);
> + if(vas != VA_STATUS_SUCCESS) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to sync picture completion: "
> + "%d (%s).\n", vas, vaErrorStr(vas));
> + err = AVERROR_EXTERNAL;
> + goto fail;
> + }
> +
> + output_image = av_frame_alloc();
> + if(!output_image) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate frame to "
> + "copy output.");
> + err = AVERROR(ENOMEM);
> + goto fail;
> + }
> +
> + output_image->format = ctx->output_config.av_format;
> + output_image->width = target_image->width;
> + output_image->height = target_image->height;
> + err = av_frame_get_buffer(output_image, 32);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame of "
> + "%dx%d (%s): %s\n",
> + output_image->width, output_image->height,
> + av_get_pix_fmt_name(output_image->format), av_err2str(err));
> + goto fail;
> + }
> +
> + err = av_vaapi_copy_from_surface(output_image, target_image);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to copy from output surface.\n");
> + goto fail;
> + }
> + }
> +
> + av_frame_copy_props(output_image, input_image);
> +
> + av_frame_free(&input_image);
> + if(!ctx->input_is_vaapi)
> + av_frame_free(&source_image);
> + if(!ctx->output_is_vaapi)
> + av_frame_free(&target_image);
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> +
> + av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
> + av_get_pix_fmt_name(output_image->format),
> + output_image->width, output_image->height, output_image->pts);
> +
> + return ff_filter_frame(outlink, output_image);
> +
> + // We want to make sure that if vaBeginPicture has been called, we also
> + // call vaRenderPicture and vaEndPicture. These calls may well fail or
> + // do something else nasty, but once we're in this failure case there
> + // isn't much else we can do.
> + fail_after_begin:
> + vaRenderPicture(ctx->hardware_context->display,
> + ctx->pipeline.context_id, &params_id, 1);
> + fail_after_render:
> + vaEndPicture(ctx->hardware_context->display,
> + ctx->pipeline.context_id);
> + fail:
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> + if(input_image)
> + av_frame_free(&input_image);
> + if(source_image)
> + av_frame_free(&source_image);
> + if(target_image)
> + av_frame_free(&target_image);
> + if(output_image)
> + av_frame_free(&output_image);
> + return err;
> +}
> +
> +static av_cold int vaapi_scale_init(AVFilterContext *avctx)
> +{
> + VAAPIScaleContext *ctx = avctx->priv;
> +
> +#if 0
> + // Minimal hack to make this filter usable from ffmpeg.
> + extern AVVAAPIHardwareContext *vaapi_context;
> + ctx->hardware_context = vaapi_context;
> +#else
> + if(ctx->options.hardware_context == 0) {
> + av_log(ctx, AV_LOG_ERROR, "VAAPI encode requires hardware context.\n");
> + return AVERROR(EINVAL);
> + }
> + ctx->hardware_context =
> + (AVVAAPIHardwareContext*)ctx->options.hardware_context;
> +#endif
> +
> + ctx->output_width = ctx->options.output_size[0];
> + ctx->output_height = ctx->options.output_size[1];
> +
> + return 0;
> +}
> +
> +static av_cold void vaapi_scale_uninit(AVFilterContext *avctx)
> +{
> + VAAPIScaleContext *ctx = avctx->priv;
> + int err;
> +
> + if(ctx->pipeline_initialised) {
> + vaapi_scale_pipeline_uninit(ctx);
> +
> + av_vaapi_lock_hardware_context(ctx->hardware_context);
> +
> + err = av_vaapi_surface_pool_uninit(&ctx->output_pool);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise output "
> + "surface pool: %d (%s).\n", err, av_err2str(err));
> + }
> +
> + if(!ctx->input_is_vaapi) {
> + err = av_vaapi_surface_pool_uninit(&ctx->input_pool);
> + if(err < 0) {
> + av_log(ctx, AV_LOG_ERROR, "Failed to uninitialise input "
> + "surface pool: %d (%s).\n", err, av_err2str(err));
> + }
> + }
> +
> + av_vaapi_unlock_hardware_context(ctx->hardware_context);
> + }
> +}
> +
> +
> +#define OFFSET(member) offsetof(VAAPIScaleContext, options.member)
> +#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM)
> +static const AVOption vaapi_scale_options[] = {
> + { "hardware_context", "VAAPI hardware context",
> + OFFSET(hardware_context), AV_OPT_TYPE_INT64,
> + { .i64 = 0 }, INT64_MIN, INT64_MAX, AV_OPT_FLAG_VIDEO_PARAM },
> + { "size", "Set output size",
> + OFFSET(output_size), AV_OPT_TYPE_IMAGE_SIZE,
> + { 0 }, 0, 0, FLAGS },
> + // Need these until we can autonegotiate VAAPI surface format.
> + { "force_vaapi_in", "Force use of VAAPI surfaces on input",
> + OFFSET(force_vaapi_in), AV_OPT_TYPE_BOOL,
> + { .i64 = 0 }, 0, 1, FLAGS },
> + { "force_vaapi_out", "Force use of VAAPI surfaces on output",
> + OFFSET(force_vaapi_out), AV_OPT_TYPE_BOOL,
> + { .i64 = 0 }, 0, 1, FLAGS },
> + { 0 },
> +};
> +
> +static const AVClass vaapi_scale_class = {
> + .class_name = "vaapi_scale",
> + .item_name = av_default_item_name,
> + .option = vaapi_scale_options,
> + .version = LIBAVUTIL_VERSION_INT,
> +};
> +
> +static const AVFilterPad vaapi_scale_inputs[] = {
> + {
> + .name = "default",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .filter_frame = &vaapi_scale_filter_frame,
> + .config_props = &vaapi_scale_config_input,
> + },
> + { 0 }
> +};
> +
> +static const AVFilterPad vaapi_scale_outputs[] = {
> + {
> + .name = "default",
> + .type = AVMEDIA_TYPE_VIDEO,
> + .config_props = &vaapi_scale_config_output,
> + },
> + { 0 }
> +};
> +
> +AVFilter ff_vf_vaapi_scale = {
> + .name = "vaapi_scale",
> + .description = NULL_IF_CONFIG_SMALL("Scale to/from VAAPI surfaces."),
> + .priv_size = sizeof(VAAPIScaleContext),
> + .init = &vaapi_scale_init,
> + .uninit = &vaapi_scale_uninit,
> + .query_formats = &vaapi_scale_query_formats,
> + .inputs = vaapi_scale_inputs,
> + .outputs = vaapi_scale_outputs,
> + .priv_class = &vaapi_scale_class,
> +};
Most of it is probably ok. But before it's ready to be applied, I
think we should find a semi-elegant way to populate the filter graph
with the hw context. The "hardware_context" option is just a void*
pointer, and there isn't anything that would prevent a big mess if,
say, support for another hardware API is introduced later.
Seems like the Libav patch does provide something of value here with
its generic hw context. It could be a field in AVFilterGraph itself.
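To make that concrete, here is a rough sketch of the sort of thing I mean
(all names are hypothetical, nothing like this exists in the tree right now):

enum HWContextType {
    HW_CONTEXT_NONE = 0,
    HW_CONTEXT_VAAPI,
    /* other hardware APIs would get their own tag */
};

/* Generic wrapper that AVFilterGraph itself could carry. */
typedef struct HWContext {
    enum HWContextType type;
    void *ctx;    /* e.g. an AVVAAPIHardwareContext* when type is VAAPI */
} HWContext;

/* A filter's init could then check the type instead of blindly trusting
 * a raw pointer smuggled through an int64 AVOption. */
static int vaapi_scale_get_hwctx(const HWContext *graph_hwctx, void **vaapi_ctx)
{
    if (!graph_hwctx || graph_hwctx->type != HW_CONTEXT_VAAPI)
        return -1;    /* missing or wrong device type: fail cleanly */
    *vaapi_ctx = graph_hwctx->ctx;
    return 0;
}

That way a filter can reject the wrong kind of device cleanly instead of
dereferencing whatever pointer happens to be stored in the option.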