[FFmpeg-devel] [PATCH V3] avfilter: Add tonemap vaapi filter

Zachary Zhou zachary.zhou at intel.com
Sat Dec 29 09:02:30 EET 2018


It is supported on the ICL platform and implements two modes:
H2H (HDR to HDR): P010 -> RGB10
H2S (HDR to SDR): P010 -> RGB8

libva commit for HDR10
https://github.com/intel/libva/commit/cf11abe5e1b9c93ee75cf974076957162c1605b9
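
A sketch of a typical command line (the hwaccel options shown are the
usual way of getting decoded frames into VAAPI surfaces; the exact
pipeline around the filter is outside the scope of this patch):

    ffmpeg -hwaccel vaapi -hwaccel_output_format vaapi -i INPUT \
           -vf "tonemap_vaapi=h2s" OUTPUT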

Signed-off-by: Zachary Zhou <zachary.zhou@intel.com>
---
 configure                      |   2 +
 doc/filters.texi               |  63 ++++
 libavfilter/Makefile           |   1 +
 libavfilter/allfilters.c       |   1 +
 libavfilter/vaapi_vpp.c        |  29 ++
 libavfilter/vaapi_vpp.h        |   8 +
 libavfilter/vf_tonemap_vaapi.c | 542 +++++++++++++++++++++++++++++++++
 libavutil/hwcontext_vaapi.c    |   3 +
 8 files changed, 649 insertions(+)
 create mode 100644 libavfilter/vf_tonemap_vaapi.c

diff --git a/configure b/configure
index be49c19b88..baf70d03fc 100755
--- a/configure
+++ b/configure
@@ -3480,6 +3480,7 @@ tinterlace_merge_test_deps="tinterlace_filter"
 tinterlace_pad_test_deps="tinterlace_filter"
 tonemap_filter_deps="const_nan"
 tonemap_opencl_filter_deps="opencl const_nan"
+tonemap_vaapi_filter_deps="vaapi VAProcFilterParameterBufferHDRToneMapping"
 transpose_opencl_filter_deps="opencl"
 unsharp_opencl_filter_deps="opencl"
 uspp_filter_deps="gpl avcodec"
@@ -5984,6 +5985,7 @@ check_type "d3d9.h dxva2api.h" DXVA2_ConfigPictureDecode -D_WIN32_WINNT=0x0602
 
 check_type "va/va.h va/va_dec_hevc.h" "VAPictureParameterBufferHEVC"
 check_struct "va/va.h" "VADecPictureParameterBufferVP9" bit_depth
+check_type "va/va.h va/va_vpp.h" "VAProcFilterParameterBufferHDRToneMapping"
 check_type "va/va.h va/va_enc_hevc.h" "VAEncPictureParameterBufferHEVC"
 check_type "va/va.h va/va_enc_jpeg.h" "VAEncPictureParameterBufferJPEG"
 check_type "va/va.h va/va_enc_vp8.h"  "VAEncPictureParameterBufferVP8"
diff --git a/doc/filters.texi b/doc/filters.texi
index ac4c9b44d8..9ed53a1008 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -19130,6 +19130,69 @@ Convert HDR(PQ/HLG) video to bt2020-transfer-characteristic p010 format using li
 @end example
 @end itemize
 
+@section tonemap_vaapi
+
+Perform HDR (High Dynamic Range) to HDR and HDR to SDR (Standard Dynamic Range)
+conversion with tone-mapping.
+
+It accepts the following parameters:
+
+@table @option
+@item type
+Specify the type of tone-mapping conversion to perform.
+
+Possible values are:
+@table @var
+@item h2h
+Perform H2H (HDR to HDR) tone-mapping, converting from p010 to rgb10.
+@item h2s
+Perform H2S (HDR to SDR) tone-mapping, converting from p010 to rgb8.
+@end table
+
+@item display
+Set the output mastering display metadata for H2H.
+
+The metadata string is composed of the following components:
+@table @var
+@item G
+Green primary G(x|y)
+@item B
+Blue primary B(x|y)
+@item R
+Red primary R(x|y)
+@item WP
+White point WP(x|y)
+@item L
+Display mastering luminance L(max|min)
+@end table
+
+@item light
+Set the output content light level metadata for H2H.
+
+The metadata string is composed of the following components:
+@table @var
+@item CLL
+Maximum content light level
+@item FALL
+Maximum frame-average light level
+@end table
+
+@end table
+
+@subsection Examples
+
+@itemize
+@item
+Convert HDR video to HDR video, from p010 format to rgb10 format.
+@example
+-i INPUT -vf "tonemap_vaapi=h2h:display=G(13250|34500)B(7500|3000)R(34000|16000)WP(15635|16450)L(2000|2000):light=CLL(10000)FALL(1000)" OUTPUT
+@end example
+@item
+Convert HDR video to SDR video, from p010 format to rgb8 format.
+@example
+-i INPUT -vf "tonemap_vaapi=h2s" OUTPUT
+@end example
+@end itemize
+
 @section unsharp_opencl
 
 Sharpen or blur the input video.
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 6e2658186d..f6894209d1 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -390,6 +390,7 @@ OBJS-$(CONFIG_TMIX_FILTER)                   += vf_mix.o framesync.o
 OBJS-$(CONFIG_TONEMAP_FILTER)                += vf_tonemap.o colorspace.o
 OBJS-$(CONFIG_TONEMAP_OPENCL_FILTER)         += vf_tonemap_opencl.o colorspace.o opencl.o \
                                                 opencl/tonemap.o opencl/colorspace_common.o
+OBJS-$(CONFIG_TONEMAP_VAAPI_FILTER)          += vf_tonemap_vaapi.o vaapi_vpp.o
 OBJS-$(CONFIG_TPAD_FILTER)                   += vf_tpad.o
 OBJS-$(CONFIG_TRANSPOSE_FILTER)              += vf_transpose.o
 OBJS-$(CONFIG_TRANSPOSE_NPP_FILTER)          += vf_transpose_npp.o cuda_check.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index a600069500..754b84819d 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -369,6 +369,7 @@ extern AVFilter ff_vf_tlut2;
 extern AVFilter ff_vf_tmix;
 extern AVFilter ff_vf_tonemap;
 extern AVFilter ff_vf_tonemap_opencl;
+extern AVFilter ff_vf_tonemap_vaapi;
 extern AVFilter ff_vf_tpad;
 extern AVFilter ff_vf_transpose;
 extern AVFilter ff_vf_transpose_npp;
diff --git a/libavfilter/vaapi_vpp.c b/libavfilter/vaapi_vpp.c
index c5bbc3b85b..7389ebd9d5 100644
--- a/libavfilter/vaapi_vpp.c
+++ b/libavfilter/vaapi_vpp.c
@@ -276,6 +276,35 @@ int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx,
     return 0;
 }
 
+int ff_vaapi_vpp_make_param_buffers2(AVFilterContext *avctx,
+                                     int type,
+                                     const void *data,
+                                     size_t size,
+                                     int count,
+                                     VABufferID *buffer_id)
+{
+    VAStatus vas;
+    VABufferID buffer;
+    VAAPIVPPContext *ctx   = avctx->priv;
+
+    av_assert0(ctx->nb_filter_buffers + 1 <= VAProcFilterCount);
+
+    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
+                         type, size, count, (void*)data, &buffer);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
+               "buffer (type %d): %d (%s).\n",
+               type, vas, vaErrorStr(vas));
+        return AVERROR(EIO);
+    }
+
+    ctx->filter_buffers[ctx->nb_filter_buffers++] = buffer;
+    *buffer_id = buffer;
+
+    av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes, count %d) "
+           "is %#x.\n", type, size, count, buffer);
+    return 0;
+}
 
 int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
                                 VAProcPipelineParameterBuffer *params,
diff --git a/libavfilter/vaapi_vpp.h b/libavfilter/vaapi_vpp.h
index 0bc31018d4..e920c10591 100644
--- a/libavfilter/vaapi_vpp.h
+++ b/libavfilter/vaapi_vpp.h
@@ -72,6 +72,14 @@ int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx,
                                     size_t size,
                                     int count);
 
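+/**
+ * Same as ff_vaapi_vpp_make_param_buffers(), but additionally returns the
+ * ID of the created parameter buffer in *buffer_id, so that the caller can
+ * map and update it later (e.g. to refresh per-frame HDR metadata).
+ */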
+int ff_vaapi_vpp_make_param_buffers2(AVFilterContext *avctx,
+                                     int type,
+                                     const void *data,
+                                     size_t size,
+                                     int count,
+                                     VABufferID *buffer_id);
+
+
 int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
                                 VAProcPipelineParameterBuffer *params,
                                 VASurfaceID output_surface);
diff --git a/libavfilter/vf_tonemap_vaapi.c b/libavfilter/vf_tonemap_vaapi.c
new file mode 100644
index 0000000000..d77cd574b3
--- /dev/null
+++ b/libavfilter/vf_tonemap_vaapi.c
@@ -0,0 +1,542 @@
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+#include <string.h>
+
+#include "libavutil/avassert.h"
+#include "libavutil/mem.h"
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+#include "libavutil/mastering_display_metadata.h"
+
+#include "avfilter.h"
+#include "formats.h"
+#include "internal.h"
+#include "vaapi_vpp.h"
+
+typedef enum {
+    HDR_VAAPI_H2H,
+    HDR_VAAPI_H2S,
+} HDRType;
+
+typedef struct HDRVAAPIContext {
+    VAAPIVPPContext vpp_ctx; // must be the first field
+
+    int hdr_type;
+
+    char *master_display;
+    char *content_light;
+
+    VABufferID          buffer;
+    VAHdrMetaDataHDR10  out_metadata;
+
+    AVFrameSideData    *src_display;
+    AVFrameSideData    *src_light;
+} HDRVAAPIContext;
+
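+/*
+ * Update the HDR10 input metadata in the mapped tone-mapping filter
+ * parameter buffer, using the mastering-display and content-light
+ * side data attached to the input frame, if present.
+ */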
+static int tonemap_vaapi_set_filter_params(AVFilterContext *avctx, AVFrame *input_frame)
+{
+    VAAPIVPPContext *vpp_ctx   = avctx->priv;
+    HDRVAAPIContext *ctx       = avctx->priv;
+    VAStatus vas;
+    VAHdrMetaDataHDR10  *in_metadata;
+    VAProcFilterParameterBufferHDRToneMapping *hdrtm_param;
+
+    vas = vaMapBuffer(vpp_ctx->hwctx->display, ctx->buffer, (void**)&hdrtm_param);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to map "
+               "buffer (%d): %d (%s).\n",
+               ctx->buffer, vas, vaErrorStr(vas));
+        return AVERROR(EIO);
+    }
+
+    in_metadata = hdrtm_param->data.metadata;
+    ctx->src_display = av_frame_get_side_data(input_frame,
+                                              AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
+    if (ctx->src_display) {
+        const AVMasteringDisplayMetadata *hdr_meta =
+            (const AVMasteringDisplayMetadata *)ctx->src_display->data;
+        if (hdr_meta->has_luminance) {
+#define LAV_UINT32(a) (a.num)
+            in_metadata->max_display_mastering_luminance =
+                LAV_UINT32(hdr_meta->max_luminance);
+            in_metadata->min_display_mastering_luminance =
+                LAV_UINT32(hdr_meta->min_luminance);
+#undef LAV_UINT32
+            av_log(avctx, AV_LOG_DEBUG,
+                   "Mastering Display Metadata(in luminance):\n");
+            av_log(avctx, AV_LOG_DEBUG,
+                   "min_luminance=%d, max_luminance=%d\n",
+                   in_metadata->min_display_mastering_luminance,
+                   in_metadata->max_display_mastering_luminance);
+        }
+
+        if (hdr_meta->has_primaries) {
+#define LAV_RED    0
+#define LAV_GREEN  1
+#define LAV_BLUE   2
+#define LAV_UINT16(a) (a.num)
+            in_metadata->display_primaries_x[0] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_GREEN][0]);
+            in_metadata->display_primaries_y[0] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_GREEN][1]);
+            in_metadata->display_primaries_x[1] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_BLUE][0]);
+            in_metadata->display_primaries_y[1] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_BLUE][1]);
+            in_metadata->display_primaries_x[2] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_RED][0]);
+            in_metadata->display_primaries_y[2] =
+                LAV_UINT16(hdr_meta->display_primaries[LAV_RED][1]);
+            in_metadata->white_point_x =
+                LAV_UINT16(hdr_meta->white_point[0]);
+            in_metadata->white_point_y =
+                LAV_UINT16(hdr_meta->white_point[1]);
+#undef LAV_RED
+#undef LAV_GREEN
+#undef LAV_BLUE
+#undef LAV_UINT16
+            av_log(avctx, AV_LOG_DEBUG,
+                   "Mastering Display Metadata(in primaries):\n");
+            av_log(avctx, AV_LOG_DEBUG,
+                   "G(%u,%u) B(%u,%u) R(%u,%u) WP(%u,%u)\n",
+                   in_metadata->display_primaries_x[0],
+                   in_metadata->display_primaries_y[0],
+                   in_metadata->display_primaries_x[1],
+                   in_metadata->display_primaries_y[1],
+                   in_metadata->display_primaries_x[2],
+                   in_metadata->display_primaries_y[2],
+                   in_metadata->white_point_x,
+                   in_metadata->white_point_y);
+        }
+    }
+
+    ctx->src_light = av_frame_get_side_data(input_frame,
+                                            AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
+    if (ctx->src_light) {
+        const AVContentLightMetadata *light_meta =
+            (const AVContentLightMetadata *) ctx->src_light->data;
+        in_metadata->max_content_light_level = light_meta->MaxCLL;
+        in_metadata->max_pic_average_light_level = light_meta->MaxFALL;
+
+        av_log(avctx, AV_LOG_DEBUG,
+               "Mastering Content Light Level (in):\n");
+        av_log(avctx, AV_LOG_DEBUG,
+               "MaxCLL(%u) MaxFALL(%u)\n",
+               in_metadata->max_content_light_level,
+               in_metadata->max_pic_average_light_level);
+    }
+
+    vas = vaUnmapBuffer(vpp_ctx->hwctx->display, ctx->buffer);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to unmap output buffers: "
+               "%d (%s).\n", vas, vaErrorStr(vas));
+        return AVERROR(EIO);
+    }
+
+    return 0;
+}
+
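+/*
+ * Query the driver's HDR tone-mapping capabilities and create the
+ * HDR tone-mapping filter parameter buffer; the buffer ID is kept in
+ * the context so that its metadata can be refreshed for every frame.
+ */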
+static int tonemap_vaapi_build_filter_params(AVFilterContext *avctx)
+{
+    VAAPIVPPContext *vpp_ctx   = avctx->priv;
+    HDRVAAPIContext *ctx       = avctx->priv;
+    VAStatus vas;
+    VAProcFilterCapHighDynamicRange hdr_cap;
+    int num_query_caps;
+    VAProcFilterParameterBufferHDRToneMapping hdrtm_param;
+    VAHdrMetaDataHDR10  in_metadata;
+
+    vas = vaQueryVideoProcFilterCaps(vpp_ctx->hwctx->display,
+                                     vpp_ctx->va_context,
+                                     VAProcFilterHighDynamicRangeToneMapping,
+                                     &hdr_cap, &num_query_caps);
+    if (vas != VA_STATUS_SUCCESS) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to query HDR caps "
+               "context: %d (%s).\n", vas, vaErrorStr(vas));
+        return AVERROR(EIO);
+    }
+
+    if (hdr_cap.metadata_type == VAProcHighDynamicRangeMetadataNone) {
+        av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support HDR\n");
+        return AVERROR(EINVAL);
+    }
+
+    if (! (VA_TONE_MAPPING_HDR_TO_HDR & hdr_cap.caps_flag) &&
+        ! (VA_TONE_MAPPING_HDR_TO_SDR & hdr_cap.caps_flag)) {
+        av_log(avctx, AV_LOG_ERROR,
+               "VAAPI driver doesn't support H2H or H2S\n");
+        return AVERROR(EINVAL);
+    }
+
+    memset(&hdrtm_param, 0, sizeof(hdrtm_param));
+    memset(&in_metadata, 0, sizeof(in_metadata));
+    hdrtm_param.type = VAProcFilterHighDynamicRangeToneMapping;
+    hdrtm_param.data.metadata_type = VAProcHighDynamicRangeMetadataHDR10;
+    hdrtm_param.data.metadata      = &in_metadata;
+    hdrtm_param.data.metadata_size = sizeof(VAHdrMetaDataHDR10);
+
+    return ff_vaapi_vpp_make_param_buffers2(avctx,
+                                            VAProcFilterParameterBufferType,
+                                            &hdrtm_param, sizeof(hdrtm_param),
+                                            1, &ctx->buffer);
+}
+
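+/*
+ * Per-frame processing: refresh the input HDR metadata from the frame's
+ * side data, run the VPP pipeline, and for H2H attach the user-supplied
+ * output metadata to the output frame as side data.
+ */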
+static int tonemap_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
+{
+    AVFilterContext *avctx     = inlink->dst;
+    AVFilterLink *outlink      = avctx->outputs[0];
+    VAAPIVPPContext *vpp_ctx   = avctx->priv;
+    HDRVAAPIContext *ctx       = avctx->priv;
+    AVFrame *output_frame      = NULL;
+    VASurfaceID input_surface, output_surface;
+    VARectangle input_region,  output_region;
+
+    VAProcPipelineParameterBuffer params;
+    int err;
+
+    VAHdrMetaData              out_hdr_metadata;
+
+    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
+           av_get_pix_fmt_name(input_frame->format),
+           input_frame->width, input_frame->height, input_frame->pts);
+
+    if (vpp_ctx->va_context == VA_INVALID_ID)
+        return AVERROR(EINVAL);
+
+    err = tonemap_vaapi_set_filter_params(avctx, input_frame);
+    if (err < 0)
+        goto fail;
+
+    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
+    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for tonemap vpp input.\n",
+           input_surface);
+
+    output_frame = ff_get_video_buffer(outlink, vpp_ctx->output_width,
+                                       vpp_ctx->output_height);
+    if (!output_frame) {
+        err = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
+    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for tonemap vpp output.\n",
+           output_surface);
+    memset(&params, 0, sizeof(params));
+    input_region = (VARectangle) {
+        .x      = 0,
+        .y      = 0,
+        .width  = input_frame->width,
+        .height = input_frame->height,
+    };
+
+    output_region = (VARectangle) {
+        .x      = 0,
+        .y      = 0,
+        .width  = output_frame->width,
+        .height = output_frame->height,
+    };
+
+    if (vpp_ctx->nb_filter_buffers) {
+        params.filters     = &vpp_ctx->filter_buffers[0];
+        params.num_filters = vpp_ctx->nb_filter_buffers;
+    }
+
+    params.surface = input_surface;
+    params.surface_region = &input_region;
+    params.surface_color_standard =
+        ff_vaapi_vpp_colour_standard(input_frame->colorspace);
+
+    params.output_region = &output_region;
+    params.output_background_color = 0xff000000;
+    params.output_color_standard = params.surface_color_standard;
+
+    params.pipeline_flags = 0;
+    params.filter_flags = VA_FRAME_PICTURE;
+
+    switch (ctx->hdr_type)
+    {
+    case HDR_VAAPI_H2H:
+        params.surface_color_standard = VAProcColorStandardExplicit;
+        params.output_color_standard = VAProcColorStandardExplicit;
+        /* H.273 values: colour_primaries 9 = BT.2020,
+         * transfer_characteristics 16 = SMPTE ST 2084 (PQ) */
+        params.input_color_properties.colour_primaries = 9;
+        params.output_color_properties.colour_primaries = 9;
+        params.input_color_properties.transfer_characteristics = 16;
+        params.output_color_properties.transfer_characteristics = 16;
+        break;
+    case HDR_VAAPI_H2S:
+        params.surface_color_standard = VAProcColorStandardExplicit;
+        /* input is BT.2020 primaries (9) with SMPTE ST 2084 PQ transfer (16) */
+        params.input_color_properties.colour_primaries = 9;
+        params.input_color_properties.transfer_characteristics = 16;
+        break;
+    default:
+        break;
+    }
+
+    if (ctx->hdr_type == HDR_VAAPI_H2H) {
+        memset(&out_hdr_metadata, 0, sizeof(out_hdr_metadata));
+        if (ctx->master_display) {
+            if (10 != sscanf(ctx->master_display,
+                             "G(%hu|%hu)B(%hu|%hu)R(%hu|%hu)WP(%hu|%hu)L(%u|%u)",
+                             &ctx->out_metadata.display_primaries_x[0],
+                             &ctx->out_metadata.display_primaries_y[0],
+                             &ctx->out_metadata.display_primaries_x[1],
+                             &ctx->out_metadata.display_primaries_y[1],
+                             &ctx->out_metadata.display_primaries_x[2],
+                             &ctx->out_metadata.display_primaries_y[2],
+                             &ctx->out_metadata.white_point_x,
+                             &ctx->out_metadata.white_point_y,
+                             &ctx->out_metadata.max_display_mastering_luminance,
+                             &ctx->out_metadata.min_display_mastering_luminance)) {
+                av_log(avctx, AV_LOG_ERROR,
+                       "Invalid value for option 'display'\n");
+                err = AVERROR(EINVAL);
+                goto fail;
+            } else {
+                AVFrameSideData *metadata;
+                AVMasteringDisplayMetadata *hdr_meta;
+
+                metadata = av_frame_new_side_data(output_frame,
+                                                  AV_FRAME_DATA_MASTERING_DISPLAY_METADATA,
+                                                  sizeof(AVMasteringDisplayMetadata));
+                if (!metadata) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot create new side data for output\n");
+                    err = AVERROR(ENOMEM);
+                    goto fail;
+                }
+
+                if (ctx->src_display) {
+                    memcpy(metadata->data,
+                           ctx->src_display->data,
+                           sizeof(AVMasteringDisplayMetadata));
+                }
+
+                hdr_meta = (AVMasteringDisplayMetadata *)metadata->data;
+#define LAV_RED    0
+#define LAV_GREEN  1
+#define LAV_BLUE   2
+                hdr_meta->display_primaries[LAV_GREEN][0].num = ctx->out_metadata.display_primaries_x[0];
+                hdr_meta->display_primaries[LAV_GREEN][1].num = ctx->out_metadata.display_primaries_y[0];
+                hdr_meta->display_primaries[LAV_BLUE][0].num  = ctx->out_metadata.display_primaries_x[1];
+                hdr_meta->display_primaries[LAV_BLUE][1].num  = ctx->out_metadata.display_primaries_y[1];
+                hdr_meta->display_primaries[LAV_RED][0].num   = ctx->out_metadata.display_primaries_x[2];
+                hdr_meta->display_primaries[LAV_RED][1].num   = ctx->out_metadata.display_primaries_y[2];
+                hdr_meta->white_point[0].num = ctx->out_metadata.white_point_x;
+                hdr_meta->white_point[1].num = ctx->out_metadata.white_point_y;
+#undef LAV_RED
+#undef LAV_GREEN
+#undef LAV_BLUE
+                hdr_meta->has_primaries = 1;
+
+                hdr_meta->max_luminance.num = ctx->out_metadata.max_display_mastering_luminance;
+                hdr_meta->min_luminance.num = ctx->out_metadata.min_display_mastering_luminance;
+                hdr_meta->has_luminance = 1;
+
+                av_log(avctx, AV_LOG_DEBUG,
+                       "Mastering Display Metadata(out luminance):\n");
+                av_log(avctx, AV_LOG_DEBUG,
+                       "min_luminance=%u, max_luminance=%u\n",
+                       ctx->out_metadata.min_display_mastering_luminance,
+                       ctx->out_metadata.max_display_mastering_luminance);
+
+                av_log(avctx, AV_LOG_DEBUG,
+                       "Mastering Display Metadata(out primaries):\n");
+                av_log(avctx, AV_LOG_DEBUG,
+                       "G(%u,%u) B(%u,%u) R(%u,%u) WP(%u,%u)\n",
+                       ctx->out_metadata.display_primaries_x[0],
+                       ctx->out_metadata.display_primaries_y[0],
+                       ctx->out_metadata.display_primaries_x[1],
+                       ctx->out_metadata.display_primaries_y[1],
+                       ctx->out_metadata.display_primaries_x[2],
+                       ctx->out_metadata.display_primaries_y[2],
+                       ctx->out_metadata.white_point_x,
+                       ctx->out_metadata.white_point_y);
+            }
+        }
+
+        if (ctx->content_light) {
+            if (2 != sscanf(ctx->content_light,
+                            "CLL(%hu)FALL(%hu)",
+                            &ctx->out_metadata.max_content_light_level,
+                            &ctx->out_metadata.max_pic_average_light_level)) {
+                av_log(avctx, AV_LOG_ERROR,
+                       "Invalid value for option 'light'\n");
+                err = AVERROR(EINVAL);
+                goto fail;
+            } else {
+                AVFrameSideData *metadata_lt;
+                AVContentLightMetadata *hdr_meta_lt;
+
+                metadata_lt = av_frame_new_side_data(output_frame,
+                                                     AV_FRAME_DATA_CONTENT_LIGHT_LEVEL,
+                                                     sizeof(AVContentLightMetadata));
+                if (!metadata_lt) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot create new side data for output\n");
+                    err = AVERROR(ENOMEM);
+                    goto fail;
+                }
+
+                if (ctx->src_light) {
+                    memcpy(metadata_lt->data,
+                           ctx->src_light->data,
+                           sizeof(AVContentLightMetadata));
+                }
+
+                hdr_meta_lt = (AVContentLightMetadata *)metadata_lt->data;
+                hdr_meta_lt->MaxCLL = ctx->out_metadata.max_content_light_level;
+                hdr_meta_lt->MaxFALL = ctx->out_metadata.max_pic_average_light_level;
+
+                av_log(avctx, AV_LOG_DEBUG,
+                       "Mastering Content Light Level (out):\n");
+                av_log(avctx, AV_LOG_DEBUG,
+                       "MaxCLL(%u) MaxFALL(%u)\n",
+                       ctx->out_metadata.max_content_light_level,
+                       ctx->out_metadata.max_pic_average_light_level);
+            }
+        }
+
+        out_hdr_metadata.metadata_type = VAProcHighDynamicRangeMetadataHDR10;
+        out_hdr_metadata.metadata      = &ctx->out_metadata;
+        out_hdr_metadata.metadata_size = sizeof(VAHdrMetaDataHDR10);
+
+        params.output_hdr_metadata = &out_hdr_metadata;
+    }
+
+    err = ff_vaapi_vpp_render_picture(avctx, &params, output_surface);
+    if (err < 0)
+        goto fail;
+
+    err = av_frame_copy_props(output_frame, input_frame);
+    if (err < 0)
+        goto fail;
+    av_frame_free(&input_frame);
+
+    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
+           av_get_pix_fmt_name(output_frame->format),
+           output_frame->width, output_frame->height, output_frame->pts);
+
+    return ff_filter_frame(outlink, output_frame);
+
+fail:
+    av_frame_free(&input_frame);
+    av_frame_free(&output_frame);
+    return err;
+}
+
+static av_cold int tonemap_vaapi_init(AVFilterContext *avctx)
+{
+    VAAPIVPPContext *vpp_ctx = avctx->priv;
+    HDRVAAPIContext *ctx     = avctx->priv;
+
+    ff_vaapi_vpp_ctx_init(avctx);
+    vpp_ctx->build_filter_params = tonemap_vaapi_build_filter_params;
+    vpp_ctx->pipeline_uninit = ff_vaapi_vpp_pipeline_uninit;
+
+    if (ctx->hdr_type == HDR_VAAPI_H2H) {
+        vpp_ctx->output_format = AV_PIX_FMT_GBRP10;
+    } else if (ctx->hdr_type == HDR_VAAPI_H2S) {
+        vpp_ctx->output_format = AV_PIX_FMT_ARGB;
+    } else {
+        vpp_ctx->output_format = AV_PIX_FMT_NONE;
+    }
+
+    return 0;
+}
+
+static void tonemap_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
+{
+    return ff_vaapi_vpp_ctx_uninit(avctx);
+}
+
+static int tonemap_vaapi_vpp_query_formats(AVFilterContext *avctx)
+{
+    int err;
+
+    enum AVPixelFormat pix_in_fmts[] = {
+        AV_PIX_FMT_P010,     //Input
+    };
+
+    enum AVPixelFormat pix_out_fmts[] = {
+        AV_PIX_FMT_GBRP10,   //H2H RGB10
+        AV_PIX_FMT_ARGB,     //H2S RGB8
+    };
+
+    err = ff_formats_ref(ff_make_format_list(pix_in_fmts),
+                         &avctx->inputs[0]->out_formats);
+    if (err < 0)
+        return err;
+
+    err = ff_formats_ref(ff_make_format_list(pix_out_fmts),
+                         &avctx->outputs[0]->in_formats);
+    if (err < 0)
+        return err;
+
+    return ff_vaapi_vpp_query_formats(avctx);
+}
+
+static int tonemap_vaapi_vpp_config_input(AVFilterLink *inlink)
+{
+    return ff_vaapi_vpp_config_input(inlink);
+}
+
+static int tonemap_vaapi_vpp_config_output(AVFilterLink *outlink)
+{
+    return ff_vaapi_vpp_config_output(outlink);
+}
+
+#define OFFSET(x) offsetof(HDRVAAPIContext, x)
+#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM)
+static const AVOption tonemap_vaapi_options[] = {
+    { "type",    "hdr type",            OFFSET(hdr_type), AV_OPT_TYPE_INT, { .i64 = HDR_VAAPI_H2H }, HDR_VAAPI_H2H, HDR_VAAPI_H2S, FLAGS, "type" },
+        { "h2h", "vaapi P010 to RGB10", 0, AV_OPT_TYPE_CONST, {.i64=HDR_VAAPI_H2H}, INT_MIN, INT_MAX, FLAGS, "type" },
+        { "h2s", "vaapi P010 to RGB8",  0, AV_OPT_TYPE_CONST, {.i64=HDR_VAAPI_H2S}, INT_MIN, INT_MAX, FLAGS, "type" },
+    { "display", "set master display",  OFFSET(master_display), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
+    { "light",   "set content light",   OFFSET(content_light),  AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
+    { NULL }
+};
+
+AVFILTER_DEFINE_CLASS(tonemap_vaapi);
+
+static const AVFilterPad tonemap_vaapi_inputs[] = {
+    {
+        .name         = "default",
+        .type         = AVMEDIA_TYPE_VIDEO,
+        .filter_frame = &tonemap_vaapi_filter_frame,
+        .config_props = &tonemap_vaapi_vpp_config_input,
+    },
+    { NULL }
+};
+
+static const AVFilterPad tonemap_vaapi_outputs[] = {
+    {
+        .name = "default",
+        .type = AVMEDIA_TYPE_VIDEO,
+        .config_props = &tonemap_vaapi_vpp_config_output,
+    },
+    { NULL }
+};
+
+AVFilter ff_vf_tonemap_vaapi = {
+    .name           = "tonemap_vaapi",
+    .description    = NULL_IF_CONFIG_SMALL("VAAPI VPP for tone-mapping"),
+    .priv_size      = sizeof(HDRVAAPIContext),
+    .init           = &tonemap_vaapi_init,
+    .uninit         = &tonemap_vaapi_vpp_ctx_uninit,
+    .query_formats  = &tonemap_vaapi_vpp_query_formats,
+    .inputs         = tonemap_vaapi_inputs,
+    .outputs        = tonemap_vaapi_outputs,
+    .priv_class     = &tonemap_vaapi_class,
+    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
+};
diff --git a/libavutil/hwcontext_vaapi.c b/libavutil/hwcontext_vaapi.c
index 8624369bb9..f18de1d778 100644
--- a/libavutil/hwcontext_vaapi.c
+++ b/libavutil/hwcontext_vaapi.c
@@ -124,6 +124,9 @@ static const VAAPIFormatDescriptor vaapi_format_map[] = {
 #endif
     MAP(BGRA, RGB32,   BGRA, 0),
     MAP(BGRX, RGB32,   BGR0, 0),
+#ifdef VA_RT_FORMAT_RGB32_10
+    MAP(RGBA, RGB32_10, GBRP10, 0),
+#endif
     MAP(RGBA, RGB32,   RGBA, 0),
     MAP(RGBX, RGB32,   RGB0, 0),
 #ifdef VA_FOURCC_ABGR
-- 
2.20.1


