[FFmpeg-devel] [PATCH 8/8] avfilter/vf_pp_vpe: Add VPE post processing filter
Zhang, Guiyong
Guiyong.Zhang at verisilicon.com
Thu May 28 08:11:51 EEST 2020
VPE (Video Pipeline Engine) is VeriSilicon's hardware engine for
multi-format video encoding and decoding.
This filter uses the VPI (VPE Interface) API and library for raw video
frame post processing.
The input of this filter is raw video data; most popular raw formats,
such as NV12, YUV420P and YUV420P10BE, are supported.
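A filter graph typically feeds the resulting AV_PIX_FMT_VPE hardware
frames to a VPE encoder. A sketch of such an invocation follows; the
device path, encoder name and option values are illustrative only:

  ffmpeg -init_hw_device vpe=dev0:/dev/transcoder0 -filter_hw_device dev0 \
         -i input.mp4 -vf vpe_pp=outputs=1:force10bit=1 \
         -c:v <vpe_encoder> out.mp4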
Signed-off-by: Guiyong.zhang <guiyong.zhang at verisilicon.com>
---
configure | 1 +
libavfilter/Makefile | 1 +
libavfilter/allfilters.c | 1 +
libavfilter/vf_pp_vpe.c | 391 +++++++++++++++++++++++++++++++++++++++
4 files changed, 394 insertions(+)
create mode 100755 libavfilter/vf_pp_vpe.c
diff --git a/configure b/configure
index ab5d3e2328..7110546326 100755
--- a/configure
+++ b/configure
@@ -3642,6 +3642,7 @@ xfade_opencl_filter_deps="opencl"
yadif_cuda_filter_deps="ffnvcodec"
yadif_cuda_filter_deps_any="cuda_nvcc cuda_llvm"
spliter_vpe_filter_deps="vpe"
+pp_vpe_filter_deps="vpe"
# examples
avio_list_dir_deps="avformat avutil"
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 3e03f2b75b..c5a0dbce5d 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -466,6 +466,7 @@ OBJS-$(CONFIG_ZMQ_FILTER) += f_zmq.o
OBJS-$(CONFIG_ZOOMPAN_FILTER) += vf_zoompan.o
OBJS-$(CONFIG_ZSCALE_FILTER) += vf_zscale.o
OBJS-$(CONFIG_SPLITER_VPE_FILTER) += vf_spliter_vpe.o
+OBJS-$(CONFIG_PP_VPE_FILTER) += vf_pp_vpe.o
OBJS-$(CONFIG_ALLRGB_FILTER) += vsrc_testsrc.o
OBJS-$(CONFIG_ALLYUV_FILTER) += vsrc_testsrc.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index 68f3b3597c..0a6d376a51 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -444,6 +444,7 @@ extern AVFilter ff_vf_zmq;
extern AVFilter ff_vf_zoompan;
extern AVFilter ff_vf_zscale;
extern AVFilter ff_vf_spliter_vpe;
+extern AVFilter ff_vf_pp_vpe;
extern AVFilter ff_vsrc_allrgb;
extern AVFilter ff_vsrc_allyuv;
diff --git a/libavfilter/vf_pp_vpe.c b/libavfilter/vf_pp_vpe.c
new file mode 100755
index 0000000000..ea718cc104
--- /dev/null
+++ b/libavfilter/vf_pp_vpe.c
@@ -0,0 +1,391 @@
+/*
+ * Verisilicon VPE Post Processing Filter
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include <stdint.h>
+
+#include <vpe/vpi_types.h>
+#include <vpe/vpi_api.h>
+
+#include "avfilter.h"
+#include "formats.h"
+#include "internal.h"
+#include "libavutil/pixfmt.h"
+#include "libavutil/buffer.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/opt.h"
+#include "libavutil/frame.h"
+#include "libavfilter/filters.h"
+#include "libavutil/hwcontext_vpe.h"
+
+typedef struct VpePPFilter {
+ const AVClass *av_class;
+ AVBufferRef *hw_device;
+ AVBufferRef *hw_frame;
+
+ VpiCtx ctx;
+ VpiApi *vpi;
+
+ int nb_outputs;
+ int force_10bit;
+ char *low_res;
+ VpiPPOpition cfg;
+} VpePPFilter;
+
+static const enum AVPixelFormat input_pix_fmts[] = {
+ AV_PIX_FMT_NV12, AV_PIX_FMT_P010LE, AV_PIX_FMT_YUV420P,
+ AV_PIX_FMT_YUV422P, AV_PIX_FMT_NV21, AV_PIX_FMT_YUV420P10LE,
+ AV_PIX_FMT_YUV420P10BE, AV_PIX_FMT_YUV422P10LE, AV_PIX_FMT_YUV422P10BE,
+ AV_PIX_FMT_P010BE, AV_PIX_FMT_YUV444P, AV_PIX_FMT_RGB24,
+ AV_PIX_FMT_BGR24, AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA,
+ AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE,
+};
+
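+/* Mapping from FFmpeg software pixel formats to the corresponding VPI
+ * pixel formats. */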
+typedef struct PixelMapTable {
+ enum AVPixelFormat src;
+ VpiPixsFmt des;
+} PixelMapTable;
+
+static PixelMapTable ptable[] = {
+ { AV_PIX_FMT_YUV420P, VPI_FMT_YUV420P },
+ { AV_PIX_FMT_YUV422P, VPI_FMT_YUV422P },
+ { AV_PIX_FMT_NV12, VPI_FMT_NV12 },
+ { AV_PIX_FMT_NV21, VPI_FMT_NV21 },
+ { AV_PIX_FMT_YUV420P10LE, VPI_FMT_YUV420P10LE },
+ { AV_PIX_FMT_YUV420P10BE, VPI_FMT_YUV420P10BE },
+ { AV_PIX_FMT_YUV422P10LE, VPI_FMT_YUV422P10LE },
+ { AV_PIX_FMT_YUV422P10BE, VPI_FMT_YUV422P10BE },
+ { AV_PIX_FMT_P010LE, VPI_FMT_P010LE },
+ { AV_PIX_FMT_P010BE, VPI_FMT_P010BE },
+ { AV_PIX_FMT_YUV444P, VPI_FMT_YUV444P },
+ { AV_PIX_FMT_RGB24, VPI_FMT_RGB24 },
+ { AV_PIX_FMT_BGR24, VPI_FMT_BGR24 },
+ { AV_PIX_FMT_ARGB, VPI_FMT_ARGB },
+ { AV_PIX_FMT_RGBA, VPI_FMT_RGBA },
+ { AV_PIX_FMT_ABGR, VPI_FMT_ABGR },
+ { AV_PIX_FMT_BGRA, VPI_FMT_BGRA },
+};
+
+static const enum AVPixelFormat output_pix_fmts[] = {
+ AV_PIX_FMT_VPE,
+ AV_PIX_FMT_NONE,
+};
+
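+/* Create and initialize the VPI PP instance and add the single video
+ * output pad. */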
+static av_cold int vpe_pp_init(AVFilterContext *avf_ctx)
+{
+ VpePPFilter *ctx = avf_ctx->priv;
+ int ret = 0;
+ AVFilterPad pad = { 0 };
+
+ ret = vpi_create(&ctx->ctx, &ctx->vpi, PP_VPE);
+ if (ret)
+ return AVERROR_EXTERNAL;
+
+ ret = ctx->vpi->init(ctx->ctx, NULL);
+ if (ret)
+ return AVERROR_EXTERNAL;
+
+ pad.type = AVMEDIA_TYPE_VIDEO;
+ pad.name = "output0";
+ if ((ret = ff_insert_outpad(avf_ctx, 0, &pad)) < 0) {
+ return ret;
+ }
+
+ return 0;
+}
+
+static av_cold void vpe_pp_uninit(AVFilterContext *avf_ctx)
+{
+ VpePPFilter *ctx = avf_ctx->priv;
+
+    if (ctx->hw_device) {
+        ctx->vpi->close(ctx->ctx);
+        vpi_destroy(ctx->ctx);
+        av_buffer_unref(&ctx->hw_device);
+    }
+    av_buffer_unref(&ctx->hw_frame);
+}
+
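+/* AVBuffer free callback: notify the PP instance that the picture has been
+ * consumed, then release the VpiFrame wrapper allocated in filter_frame(). */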
+static void vpe_pp_picture_consumed(void *opaque, uint8_t *data)
+{
+ VpePPFilter *ctx = opaque;
+ VpiCtrlCmdParam cmd;
+
+ cmd.cmd = VPI_CMD_PP_CONSUME;
+ cmd.data = data;
+ ctx->vpi->control(ctx->ctx, (void *)&cmd, NULL);
+ free(data);
+}
+
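+/* Wrap the VpiFrame produced by the PP instance into an AV_PIX_FMT_VPE
+ * hardware AVFrame referencing the filter's hwframe context. */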
+static int vpe_pp_output_avframe(VpePPFilter *ctx, VpiFrame *input,
+ AVFrame *output)
+{
+ AVHWFramesContext *hwframe_ctx = (AVHWFramesContext *)ctx->hw_frame->data;
+ AVVpeFramesContext *vpeframe_ctx = (AVVpeFramesContext *)hwframe_ctx->hwctx;
+ VpiFrame *frame_hwctx = vpeframe_ctx->frame;
+
+ if (input) {
+ output->width = input->width;
+ output->height = input->height;
+ output->linesize[0] = input->linesize[0];
+ output->linesize[1] = input->linesize[1];
+ output->linesize[2] = input->linesize[2];
+ output->key_frame = input->key_frame;
+ output->format = AV_PIX_FMT_VPE;
+        output->data[0] = (uint8_t *)input;
+ output->buf[0] =
+ av_buffer_create((uint8_t *)input, sizeof(VpiFrame),
+ vpe_pp_picture_consumed, (void *)ctx,
+ AV_BUFFER_FLAG_READONLY);
+ if (output->buf[0] == NULL)
+ return AVERROR(ENOMEM);
+
+ memcpy(frame_hwctx, input, sizeof(VpiFrame));
+ output->hw_frames_ctx = av_buffer_ref(ctx->hw_frame);
+ if (output->hw_frames_ctx == NULL)
+ return AVERROR(ENOMEM);
+
+ } else {
+ memset(output, 0, sizeof(AVFrame));
+ }
+
+ return 0;
+}
+
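+/* Translate an input software AVFrame into the VpiFrame layout expected by
+ * the VPI process() call. */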
+static int vpe_pp_output_vpeframe(AVFrame *input, VpiFrame *output,
+ int max_frame_delay)
+{
+ memset(output, 0, sizeof(VpiFrame));
+ if (input) {
+ output->width = input->width;
+ output->height = input->height;
+ output->linesize[0] = input->linesize[0];
+ output->linesize[1] = input->linesize[1];
+ output->linesize[2] = input->linesize[2];
+ output->key_frame = input->key_frame;
+ output->pts = input->pts;
+ output->pkt_dts = input->pkt_dts;
+ output->data[0] = input->data[0];
+ output->data[1] = input->data[1];
+ output->data[2] = input->data[2];
+ output->max_frames_delay = max_frame_delay;
+ }
+
+ return 0;
+}
+
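+/* Push one raw frame through the VPE post processor and send the result as
+ * a hardware frame on the output link. */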
+static int vpe_pp_filter_frame(AVFilterLink *inlink, AVFrame *frame)
+{
+ AVFilterContext *avf_ctx = inlink->dst;
+ AVFilterLink *outlink = avf_ctx->outputs[0];
+ AVHWFramesContext *hwframe_ctx = NULL;
+ AVFrame *buf_out = NULL;
+ VpePPFilter *ctx = avf_ctx->priv;
+ VpiFrame in_picture, *out_picture;
+ AVVpeFramesContext *vpeframe_ctx = NULL;
+ int ret = 0;
+ int max_frame_delay = 0;
+
+ hwframe_ctx = (AVHWFramesContext *)ctx->hw_frame->data;
+ vpeframe_ctx = (AVVpeFramesContext *)hwframe_ctx->hwctx;
+ max_frame_delay = vpeframe_ctx->frame->max_frames_delay;
+ ret = vpe_pp_output_vpeframe(frame, &in_picture, max_frame_delay);
+ if (ret)
+ return ret;
+
+    out_picture = malloc(sizeof(VpiFrame));
+    if (!out_picture)
+        return AVERROR(ENOMEM);
+    ret = ctx->vpi->process(ctx->ctx, &in_picture, out_picture);
+    if (ret) {
+        free(out_picture);
+        return AVERROR_EXTERNAL;
+    }
+
+    buf_out = av_frame_alloc();
+    if (!buf_out)
+        return AVERROR(ENOMEM);
+
+    ret = av_frame_copy_props(buf_out, frame);
+    if (ret) {
+        av_frame_free(&buf_out);
+        return ret;
+    }
+
+    ret = vpe_pp_output_avframe(ctx, out_picture, buf_out);
+    if (ret < 0) {
+        av_frame_free(&buf_out);
+        return ret;
+    }
+
+ av_frame_free(&frame);
+
+ ret = ff_outlink_get_status(outlink);
+ if (ret < 0)
+ return ret;
+
+ ret = ff_filter_frame(outlink, buf_out);
+ if (ret < 0)
+ return ret;
+
+ return 0;
+}
+
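+/* Create the AV_PIX_FMT_VPE hardware frames context from the device context
+ * supplied by the caller and attach it to the input link. */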
+static int vpe_pp_init_hwctx(AVFilterContext *ctx, AVFilterLink *inlink)
+{
+ AVHWFramesContext *hwframe_ctx;
+ int ret = 0;
+ VpePPFilter *filter = ctx->priv;
+
+ if (ctx->hw_device_ctx) {
+ filter->hw_device = av_buffer_ref(ctx->hw_device_ctx);
+ if (!filter->hw_device)
+ return AVERROR(ENOMEM);
+    } else {
+        av_log(ctx, AV_LOG_ERROR, "No hardware device context provided for the vpe_pp filter\n");
+        return AVERROR(EINVAL);
+    }
+
+ filter->hw_frame = av_hwframe_ctx_alloc(filter->hw_device);
+ if (!filter->hw_frame)
+ return AVERROR(ENOMEM);
+
+ hwframe_ctx = (AVHWFramesContext *)filter->hw_frame->data;
+ if (!hwframe_ctx->pool) {
+ hwframe_ctx->format = AV_PIX_FMT_VPE;
+ hwframe_ctx->sw_format = inlink->format;
+ hwframe_ctx->width = inlink->w;
+ hwframe_ctx->height = inlink->h;
+
+ if ((ret = av_hwframe_ctx_init(filter->hw_frame)) < 0) {
+ return ret;
+ }
+ }
+    inlink->hw_frames_ctx = av_buffer_ref(filter->hw_frame);
+    if (!inlink->hw_frames_ctx)
+        return AVERROR(ENOMEM);
+    return 0;
+}
+
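+/* Return the VPI pixel format matching an FFmpeg pixel format, or
+ * AVERROR(EINVAL) if the format is not supported. */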
+static int vpe_get_format(enum AVPixelFormat format)
+{
+ int i = 0;
+
+    for (i = 0; i < FF_ARRAY_ELEMS(ptable); i++) {
+ if (format == ptable[i].src)
+ return ptable[i].des;
+ }
+ return AVERROR(EINVAL);
+}
+
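+/* Configure the PP instance (VPI_CMD_PP_CONFIG) with the negotiated input
+ * properties and the user supplied options. */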
+static int vpe_pp_config_props(AVFilterLink *inlink)
+{
+ AVFilterContext *avf_ctx = inlink->dst;
+ AVHWFramesContext *hwframe_ctx;
+ AVVpeFramesContext *vpeframe_ctx;
+ VpePPFilter *ctx = avf_ctx->priv;
+ VpiPPOpition *cfg = &ctx->cfg;
+ VpiCtrlCmdParam cmd;
+ int ret = 0;
+
+ ret = vpe_pp_init_hwctx(avf_ctx, inlink);
+    if (ret < 0)
+        return ret;
+
+ hwframe_ctx = (AVHWFramesContext *)ctx->hw_frame->data;
+ vpeframe_ctx = (AVVpeFramesContext *)hwframe_ctx->hwctx;
+    /* Fill in the PP configuration */
+ cfg->w = inlink->w;
+ cfg->h = inlink->h;
+ cfg->format = vpe_get_format(inlink->format);
+ cfg->nb_outputs = ctx->nb_outputs;
+ cfg->force_10bit = ctx->force_10bit;
+ cfg->low_res = ctx->low_res;
+ cfg->frame = vpeframe_ctx->frame;
+
+ cmd.cmd = VPI_CMD_PP_CONFIG;
+ cmd.data = cfg;
+ ret = ctx->vpi->control(ctx->ctx, (void *)&cmd, NULL);
+    if (ret < 0)
+        return AVERROR_EXTERNAL;
+ return 0;
+}
+
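+/* Accept raw software pixel formats on the input and AV_PIX_FMT_VPE hardware
+ * frames on the output. */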
+static int vpe_pp_query_formats(AVFilterContext *avf_ctx)
+{
+ int ret;
+    AVFilterFormats *in_fmts = ff_make_format_list(input_pix_fmts);
+    AVFilterFormats *out_fmts;
+
+    if (!in_fmts)
+        return AVERROR(ENOMEM);
+
+    ret = ff_formats_ref(in_fmts, &avf_ctx->inputs[0]->out_formats);
+    if (ret < 0) {
+        av_log(avf_ctx, AV_LOG_ERROR, "ff_formats_ref error=%d\n", ret);
+        return ret;
+    }
+
+    out_fmts = ff_make_format_list(output_pix_fmts);
+    if (!out_fmts)
+        return AVERROR(ENOMEM);
+
+    ret = ff_formats_ref(out_fmts, &avf_ctx->outputs[0]->in_formats);
+
+ return ret;
+}
+
+#define OFFSET(x) offsetof(VpePPFilter, x)
+#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_FILTERING_PARAM)
+
+static const AVOption vpe_pp_options[] = {
+    { "outputs",    "set number of outputs",                       OFFSET(nb_outputs),  AV_OPT_TYPE_INT,    { .i64 = 1 },    1, 4, FLAGS },
+    { "low_res",    "set the resize configuration of the outputs", OFFSET(low_res),     AV_OPT_TYPE_STRING, { .str = NULL },       .flags = FLAGS },
+    { "force10bit", "upsample 8 bit input to 10 bit",              OFFSET(force_10bit), AV_OPT_TYPE_INT,    { .i64 = 0 },    0, 1, FLAGS },
+    { NULL },
+};
+
+AVFILTER_DEFINE_CLASS(vpe_pp);
+
+static const AVFilterPad vpe_pp_inputs[] = {
+ {
+ .name = "default",
+ .type = AVMEDIA_TYPE_VIDEO,
+ .filter_frame = vpe_pp_filter_frame,
+ .config_props = vpe_pp_config_props,
+ },
+ { NULL }
+};
+
+AVFilter ff_vf_pp_vpe = {
+ .name = "vpe_pp",
+    .description = NULL_IF_CONFIG_SMALL("Video post processing using VeriSilicon VPE."),
+ .priv_size = sizeof(VpePPFilter),
+ .priv_class = &vpe_pp_class,
+ .init = vpe_pp_init,
+ .uninit = vpe_pp_uninit,
+ .query_formats = vpe_pp_query_formats,
+ .inputs = vpe_pp_inputs,
+ .outputs = NULL,
+ .flags = 0,
+};
--
2.19.1