[FFmpeg-devel] [PATCH v2 33/71] avcodec/mpegpicture: Cache AVFrame.data and linesize values

Andreas Rheinhardt andreas.rheinhardt at outlook.com
Sat May 11 23:50:57 EEST 2024


This avoids an indirection through the embedded AVFrame on every access
to the data pointers and linesizes and is in preparation for removing
the AVFrame from MpegEncContext.(cur|last|next)_pic altogether.

Signed-off-by: Andreas Rheinhardt <andreas.rheinhardt at outlook.com>
---
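Note for reviewers (not part of the commit message): a minimal sketch of the
access pattern this patch switches to. It only reuses code from the hunks
below (the ff_alloc_picture() caching loop and the mpeg_er.c destination
setup); no new names are introduced.

    /* ff_alloc_picture() now caches the frame's pointers and strides once: */
    for (int i = 0; i < MPV_MAX_PLANES; i++) {
        pic->data[i]     = pic->f->data[i];
        pic->linesize[i] = pic->f->linesize[i];
    }

    /* ... so hot paths such as the mpeg_er.c destination setup go from
     *     s->dest[0] = s->cur_pic.f->data[0] + ...;   (extra indirection)
     * to the cached copy stored directly in Picture: */
    s->dest[0] = s->cur_pic.data[0] + s->mb_y * 16 * s->linesize + s->mb_x * 16;

The field-picture code in mpeg12dec.c and vc1dec.c now doubles/halves these
cached linesize values rather than the ones inside the AVFrame.
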
 libavcodec/motion_est.c                  | 16 +++----
 libavcodec/mpeg12dec.c                   | 14 +++---
 libavcodec/mpeg_er.c                     |  6 +--
 libavcodec/mpegpicture.c                 | 14 ++++++
 libavcodec/mpegpicture.h                 |  4 ++
 libavcodec/mpegvideo.c                   | 10 ++--
 libavcodec/mpegvideo_dec.c               |  4 +-
 libavcodec/mpegvideo_enc.c               | 16 +++----
 libavcodec/mpegvideo_motion.c            |  4 +-
 libavcodec/mpv_reconstruct_mb_template.c | 12 ++---
 libavcodec/msmpeg4.c                     |  4 +-
 libavcodec/mss2.c                        |  4 +-
 libavcodec/svq1enc.c                     |  6 +--
 libavcodec/vc1_block.c                   |  8 ++--
 libavcodec/vc1_mc.c                      | 60 ++++++++++++------------
 libavcodec/vc1dec.c                      | 28 +++++------
 16 files changed, 114 insertions(+), 96 deletions(-)

diff --git a/libavcodec/motion_est.c b/libavcodec/motion_est.c
index b2644b5328..fcef47a623 100644
--- a/libavcodec/motion_est.c
+++ b/libavcodec/motion_est.c
@@ -703,11 +703,11 @@ static inline int h263_mv4_search(MpegEncContext *s, int mx, int my, int shift)
         offset= (s->mb_x*8 + (mx>>1)) + (s->mb_y*8 + (my>>1))*s->uvlinesize;
 
         if(s->no_rounding){
-            s->hdsp.put_no_rnd_pixels_tab[1][dxy](c->scratchpad    , s->last_pic.f->data[1] + offset, s->uvlinesize, 8);
-            s->hdsp.put_no_rnd_pixels_tab[1][dxy](c->scratchpad + 8, s->last_pic.f->data[2] + offset, s->uvlinesize, 8);
+            s->hdsp.put_no_rnd_pixels_tab[1][dxy](c->scratchpad    , s->last_pic.data[1] + offset, s->uvlinesize, 8);
+            s->hdsp.put_no_rnd_pixels_tab[1][dxy](c->scratchpad + 8, s->last_pic.data[2] + offset, s->uvlinesize, 8);
         }else{
-            s->hdsp.put_pixels_tab       [1][dxy](c->scratchpad    , s->last_pic.f->data[1] + offset, s->uvlinesize, 8);
-            s->hdsp.put_pixels_tab       [1][dxy](c->scratchpad + 8, s->last_pic.f->data[2] + offset, s->uvlinesize, 8);
+            s->hdsp.put_pixels_tab       [1][dxy](c->scratchpad    , s->last_pic.data[1] + offset, s->uvlinesize, 8);
+            s->hdsp.put_pixels_tab       [1][dxy](c->scratchpad + 8, s->last_pic.data[2] + offset, s->uvlinesize, 8);
         }
 
         dmin_sum += s->mecc.mb_cmp[1](s, s->new_pic->data[1] + s->mb_x * 8 + s->mb_y * 8 * s->uvlinesize, c->scratchpad,     s->uvlinesize, 8);
@@ -899,7 +899,7 @@ void ff_estimate_p_frame_motion(MpegEncContext * s,
     const int shift= 1+s->quarter_sample;
     int mb_type=0;
 
-    init_ref(c, s->new_pic->data, s->last_pic.f->data, NULL, 16*mb_x, 16*mb_y, 0);
+    init_ref(c, s->new_pic->data, s->last_pic.data, NULL, 16*mb_x, 16*mb_y, 0);
 
     av_assert0(s->quarter_sample==0 || s->quarter_sample==1);
     av_assert0(s->linesize == c->stride);
@@ -1070,7 +1070,7 @@ int ff_pre_estimate_p_frame_motion(MpegEncContext * s,
     int P[10][2];
     const int shift= 1+s->quarter_sample;
     const int xy= mb_x + mb_y*s->mb_stride;
-    init_ref(c, s->new_pic->data, s->last_pic.f->data, NULL, 16*mb_x, 16*mb_y, 0);
+    init_ref(c, s->new_pic->data, s->last_pic.data, NULL, 16*mb_x, 16*mb_y, 0);
 
     av_assert0(s->quarter_sample==0 || s->quarter_sample==1);
 
@@ -1495,8 +1495,8 @@ void ff_estimate_b_frame_motion(MpegEncContext * s,
     int fmin, bmin, dmin, fbmin, bimin, fimin;
     int type=0;
     const int xy = mb_y*s->mb_stride + mb_x;
-    init_ref(c, s->new_pic->data, s->last_pic.f->data,
-             s->next_pic.f->data, 16 * mb_x, 16 * mb_y, 2);
+    init_ref(c, s->new_pic->data, s->last_pic.data,
+             s->next_pic.data, 16 * mb_x, 16 * mb_y, 2);
 
     get_limits(s, 16*mb_x, 16*mb_y);
 
diff --git a/libavcodec/mpeg12dec.c b/libavcodec/mpeg12dec.c
index 4aba5651a6..c04d351e0c 100644
--- a/libavcodec/mpeg12dec.c
+++ b/libavcodec/mpeg12dec.c
@@ -1297,12 +1297,12 @@ static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
 
             for (int i = 0; i < 3; i++) {
                 if (s->picture_structure == PICT_BOTTOM_FIELD) {
-                    s->cur_pic.f->data[i] = FF_PTR_ADD(s->cur_pic.f->data[i],
-                                                       s->cur_pic.f->linesize[i]);
+                    s->cur_pic.data[i] = FF_PTR_ADD(s->cur_pic.data[i],
+                                                    s->cur_pic.linesize[i]);
                 }
-                s->cur_pic.f->linesize[i]  *= 2;
-                s->last_pic.f->linesize[i] *= 2;
-                s->next_pic.f->linesize[i] *= 2;
+                s->cur_pic.linesize[i]  *= 2;
+                s->last_pic.linesize[i] *= 2;
+                s->next_pic.linesize[i] *= 2;
             }
         }
 
@@ -1377,9 +1377,9 @@ static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
             return ret;
 
         for (int i = 0; i < 3; i++) {
-            s->cur_pic.f->data[i] = s->cur_pic_ptr->f->data[i];
+            s->cur_pic.data[i] = s->cur_pic_ptr->f->data[i];
             if (s->picture_structure == PICT_BOTTOM_FIELD)
-                s->cur_pic.f->data[i] +=
+                s->cur_pic.data[i] +=
                     s->cur_pic_ptr->f->linesize[i];
         }
     }
diff --git a/libavcodec/mpeg_er.c b/libavcodec/mpeg_er.c
index bc838b05ba..8d8b2aea92 100644
--- a/libavcodec/mpeg_er.c
+++ b/libavcodec/mpeg_er.c
@@ -84,13 +84,13 @@ static void mpeg_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
     if (!s->chroma_y_shift)
         s->bdsp.clear_blocks(s->block[6]);
 
-    s->dest[0] = s->cur_pic.f->data[0] +
+    s->dest[0] = s->cur_pic.data[0] +
                  s->mb_y * 16 * s->linesize +
                  s->mb_x * 16;
-    s->dest[1] = s->cur_pic.f->data[1] +
+    s->dest[1] = s->cur_pic.data[1] +
                  s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize +
                  s->mb_x * (16 >> s->chroma_x_shift);
-    s->dest[2] = s->cur_pic.f->data[2] +
+    s->dest[2] = s->cur_pic.data[2] +
                  s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize +
                  s->mb_x * (16 >> s->chroma_x_shift);
 
diff --git a/libavcodec/mpegpicture.c b/libavcodec/mpegpicture.c
index ca265da9fc..6da9545b50 100644
--- a/libavcodec/mpegpicture.c
+++ b/libavcodec/mpegpicture.c
@@ -174,6 +174,11 @@ int ff_alloc_picture(AVCodecContext *avctx, Picture *pic, MotionEstContext *me,
     *linesize   = pic->f->linesize[0];
     *uvlinesize = pic->f->linesize[1];
 
+    for (int i = 0; i < MPV_MAX_PLANES; i++) {
+        pic->data[i]     = pic->f->data[i];
+        pic->linesize[i] = pic->f->linesize[i];
+    }
+
     ret = alloc_picture_tables(pools, pic, mb_height);
     if (ret < 0)
         goto fail;
@@ -206,7 +211,11 @@ void ff_mpeg_unref_picture(Picture *pic)
 
     free_picture_tables(pic);
 
+    memset(pic->data,     0, sizeof(pic->data));
+    memset(pic->linesize, 0, sizeof(pic->linesize));
+
     pic->dummy         = 0;
+
     pic->field_picture = 0;
     pic->b_frame_score = 0;
     pic->reference     = 0;
@@ -248,6 +257,11 @@ int ff_mpeg_ref_picture(Picture *dst, Picture *src)
     if (ret < 0)
         goto fail;
 
+    for (int i = 0; i < MPV_MAX_PLANES; i++) {
+        dst->data[i]     = src->data[i];
+        dst->linesize[i] = src->linesize[i];
+    }
+
     update_picture_tables(dst, src);
 
     ff_refstruct_replace(&dst->hwaccel_picture_private,
diff --git a/libavcodec/mpegpicture.h b/libavcodec/mpegpicture.h
index 8e3c119acc..814f71213e 100644
--- a/libavcodec/mpegpicture.h
+++ b/libavcodec/mpegpicture.h
@@ -21,6 +21,7 @@
 #ifndef AVCODEC_MPEGPICTURE_H
 #define AVCODEC_MPEGPICTURE_H
 
+#include <stddef.h>
 #include <stdint.h>
 
 #include "avcodec.h"
@@ -57,6 +58,9 @@ typedef struct Picture {
     struct AVFrame *f;
     ThreadFrame tf;
 
+    uint8_t  *data[MPV_MAX_PLANES];
+    ptrdiff_t linesize[MPV_MAX_PLANES];
+
     int8_t *qscale_table_base;
     int8_t *qscale_table;
 
diff --git a/libavcodec/mpegvideo.c b/libavcodec/mpegvideo.c
index c8a1d6487a..c24b7207b1 100644
--- a/libavcodec/mpegvideo.c
+++ b/libavcodec/mpegvideo.c
@@ -881,8 +881,8 @@ void ff_clean_intra_table_entries(MpegEncContext *s)
 }
 
 void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
-    const int linesize   = s->cur_pic.f->linesize[0]; //not s->linesize as this would be wrong for field pics
-    const int uvlinesize = s->cur_pic.f->linesize[1];
+    const int linesize   = s->cur_pic.linesize[0]; //not s->linesize as this would be wrong for field pics
+    const int uvlinesize = s->cur_pic.linesize[1];
     const int width_of_mb = (4 + (s->avctx->bits_per_raw_sample > 8)) - s->avctx->lowres;
     const int height_of_mb = 4 - s->avctx->lowres;
 
@@ -894,9 +894,9 @@ void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
     s->block_index[5]= s->mb_stride*(s->mb_y + s->mb_height + 2) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
     //block_index is not used by mpeg2, so it is not affected by chroma_format
 
-    s->dest[0] = s->cur_pic.f->data[0] + (int)((s->mb_x - 1U) <<  width_of_mb);
-    s->dest[1] = s->cur_pic.f->data[1] + (int)((s->mb_x - 1U) << (width_of_mb - s->chroma_x_shift));
-    s->dest[2] = s->cur_pic.f->data[2] + (int)((s->mb_x - 1U) << (width_of_mb - s->chroma_x_shift));
+    s->dest[0] = s->cur_pic.data[0] + (int)((s->mb_x - 1U) <<  width_of_mb);
+    s->dest[1] = s->cur_pic.data[1] + (int)((s->mb_x - 1U) << (width_of_mb - s->chroma_x_shift));
+    s->dest[2] = s->cur_pic.data[2] + (int)((s->mb_x - 1U) << (width_of_mb - s->chroma_x_shift));
 
     if (s->picture_structure == PICT_FRAME) {
         s->dest[0] += s->mb_y *   linesize << height_of_mb;
diff --git a/libavcodec/mpegvideo_dec.c b/libavcodec/mpegvideo_dec.c
index 9b04d6a351..570a422b6f 100644
--- a/libavcodec/mpegvideo_dec.c
+++ b/libavcodec/mpegvideo_dec.c
@@ -613,8 +613,8 @@ static av_always_inline void mpeg_motion_lowres(MpegEncContext *s,
     const int h_edge_pos = s->h_edge_pos >> lowres;
     const int v_edge_pos = s->v_edge_pos >> lowres;
     int hc = s->chroma_y_shift ? (h+1-bottom_field)>>1 : h;
-    linesize   = s->cur_pic.f->linesize[0] << field_based;
-    uvlinesize = s->cur_pic.f->linesize[1] << field_based;
+    linesize   = s->cur_pic.linesize[0] << field_based;
+    uvlinesize = s->cur_pic.linesize[1] << field_based;
 
     // FIXME obviously not perfect but qpel will not work in lowres anyway
     if (s->quarter_sample) {
diff --git a/libavcodec/mpegvideo_enc.c b/libavcodec/mpegvideo_enc.c
index e7459cc5bf..2f6aaad1c7 100644
--- a/libavcodec/mpegvideo_enc.c
+++ b/libavcodec/mpegvideo_enc.c
@@ -1655,20 +1655,20 @@ static void frame_end(MpegEncContext *s)
         !s->intra_only) {
         int hshift = s->chroma_x_shift;
         int vshift = s->chroma_y_shift;
-        s->mpvencdsp.draw_edges(s->cur_pic.f->data[0],
-                                s->cur_pic.f->linesize[0],
+        s->mpvencdsp.draw_edges(s->cur_pic.data[0],
+                                s->cur_pic.linesize[0],
                                 s->h_edge_pos, s->v_edge_pos,
                                 EDGE_WIDTH, EDGE_WIDTH,
                                 EDGE_TOP | EDGE_BOTTOM);
-        s->mpvencdsp.draw_edges(s->cur_pic.f->data[1],
-                                s->cur_pic.f->linesize[1],
+        s->mpvencdsp.draw_edges(s->cur_pic.data[1],
+                                s->cur_pic.linesize[1],
                                 s->h_edge_pos >> hshift,
                                 s->v_edge_pos >> vshift,
                                 EDGE_WIDTH >> hshift,
                                 EDGE_WIDTH >> vshift,
                                 EDGE_TOP | EDGE_BOTTOM);
-        s->mpvencdsp.draw_edges(s->cur_pic.f->data[2],
-                                s->cur_pic.f->linesize[2],
+        s->mpvencdsp.draw_edges(s->cur_pic.data[2],
+                                s->cur_pic.linesize[2],
                                 s->h_edge_pos >> hshift,
                                 s->v_edge_pos >> vshift,
                                 EDGE_WIDTH >> hshift,
@@ -2268,14 +2268,14 @@ static av_always_inline void encode_mb_internal(MpegEncContext *s,
 
         if (s->mv_dir & MV_DIR_FORWARD) {
             ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 0,
-                          s->last_pic.f->data,
+                          s->last_pic.data,
                           op_pix, op_qpix);
             op_pix  = s->hdsp.avg_pixels_tab;
             op_qpix = s->qdsp.avg_qpel_pixels_tab;
         }
         if (s->mv_dir & MV_DIR_BACKWARD) {
             ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 1,
-                          s->next_pic.f->data,
+                          s->next_pic.data,
                           op_pix, op_qpix);
         }
 
diff --git a/libavcodec/mpegvideo_motion.c b/libavcodec/mpegvideo_motion.c
index 3824832f9d..9c1872aa1b 100644
--- a/libavcodec/mpegvideo_motion.c
+++ b/libavcodec/mpegvideo_motion.c
@@ -93,8 +93,8 @@ void mpeg_motion_internal(MpegEncContext *s,
     ptrdiff_t uvlinesize, linesize;
 
     v_edge_pos = s->v_edge_pos >> field_based;
-    linesize   = s->cur_pic.f->linesize[0] << field_based;
-    uvlinesize = s->cur_pic.f->linesize[1] << field_based;
+    linesize   = s->cur_pic.linesize[0] << field_based;
+    uvlinesize = s->cur_pic.linesize[1] << field_based;
     block_y_half = (field_based | is_16x8);
 
     dxy   = ((motion_y & 1) << 1) | (motion_x & 1);
diff --git a/libavcodec/mpv_reconstruct_mb_template.c b/libavcodec/mpv_reconstruct_mb_template.c
index febada041a..70dab76f73 100644
--- a/libavcodec/mpv_reconstruct_mb_template.c
+++ b/libavcodec/mpv_reconstruct_mb_template.c
@@ -82,8 +82,8 @@ void mpv_reconstruct_mb_internal(MpegEncContext *s, int16_t block[12][64],
     {
         uint8_t *dest_y, *dest_cb, *dest_cr;
         int dct_linesize, dct_offset;
-        const int linesize   = s->cur_pic.f->linesize[0]; //not s->linesize as this would be wrong for field pics
-        const int uvlinesize = s->cur_pic.f->linesize[1];
+        const int linesize   = s->cur_pic.linesize[0]; //not s->linesize as this would be wrong for field pics
+        const int uvlinesize = s->cur_pic.linesize[1];
         const int readable   = IS_ENCODER || lowres_flag || s->pict_type != AV_PICTURE_TYPE_B;
         const int block_size = lowres_flag ? 8 >> s->avctx->lowres : 8;
 
@@ -137,11 +137,11 @@ void mpv_reconstruct_mb_internal(MpegEncContext *s, int16_t block[12][64],
                 const h264_chroma_mc_func *op_pix = s->h264chroma.put_h264_chroma_pixels_tab;
 
                 if (s->mv_dir & MV_DIR_FORWARD) {
-                    MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.f->data, op_pix);
+                    MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.data, op_pix);
                     op_pix = s->h264chroma.avg_h264_chroma_pixels_tab;
                 }
                 if (s->mv_dir & MV_DIR_BACKWARD) {
-                    MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.f->data, op_pix);
+                    MPV_motion_lowres(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.data, op_pix);
                 }
             } else {
                 op_pixels_func (*op_pix)[4];
@@ -155,12 +155,12 @@ void mpv_reconstruct_mb_internal(MpegEncContext *s, int16_t block[12][64],
                     op_qpix = s->qdsp.put_no_rnd_qpel_pixels_tab;
                 }
                 if (s->mv_dir & MV_DIR_FORWARD) {
-                    ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.f->data, op_pix, op_qpix);
+                    ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_pic.data, op_pix, op_qpix);
                     op_pix  = s->hdsp.avg_pixels_tab;
                     op_qpix = s->qdsp.avg_qpel_pixels_tab;
                 }
                 if (s->mv_dir & MV_DIR_BACKWARD) {
-                    ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.f->data, op_pix, op_qpix);
+                    ff_mpv_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_pic.data, op_pix, op_qpix);
                 }
             }
 
diff --git a/libavcodec/msmpeg4.c b/libavcodec/msmpeg4.c
index 323f083f8f..f7ebb8ba89 100644
--- a/libavcodec/msmpeg4.c
+++ b/libavcodec/msmpeg4.c
@@ -282,10 +282,10 @@ int ff_msmpeg4_pred_dc(MpegEncContext *s, int n,
                 int bs = 8 >> s->avctx->lowres;
                 if(n<4){
                     wrap= s->linesize;
-                    dest = s->cur_pic.f->data[0] + (((n >> 1) + 2*s->mb_y) * bs*  wrap ) + ((n & 1) + 2*s->mb_x) * bs;
+                    dest = s->cur_pic.data[0] + (((n >> 1) + 2*s->mb_y) * bs*  wrap ) + ((n & 1) + 2*s->mb_x) * bs;
                 }else{
                     wrap= s->uvlinesize;
-                    dest = s->cur_pic.f->data[n - 3] + (s->mb_y * bs * wrap) + s->mb_x * bs;
+                    dest = s->cur_pic.data[n - 3] + (s->mb_y * bs * wrap) + s->mb_x * bs;
                 }
                 if(s->mb_x==0) a= (1024 + (scale>>1))/scale;
                 else           a= get_dc(dest-bs, wrap, scale*8>>(2*s->avctx->lowres), bs);
diff --git a/libavcodec/mss2.c b/libavcodec/mss2.c
index 6a4b5aeb59..5d52744529 100644
--- a/libavcodec/mss2.c
+++ b/libavcodec/mss2.c
@@ -382,7 +382,7 @@ static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
     MSS12Context *c   = &ctx->c;
     VC1Context *v     = avctx->priv_data;
     MpegEncContext *s = &v->s;
-    AVFrame *f;
+    Picture *f;
     int ret;
 
     ff_mpeg_flush(avctx);
@@ -431,7 +431,7 @@ static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
 
     ff_mpv_frame_end(s);
 
-    f = s->cur_pic.f;
+    f = &s->cur_pic;
 
     if (v->respic == 3) {
         ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w,      h);
diff --git a/libavcodec/svq1enc.c b/libavcodec/svq1enc.c
index 52140494bb..c75ab1800a 100644
--- a/libavcodec/svq1enc.c
+++ b/libavcodec/svq1enc.c
@@ -328,11 +328,11 @@ static int svq1_encode_plane(SVQ1EncContext *s, int plane,
         s->m.avctx                         = s->avctx;
         s->m.cur_pic_ptr                   = &s->m.cur_pic;
         s->m.last_pic_ptr              = &s->m.last_pic;
-        s->m.last_pic.f->data[0]        = ref_plane;
+        s->m.last_pic.data[0]        = ref_plane;
         s->m.linesize                      =
-        s->m.last_pic.f->linesize[0]    =
+        s->m.last_pic.linesize[0]    =
         s->m.new_pic->linesize[0]      =
-        s->m.cur_pic.f->linesize[0] = stride;
+        s->m.cur_pic.linesize[0] = stride;
         s->m.width                         = width;
         s->m.height                        = height;
         s->m.mb_width                      = block_width;
diff --git a/libavcodec/vc1_block.c b/libavcodec/vc1_block.c
index 6b5b1d0566..9cb9fd27bf 100644
--- a/libavcodec/vc1_block.c
+++ b/libavcodec/vc1_block.c
@@ -2948,7 +2948,7 @@ static void vc1_decode_skip_blocks(VC1Context *v)
 {
     MpegEncContext *s = &v->s;
 
-    if (!v->s.last_pic.f->data[0])
+    if (!v->s.last_pic.data[0])
         return;
 
     ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_width - 1, s->end_mb_y - 1, ER_MB_END);
@@ -2957,9 +2957,9 @@ static void vc1_decode_skip_blocks(VC1Context *v)
         s->mb_x = 0;
         init_block_index(v);
         update_block_index(s);
-        memcpy(s->dest[0], s->last_pic.f->data[0] + s->mb_y * 16 * s->linesize,   s->linesize   * 16);
-        memcpy(s->dest[1], s->last_pic.f->data[1] + s->mb_y *  8 * s->uvlinesize, s->uvlinesize *  8);
-        memcpy(s->dest[2], s->last_pic.f->data[2] + s->mb_y *  8 * s->uvlinesize, s->uvlinesize *  8);
+        memcpy(s->dest[0], s->last_pic.data[0] + s->mb_y * 16 * s->linesize,   s->linesize   * 16);
+        memcpy(s->dest[1], s->last_pic.data[1] + s->mb_y *  8 * s->uvlinesize, s->uvlinesize *  8);
+        memcpy(s->dest[2], s->last_pic.data[2] + s->mb_y *  8 * s->uvlinesize, s->uvlinesize *  8);
         s->first_slice_line = 0;
     }
 }
diff --git a/libavcodec/vc1_mc.c b/libavcodec/vc1_mc.c
index e24328569d..b60a48b38f 100644
--- a/libavcodec/vc1_mc.c
+++ b/libavcodec/vc1_mc.c
@@ -184,7 +184,7 @@ void ff_vc1_mc_1mv(VC1Context *v, int dir)
 
     if ((!v->field_mode ||
          (v->ref_field_type[dir] == 1 && v->cur_field_type == 1)) &&
-        !v->s.last_pic.f->data[0])
+        !v->s.last_pic.data[0])
         return;
 
     linesize   = s->cur_pic_ptr->f->linesize[0];
@@ -219,26 +219,26 @@ void ff_vc1_mc_1mv(VC1Context *v, int dir)
     }
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != v->ref_field_type[dir]) && v->second_field) {
-            srcY = s->cur_pic.f->data[0];
-            srcU = s->cur_pic.f->data[1];
-            srcV = s->cur_pic.f->data[2];
+            srcY = s->cur_pic.data[0];
+            srcU = s->cur_pic.data[1];
+            srcV = s->cur_pic.data[2];
             luty  = v->curr_luty;
             lutuv = v->curr_lutuv;
             use_ic = *v->curr_use_ic;
             interlace = 1;
         } else {
-            srcY = s->last_pic.f->data[0];
-            srcU = s->last_pic.f->data[1];
-            srcV = s->last_pic.f->data[2];
+            srcY = s->last_pic.data[0];
+            srcU = s->last_pic.data[1];
+            srcV = s->last_pic.data[2];
             luty  = v->last_luty;
             lutuv = v->last_lutuv;
             use_ic = v->last_use_ic;
             interlace = !!(s->last_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
         }
     } else {
-        srcY = s->next_pic.f->data[0];
-        srcU = s->next_pic.f->data[1];
-        srcV = s->next_pic.f->data[2];
+        srcY = s->next_pic.data[0];
+        srcU = s->next_pic.data[1];
+        srcV = s->next_pic.data[2];
         luty  = v->next_luty;
         lutuv = v->next_lutuv;
         use_ic = v->next_use_ic;
@@ -464,7 +464,7 @@ void ff_vc1_mc_4mv_luma(VC1Context *v, int n, int dir, int avg)
 
     if ((!v->field_mode ||
          (v->ref_field_type[dir] == 1 && v->cur_field_type == 1)) &&
-        !v->s.last_pic.f->data[0])
+        !v->s.last_pic.data[0])
         return;
 
     linesize = s->cur_pic_ptr->f->linesize[0];
@@ -474,18 +474,18 @@ void ff_vc1_mc_4mv_luma(VC1Context *v, int n, int dir, int avg)
 
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != v->ref_field_type[dir]) && v->second_field) {
-            srcY = s->cur_pic.f->data[0];
+            srcY = s->cur_pic.data[0];
             luty = v->curr_luty;
             use_ic = *v->curr_use_ic;
             interlace = 1;
         } else {
-            srcY = s->last_pic.f->data[0];
+            srcY = s->last_pic.data[0];
             luty = v->last_luty;
             use_ic = v->last_use_ic;
             interlace = !!(s->last_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
         }
     } else {
-        srcY = s->next_pic.f->data[0];
+        srcY = s->next_pic.data[0];
         luty = v->next_luty;
         use_ic = v->next_use_ic;
         interlace = !!(s->next_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
@@ -645,7 +645,7 @@ void ff_vc1_mc_4mv_chroma(VC1Context *v, int dir)
     int interlace;
     int uvlinesize;
 
-    if (!v->field_mode && !v->s.last_pic.f->data[0])
+    if (!v->field_mode && !v->s.last_pic.data[0])
         return;
     if (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY)
         return;
@@ -664,7 +664,7 @@ void ff_vc1_mc_4mv_chroma(VC1Context *v, int dir)
         int opp_count = get_luma_mv(v, dir, &tx, &ty);
         chroma_ref_type = v->cur_field_type ^ (opp_count > 2);
     }
-    if (v->field_mode && chroma_ref_type == 1 && v->cur_field_type == 1 && !v->s.last_pic.f->data[0])
+    if (v->field_mode && chroma_ref_type == 1 && v->cur_field_type == 1 && !v->s.last_pic.data[0])
         return;
     s->cur_pic.motion_val[1][s->block_index[0] + v->blocks_off][0] = tx;
     s->cur_pic.motion_val[1][s->block_index[0] + v->blocks_off][1] = ty;
@@ -698,21 +698,21 @@ void ff_vc1_mc_4mv_chroma(VC1Context *v, int dir)
 
     if (!dir) {
         if (v->field_mode && (v->cur_field_type != chroma_ref_type) && v->second_field) {
-            srcU = s->cur_pic.f->data[1];
-            srcV = s->cur_pic.f->data[2];
+            srcU = s->cur_pic.data[1];
+            srcV = s->cur_pic.data[2];
             lutuv = v->curr_lutuv;
             use_ic = *v->curr_use_ic;
             interlace = 1;
         } else {
-            srcU = s->last_pic.f->data[1];
-            srcV = s->last_pic.f->data[2];
+            srcU = s->last_pic.data[1];
+            srcV = s->last_pic.data[2];
             lutuv = v->last_lutuv;
             use_ic = v->last_use_ic;
             interlace = !!(s->last_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
         }
     } else {
-        srcU = s->next_pic.f->data[1];
-        srcV = s->next_pic.f->data[2];
+        srcU = s->next_pic.data[1];
+        srcV = s->next_pic.data[2];
         lutuv = v->next_lutuv;
         use_ic = v->next_use_ic;
         interlace = !!(s->next_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
@@ -880,14 +880,14 @@ void ff_vc1_mc_4mv_chroma4(VC1Context *v, int dir, int dir2, int avg)
         else
             uvsrc_y = av_clip(uvsrc_y, -8, s->avctx->coded_height >> 1);
         if (i < 2 ? dir : dir2) {
-            srcU = s->next_pic.f->data[1];
-            srcV = s->next_pic.f->data[2];
+            srcU = s->next_pic.data[1];
+            srcV = s->next_pic.data[2];
             lutuv  = v->next_lutuv;
             use_ic = v->next_use_ic;
             interlace = !!(s->next_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
         } else {
-            srcU = s->last_pic.f->data[1];
-            srcV = s->last_pic.f->data[2];
+            srcU = s->last_pic.data[1];
+            srcV = s->last_pic.data[2];
             lutuv  = v->last_lutuv;
             use_ic = v->last_use_ic;
             interlace = !!(s->last_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
@@ -1012,7 +1012,7 @@ void ff_vc1_interp_mc(VC1Context *v)
     int interlace;
     int linesize, uvlinesize;
 
-    if (!v->field_mode && !v->s.next_pic.f->data[0])
+    if (!v->field_mode && !v->s.next_pic.data[0])
         return;
 
     linesize   = s->cur_pic_ptr->f->linesize[0];
@@ -1030,9 +1030,9 @@ void ff_vc1_interp_mc(VC1Context *v)
         uvmx = uvmx + ((uvmx < 0) ? -(uvmx & 1) : (uvmx & 1));
         uvmy = uvmy + ((uvmy < 0) ? -(uvmy & 1) : (uvmy & 1));
     }
-    srcY = s->next_pic.f->data[0];
-    srcU = s->next_pic.f->data[1];
-    srcV = s->next_pic.f->data[2];
+    srcY = s->next_pic.data[0];
+    srcU = s->next_pic.data[1];
+    srcV = s->next_pic.data[2];
 
     interlace = !!(s->next_pic.f->flags & AV_FRAME_FLAG_INTERLACED);
 
diff --git a/libavcodec/vc1dec.c b/libavcodec/vc1dec.c
index 93398e3fb2..d8d58bb7eb 100644
--- a/libavcodec/vc1dec.c
+++ b/libavcodec/vc1dec.c
@@ -235,15 +235,15 @@ static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
                            v->sprite_output_frame->linesize[plane] * row;
 
             for (sprite = 0; sprite <= v->two_sprites; sprite++) {
-                uint8_t *iplane = s->cur_pic.f->data[plane];
-                int      iline  = s->cur_pic.f->linesize[plane];
+                uint8_t *iplane = s->cur_pic.data[plane];
+                int      iline  = s->cur_pic.linesize[plane];
                 int      ycoord = yoff[sprite] + yadv[sprite] * row;
                 int      yline  = ycoord >> 16;
                 int      next_line;
                 ysub[sprite] = ycoord & 0xFFFF;
                 if (sprite) {
-                    iplane = s->last_pic.f->data[plane];
-                    iline  = s->last_pic.f->linesize[plane];
+                    iplane = s->last_pic.data[plane];
+                    iline  = s->last_pic.linesize[plane];
                 }
                 next_line = FFMIN(yline + 1, (v->sprite_height >> !!plane) - 1) * iline;
                 if (!(xoff[sprite] & 0xFFFF) && xadv[sprite] == 1 << 16) {
@@ -317,12 +317,12 @@ static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
     if (ret < 0)
         return ret;
 
-    if (!s->cur_pic.f || !s->cur_pic.f->data[0]) {
+    if (!s->cur_pic.data[0]) {
         av_log(avctx, AV_LOG_ERROR, "Got no sprites\n");
         return AVERROR_UNKNOWN;
     }
 
-    if (v->two_sprites && (!s->last_pic_ptr || !s->last_pic.f->data[0])) {
+    if (v->two_sprites && (!s->last_pic_ptr || !s->last_pic.data[0])) {
         av_log(avctx, AV_LOG_WARNING, "Need two sprites, only got one\n");
         v->two_sprites = 0;
     }
@@ -340,14 +340,14 @@ static void vc1_sprite_flush(AVCodecContext *avctx)
 {
     VC1Context *v     = avctx->priv_data;
     MpegEncContext *s = &v->s;
-    AVFrame *f = s->cur_pic.f;
+    Picture *f = &s->cur_pic;
     int plane, i;
 
     /* Windows Media Image codecs have a convergence interval of two keyframes.
        Since we can't enforce it, clear to black the missing sprite. This is
        wrong but it looks better than doing nothing. */
 
-    if (f && f->data[0])
+    if (f->data[0])
         for (plane = 0; plane < (CONFIG_GRAY && s->avctx->flags & AV_CODEC_FLAG_GRAY ? 1 : 3); plane++)
             for (i = 0; i < v->sprite_height>>!!plane; i++)
                 memset(f->data[plane] + i * f->linesize[plane],
@@ -1230,9 +1230,9 @@ static int vc1_decode_frame(AVCodecContext *avctx, AVFrame *pict,
 
         v->end_mb_x = s->mb_width;
         if (v->field_mode) {
-            s->cur_pic.f->linesize[0] <<= 1;
-            s->cur_pic.f->linesize[1] <<= 1;
-            s->cur_pic.f->linesize[2] <<= 1;
+            s->cur_pic.linesize[0] <<= 1;
+            s->cur_pic.linesize[1] <<= 1;
+            s->cur_pic.linesize[2] <<= 1;
             s->linesize                      <<= 1;
             s->uvlinesize                    <<= 1;
         }
@@ -1307,9 +1307,9 @@ static int vc1_decode_frame(AVCodecContext *avctx, AVFrame *pict,
         }
         if (v->field_mode) {
             v->second_field = 0;
-            s->cur_pic.f->linesize[0] >>= 1;
-            s->cur_pic.f->linesize[1] >>= 1;
-            s->cur_pic.f->linesize[2] >>= 1;
+            s->cur_pic.linesize[0] >>= 1;
+            s->cur_pic.linesize[1] >>= 1;
+            s->cur_pic.linesize[2] >>= 1;
             s->linesize                      >>= 1;
             s->uvlinesize                    >>= 1;
             if (v->s.pict_type != AV_PICTURE_TYPE_BI && v->s.pict_type != AV_PICTURE_TYPE_B) {
-- 
2.40.1


