[FFmpeg-cvslog] HAM6/HAM8 support for IFF demuxer/decoder
Sebastian Vater
git at videolan.org
Sun Apr 24 13:13:54 CEST 2011
ffmpeg | branch: master | Sebastian Vater <cdgs.basty at googlemail.com> | Wed Apr 20 13:56:56 2011 +0200| [183132872a1d8bc8a32e7fd8f994fa2f1b2d6bfc] | committer: Michael Niedermayer
HAM6/HAM8 support for IFF demuxer/decoder
Signed-off-by: Michael Niedermayer <michaelni at gmx.at>
> http://git.videolan.org/gitweb.cgi/ffmpeg.git/?a=commit;h=183132872a1d8bc8a32e7fd8f994fa2f1b2d6bfc
---
libavcodec/iff.c | 276 ++++++++++++++++++++++++++++++++++++++++++++++++++---
libavformat/iff.c | 78 ++++++++++++++--
2 files changed, 333 insertions(+), 21 deletions(-)
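
For context, Amiga HAM (Hold And Modify) screen modes store per pixel either a palette index or an instruction to hold the previous pixel's color and modify a single RGB component; that is what the decode table and conversion code below implement. A minimal standalone sketch of the HAM6 rule follows (illustrative only, not part of this commit; the function name and the plain 0xRRGGBB packing are hypothetical, whereas the patch packs components to match PIX_FMT_BGR32):

#include <stdint.h>

/* Sketch of the HAM6 rule: 6 bits per pixel, high 2 bits select the
 * operation, low 4 bits carry a palette index or a new component value. */
static uint32_t ham6_pixel(uint32_t prev_rgb, unsigned code, const uint32_t *pal16)
{
    unsigned data = code & 0x0F;
    unsigned v    = (data << 4) | data;                /* expand 4 bits to 8 bits */
    switch (code >> 4) {
    case 0:  return pal16[data];                       /* take palette color */
    case 1:  return (prev_rgb & 0xFFFF00) | v;         /* hold R,G and modify B */
    case 2:  return (prev_rgb & 0x00FFFF) | (v << 16); /* hold G,B and modify R */
    default: return (prev_rgb & 0xFF00FF) | (v << 8);  /* hold R,B and modify G */
    }
}

HAM8 works the same way with 6 data bits per component; accordingly, the patch selects 6 hold bits when bpp > 6 and 4 otherwise.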
diff --git a/libavcodec/iff.c b/libavcodec/iff.c
index e64ce1e..2467781 100644
--- a/libavcodec/iff.c
+++ b/libavcodec/iff.c
@@ -30,10 +30,72 @@
#include "avcodec.h"
#include "get_bits.h"
+// TODO: masking bits
+typedef enum {
+ MASK_NONE,
+ MASK_HAS_MASK,
+ MASK_HAS_TRANSPARENT_COLOR,
+ MASK_LASSO
+} mask_type;
+
+/**
+ * Gets the actual extra data after the video properties, which contains
+ * the raw CMAP palette data beyond the IFF extra context.
+ *
+ * @param avctx the AVCodecContext where to extract raw palette data from
+ * @return pointer to raw CMAP palette data
+ */
+static av_always_inline uint8_t *get_palette_data(const AVCodecContext *const avctx) {
+ return avctx->extradata + AV_RB16(avctx->extradata);
+}
+
+/**
+ * Gets the size of CMAP palette data beyond the IFF extra context.
+ * Please note that an IFF extra context smaller than 2 bytes, or a
+ * raw palette data size below 0, is considered illegal extradata.
+ *
+ * @param avctx the AVCodecContext where to extract palette data size from
+ * @return size of raw palette data in bytes
+ */
+static av_always_inline int get_palette_size(const AVCodecContext *const avctx) {
+ return avctx->extradata_size - AV_RB16(avctx->extradata);
+}
+
+/**
+ * Gets the actual raw image data after the video properties, i.e. the
+ * image data beyond the IFF extra context.
+ *
+ * @param avpkt the AVPacket where to extract raw image data from
+ * @return pointer to raw image data
+ */
+static av_always_inline uint8_t *get_image_data(const AVPacket *const avpkt) {
+ return avpkt->data + AV_RB16(avpkt->data);
+}
+
+/**
+ * Gets the size of raw image data beyond the IFF extra context.
+ * Please note that an IFF extra context or raw image data size
+ * smaller than 2 bytes is considered an illegal packet.
+ *
+ * @param avpkt the AVPacket where to extract image data size from
+ * @return size of raw image data in bytes
+ */
+static av_always_inline int get_image_size(const AVPacket *const avpkt) {
+ return avpkt->size - AV_RB16(avpkt->data);
+}
+
typedef struct {
AVFrame frame;
int planesize;
uint8_t * planebuf;
+ uint8_t * ham_buf; ///< temporary buffer for planar to chunky conversion
+ uint32_t *ham_palbuf; ///< HAM decode table
+ unsigned compression; ///< delta compression method used
+ unsigned bpp; ///< bits per plane to decode (differs from bits_per_coded_sample if HAM)
+ unsigned ham; ///< 0 if non-HAM or number of hold bits (6 for bpp > 6, 4 otherwise)
+ unsigned flags; ///< 1 for EHB (ExtraHalfBrite), 0 otherwise
+ unsigned transparency; ///< TODO: transparency color index in palette
+ unsigned masking; ///< TODO: masking method used
int init; // 1 if buffer and palette data already initialized, 0 otherwise
} IffContext;
@@ -122,6 +184,7 @@ static av_always_inline uint32_t gray2rgb(const uint32_t x) {
static int ff_cmap_read_palette(AVCodecContext *avctx, uint32_t *pal)
{
int count, i;
+ const uint8_t *const extradata = get_palette_data(avctx);
if (avctx->bits_per_coded_sample > 8) {
av_log(avctx, AV_LOG_ERROR, "bit_per_coded_sample > 8 not supported\n");
@@ -130,10 +193,10 @@ static int ff_cmap_read_palette(AVCodecContext *avctx, uint32_t *pal)
count = 1 << avctx->bits_per_coded_sample;
// If extradata is smaller than actually needed, fill the remaining with black.
- count = FFMIN(avctx->extradata_size / 3, count);
+ count = FFMIN(get_palette_size(avctx) / 3, count);
if (count) {
for (i=0; i < count; i++) {
- pal[i] = 0xFF000000 | AV_RB24( avctx->extradata + i*3 );
+ pal[i] = 0xFF000000 | AV_RB24(extradata + i*3);
}
} else { // Create gray-scale color palette for bps < 8
count = 1 << avctx->bits_per_coded_sample;
@@ -145,15 +208,123 @@ static int ff_cmap_read_palette(AVCodecContext *avctx, uint32_t *pal)
return 0;
}
+/**
+ * Extracts the IFF extra context and updates internal
+ * decoder structures.
+ *
+ * @param avctx the AVCodecContext to update with the extracted extra context
+ * @param avpkt the AVPacket to extract extra context from or NULL to use avctx
+ * @return 0 in case of success, a negative error code otherwise
+ */
+static int extract_header(AVCodecContext *const avctx,
+ const AVPacket *const avpkt) {
+ const uint8_t *buf;
+ unsigned buf_size;
+ IffContext *s = avctx->priv_data;
+ if (avpkt) {
+ if (avpkt->size < 2)
+ return AVERROR_INVALIDDATA;
+ buf = avpkt->data;
+ buf_size = bytestream_get_be16(&buf);
+ if (buf_size <= 1 || get_image_size(avpkt) <= 1) {
+ av_log(avctx, AV_LOG_ERROR,
+ "Invalid image size received: %u -> image data offset: %d\n",
+ buf_size, get_image_size(avpkt));
+ return AVERROR_INVALIDDATA;
+ }
+ } else {
+ if (avctx->extradata_size < 2)
+ return AVERROR_INVALIDDATA;
+ buf = avctx->extradata;
+ buf_size = bytestream_get_be16(&buf);
+ if (buf_size <= 1 || get_palette_size(avctx) < 0) {
+ av_log(avctx, AV_LOG_ERROR,
+ "Invalid palette size received: %u -> palette data offset: %d\n",
+ buf_size, get_palette_size(avctx));
+ return AVERROR_INVALIDDATA;
+ }
+ }
+
+ if (buf_size > 8) {
+ s->compression = bytestream_get_byte(&buf);
+ s->bpp = bytestream_get_byte(&buf);
+ s->ham = bytestream_get_byte(&buf);
+ s->flags = bytestream_get_byte(&buf);
+ s->transparency = bytestream_get_be16(&buf);
+ s->masking = bytestream_get_byte(&buf);
+ if (s->masking == MASK_HAS_TRANSPARENT_COLOR) {
+ av_log(avctx, AV_LOG_ERROR, "Transparency not supported\n");
+ return AVERROR_PATCHWELCOME;
+ } else if (s->masking != MASK_NONE) {
+ av_log(avctx, AV_LOG_ERROR, "Masking not supported\n");
+ return AVERROR_PATCHWELCOME;
+ }
+ if (!s->bpp || s->bpp > 32) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid number of bitplanes: %u\n", s->bpp);
+ return AVERROR_INVALIDDATA;
+ } else if (s->ham >= 8) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid number of hold bits for HAM: %u\n", s->ham);
+ return AVERROR_INVALIDDATA;
+ }
+
+ av_freep(&s->ham_buf);
+ av_freep(&s->ham_palbuf);
+
+ if (s->ham) {
+ int i, count = FFMIN(get_palette_size(avctx) / 3, 1 << s->ham);
+ const uint8_t *const extradata = get_palette_data(avctx);
+ s->ham_buf = av_malloc((s->planesize * 8) + FF_INPUT_BUFFER_PADDING_SIZE);
+ if (!s->ham_buf)
+ return AVERROR(ENOMEM);
+
+ s->ham_palbuf = av_malloc((8 * (1 << s->ham) * sizeof (uint32_t)) + FF_INPUT_BUFFER_PADDING_SIZE);
+ if (!s->ham_palbuf) {
+ av_freep(&s->ham_buf);
+ return AVERROR(ENOMEM);
+ }
+
+ if (count) { // HAM with color palette attached
+ // prefill with black, copy the palette, and set the HAM take-direct-value masks to zero
+ memset(s->ham_palbuf, 0, (1 << s->ham) * 2 * sizeof (uint32_t));
+ for (i=0; i < count; i++) {
+ s->ham_palbuf[i*2+1] = AV_RL24(extradata + i*3);
+ }
+ count = 1 << s->ham;
+ } else { // HAM with grayscale color palette
+ count = 1 << s->ham;
+ for (i=0; i < count; i++) {
+ s->ham_palbuf[i*2] = 0; // take direct color value from palette
+ s->ham_palbuf[i*2+1] = av_le2ne32(gray2rgb((i * 255) >> s->ham));
+ }
+ }
+ for (i=0; i < count; i++) {
+ uint32_t tmp = i << (8 - s->ham);
+ tmp |= tmp >> s->ham;
+ s->ham_palbuf[(i+count)*2] = 0x00FFFF; // just modify blue color component
+ s->ham_palbuf[(i+count*2)*2] = 0xFFFF00; // just modify red color component
+ s->ham_palbuf[(i+count*3)*2] = 0xFF00FF; // just modify green color component
+ s->ham_palbuf[(i+count)*2+1] = tmp << 16;
+ s->ham_palbuf[(i+count*2)*2+1] = tmp;
+ s->ham_palbuf[(i+count*3)*2+1] = tmp << 8;
+ }
+ } else if (s->flags & 1) { // EHB (ExtraHalfBrite) color palette
+ av_log(avctx, AV_LOG_ERROR, "ExtraHalfBrite (EHB) mode not supported\n");
+ return AVERROR_PATCHWELCOME;
+ }
+ }
+
+ return 0;
+}
+
static av_cold int decode_init(AVCodecContext *avctx)
{
IffContext *s = avctx->priv_data;
int err;
if (avctx->bits_per_coded_sample <= 8) {
- avctx->pix_fmt = (avctx->bits_per_coded_sample < 8 ||
- avctx->extradata_size) ? PIX_FMT_PAL8
- : PIX_FMT_GRAY8;
+ avctx->pix_fmt = (avctx->bits_per_coded_sample < 8) ||
+ (avctx->extradata_size >= 2 && get_palette_size(avctx)) ? PIX_FMT_PAL8
+ : PIX_FMT_GRAY8;
} else if (avctx->bits_per_coded_sample <= 32) {
avctx->pix_fmt = PIX_FMT_BGR32;
} else {
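
A side note on the ham_palbuf table built in extract_header() above: entries are stored as (AND-mask, OR-value) pairs at indices 2*i and 2*i+1, so a single lookup handles both the "take palette color" case (mask 0) and the three "hold previous pixel, modify one component" cases. A hypothetical scalar helper showing how such a pair table is consumed (the committed code does the same thing inside the DECODE_HAM_PLANE32 macro further down):

#include <stdint.h>

/* Sketch only: apply one HAM table entry to the previous pixel value. */
static uint32_t ham_apply(uint32_t prev, unsigned idx, const uint32_t *ham_palbuf)
{
    return (prev & ham_palbuf[2 * idx]) | ham_palbuf[2 * idx + 1];
}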
@@ -167,6 +338,10 @@ static av_cold int decode_init(AVCodecContext *avctx)
if (!s->planebuf)
return AVERROR(ENOMEM);
+ s->bpp = avctx->bits_per_coded_sample;
+
+ if ((err = extract_header(avctx, NULL)) < 0)
+ return err;
s->frame.reference = 1;
return 0;
@@ -214,6 +389,39 @@ static void decodeplane32(uint32_t *dst, const uint8_t *buf, int buf_size, int p
} while (--buf_size);
}
+#define DECODE_HAM_PLANE32(x) \
+ first = buf[x] << 1; \
+ second = buf[(x)+1] << 1; \
+ delta &= pal[first++]; \
+ delta |= pal[first]; \
+ dst[x] = delta; \
+ delta &= pal[second++]; \
+ delta |= pal[second]; \
+ dst[(x)+1] = delta
+
+/**
+ * Converts one line of HAM6/8-encoded chunky buffer to 24bpp.
+ *
+ * @param dst the destination 24bpp buffer
+ * @param buf the source 8bpp chunky buffer
+ * @param pal the HAM decode table
+ * @param buf_size the plane size in bytes
+ */
+static void decode_ham_plane32(uint32_t *dst, const uint8_t *buf,
+ const uint32_t *const pal, unsigned buf_size)
+{
+ uint32_t delta = 0;
+ do {
+ uint32_t first, second;
+ DECODE_HAM_PLANE32(0);
+ DECODE_HAM_PLANE32(2);
+ DECODE_HAM_PLANE32(4);
+ DECODE_HAM_PLANE32(6);
+ buf += 8;
+ dst += 8;
+ } while (--buf_size);
+}
+
/**
* Decode one complete byterun1 encoded line.
*
@@ -250,11 +458,14 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
AVPacket *avpkt)
{
IffContext *s = avctx->priv_data;
- const uint8_t *buf = avpkt->data;
- int buf_size = avpkt->size;
+ const uint8_t *buf = avpkt->size >= 2 ? get_image_data(avpkt) : NULL;
+ const int buf_size = avpkt->size >= 2 ? get_image_size(avpkt) : 0;
const uint8_t *buf_end = buf+buf_size;
int y, plane, res;
+ if ((res = extract_header(avctx, avpkt)) < 0)
+ return res;
+
if (s->init) {
if ((res = avctx->reget_buffer(avctx, &s->frame)) < 0) {
av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
@@ -274,16 +485,26 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
for(y = 0; y < avctx->height; y++ ) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memset(row, 0, avctx->width);
- for (plane = 0; plane < avctx->bits_per_coded_sample && buf < buf_end; plane++) {
+ for (plane = 0; plane < s->bpp && buf < buf_end; plane++) {
decodeplane8(row, buf, FFMIN(s->planesize, buf_end - buf), plane);
buf += s->planesize;
}
}
+ } else if (s->ham) { // HAM to PIX_FMT_BGR32
+ for (y = 0; y < avctx->height; y++) {
+ uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
+ memset(s->ham_buf, 0, avctx->width);
+ for (plane = 0; plane < s->bpp && buf < buf_end; plane++) {
+ decodeplane8(s->ham_buf, buf, FFMIN(s->planesize, buf_end - buf), plane);
+ buf += s->planesize;
+ }
+ decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, s->planesize);
+ }
} else { // PIX_FMT_BGR32
for(y = 0; y < avctx->height; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(row, 0, avctx->width << 2);
- for (plane = 0; plane < avctx->bits_per_coded_sample && buf < buf_end; plane++) {
+ for (plane = 0; plane < s->bpp && buf < buf_end; plane++) {
decodeplane32((uint32_t *) row, buf, FFMIN(s->planesize, buf_end - buf), plane);
buf += s->planesize;
}
@@ -295,6 +516,13 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
memcpy(row, buf, FFMIN(avctx->width, buf_end - buf));
buf += avctx->width + (avctx->width % 2); // padding if odd
}
+ } else { // IFF-PBM: HAM to PIX_FMT_BGR32
+ for (y = 0; y < avctx->height; y++) {
+ uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
+ memcpy(s->ham_buf, buf, FFMIN(avctx->width, buf_end - buf));
+ buf += avctx->width + (avctx->width & 1); // padding if odd
+ decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, avctx->width);
+ }
}
*data_size = sizeof(AVFrame);
@@ -307,11 +535,13 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
AVPacket *avpkt)
{
IffContext *s = avctx->priv_data;
- const uint8_t *buf = avpkt->data;
- int buf_size = avpkt->size;
+ const uint8_t *buf = avpkt->size >= 2 ? get_image_data(avpkt) : NULL;
+ const int buf_size = avpkt->size >= 2 ? get_image_size(avpkt) : 0;
const uint8_t *buf_end = buf+buf_size;
int y, plane, res;
+ if ((res = extract_header(avctx, avpkt)) < 0)
+ return res;
if (s->init) {
if ((res = avctx->reget_buffer(avctx, &s->frame)) < 0) {
av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
@@ -331,26 +561,42 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memset(row, 0, avctx->width);
- for (plane = 0; plane < avctx->bits_per_coded_sample; plane++) {
+ for (plane = 0; plane < s->bpp; plane++) {
buf += decode_byterun(s->planebuf, s->planesize, buf, buf_end);
decodeplane8(row, s->planebuf, s->planesize, plane);
}
}
+ } else if (s->ham) { // HAM to PIX_FMT_BGR32
+ for (y = 0; y < avctx->height ; y++) {
+ uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
+ memset(s->ham_buf, 0, avctx->width);
+ for (plane = 0; plane < s->bpp; plane++) {
+ buf += decode_byterun(s->planebuf, s->planesize, buf, buf_end);
+ decodeplane8(s->ham_buf, s->planebuf, s->planesize, plane);
+ }
+ decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, s->planesize);
+ }
} else { //PIX_FMT_BGR32
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(row, 0, avctx->width << 2);
- for (plane = 0; plane < avctx->bits_per_coded_sample; plane++) {
+ for (plane = 0; plane < s->bpp; plane++) {
buf += decode_byterun(s->planebuf, s->planesize, buf, buf_end);
decodeplane32((uint32_t *) row, s->planebuf, s->planesize, plane);
}
}
}
- } else {
+ } else if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) { // IFF-PBM
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
buf += decode_byterun(row, avctx->width, buf, buf_end);
}
+ } else { // IFF-PBM: HAM to PIX_FMT_BGR32
+ for (y = 0; y < avctx->height ; y++) {
+ uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
+ buf += decode_byterun(s->ham_buf, avctx->width, buf, buf_end);
+ decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, avctx->width);
+ }
}
*data_size = sizeof(AVFrame);
@@ -364,6 +610,8 @@ static av_cold int decode_end(AVCodecContext *avctx)
if (s->frame.data[0])
avctx->release_buffer(avctx, &s->frame);
av_freep(&s->planebuf);
+ av_freep(&s->ham_buf);
+ av_freep(&s->ham_palbuf);
return 0;
}
diff --git a/libavformat/iff.c b/libavformat/iff.c
index 2494212..da4e858 100644
--- a/libavformat/iff.c
+++ b/libavformat/iff.c
@@ -29,6 +29,7 @@
* http://wiki.multimedia.cx/index.php?title=IFF
*/
+#include "libavcodec/bytestream.h"
#include "libavutil/intreadwrite.h"
#include "avformat.h"
@@ -40,6 +41,7 @@
#define ID_PBM MKTAG('P','B','M',' ')
#define ID_ILBM MKTAG('I','L','B','M')
#define ID_BMHD MKTAG('B','M','H','D')
+#define ID_CAMG MKTAG('C','A','M','G')
#define ID_CMAP MKTAG('C','M','A','P')
#define ID_FORM MKTAG('F','O','R','M')
@@ -60,6 +62,16 @@
#define PACKET_SIZE 1024
+/**
+ * This number of bytes is added at the beginning of each AVPacket,
+ * carrying additional information about video properties
+ * which has to be shared between demuxer and decoder.
+ * This number may change between frames, e.g. the demuxer might
+ * set it to the smallest possible size of 2 to indicate that there's
+ * no extradata changing in this frame.
+ */
+#define IFF_EXTRA_VIDEO_SIZE 9
+
typedef enum {
COMP_NONE,
COMP_FIB,
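
For reference, here is the layout of the shared video properties header as it is written by iff_read_header() further down and parsed back by extract_header() in libavcodec/iff.c; this is a summary only, with offsets inferred from the bytestream calls in the patch:

/* IFF_EXTRA_VIDEO_SIZE (9 byte) header prepended to extradata and packets:
 * offset 0  be16  header size (>= 2; a bare value of 2 means no properties follow)
 * offset 2  u8    compression (BITMAP_RAW or BITMAP_BYTERUN1)
 * offset 3  u8    bpp, the number of bitplanes
 * offset 4  u8    ham, 0 or the number of hold bits (6 if bpp > 6, else 4)
 * offset 5  u8    flags, bit 0 set for EHB
 * offset 6  be16  transparency color index
 * offset 8  u8    masking method
 */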
@@ -76,6 +88,12 @@ typedef struct {
uint32_t body_size;
uint32_t sent_bytes;
uint32_t audio_frame_count;
+ unsigned compression; ///< delta compression method used
+ unsigned bpp; ///< bits per plane to decode (differs from bits_per_coded_sample if HAM)
+ unsigned ham; ///< 0 if non-HAM or number of hold bits (6 for bpp > 6, 4 otherwise)
+ unsigned flags; ///< 1 for EHB (ExtraHalfBrite), 0 otherwise
+ unsigned transparency; ///< transparency color index in palette
+ unsigned masking; ///< masking method used
} IffDemuxContext;
@@ -126,8 +144,12 @@ static int iff_read_header(AVFormatContext *s,
IffDemuxContext *iff = s->priv_data;
AVIOContext *pb = s->pb;
AVStream *st;
+ uint8_t *buf;
uint32_t chunk_id, data_size;
int compression = -1;
+ uint32_t screenmode = 0;
+ unsigned transparency = 0;
+ unsigned masking = 0; // no mask
st = av_new_stream(s, 0);
if (!st)
@@ -171,12 +193,18 @@ static int iff_read_header(AVFormatContext *s,
st->codec->channels = (avio_rb32(pb) < 6) ? 1 : 2;
break;
+ case ID_CAMG:
+ if (data_size < 4)
+ return AVERROR_INVALIDDATA;
+ screenmode = avio_rb32(pb);
+ break;
+
case ID_CMAP:
- st->codec->extradata_size = data_size;
- st->codec->extradata = av_malloc(data_size);
+ st->codec->extradata_size = data_size + IFF_EXTRA_VIDEO_SIZE;
+ st->codec->extradata = av_malloc(data_size + IFF_EXTRA_VIDEO_SIZE + FF_INPUT_BUFFER_PADDING_SIZE);
if (!st->codec->extradata)
return AVERROR(ENOMEM);
- if (avio_read(pb, st->codec->extradata, data_size) < 0)
+ if (avio_read(pb, st->codec->extradata + IFF_EXTRA_VIDEO_SIZE, data_size) < 0)
return AVERROR(EIO);
break;
@@ -188,12 +216,15 @@ static int iff_read_header(AVFormatContext *s,
st->codec->height = avio_rb16(pb);
avio_skip(pb, 4); // x, y offset
st->codec->bits_per_coded_sample = avio_r8(pb);
- if (data_size >= 11) {
- avio_skip(pb, 1); // masking
+ if (data_size >= 10)
+ masking = avio_r8(pb);
+ if (data_size >= 11)
compression = avio_r8(pb);
+ if (data_size >= 14) {
+ avio_skip(pb, 1); // padding
+ transparency = avio_rb16(pb);
}
if (data_size >= 16) {
- avio_skip(pb, 3); // paddding, transparent
st->sample_aspect_ratio.num = avio_r8(pb);
st->sample_aspect_ratio.den = avio_r8(pb);
}
@@ -253,6 +284,31 @@ static int iff_read_header(AVFormatContext *s,
break;
case AVMEDIA_TYPE_VIDEO:
+ iff->compression = compression;
+ iff->bpp = st->codec->bits_per_coded_sample;
+ if ((screenmode & 0x800 /* Hold And Modify */) && iff->bpp <= 8) {
+ iff->ham = iff->bpp > 6 ? 6 : 4;
+ st->codec->bits_per_coded_sample = 24;
+ }
+ iff->flags = (screenmode & 0x80 /* Extra HalfBrite */) && iff->bpp <= 8;
+ iff->masking = masking;
+ iff->transparency = transparency;
+
+ if (!st->codec->extradata) {
+ st->codec->extradata_size = IFF_EXTRA_VIDEO_SIZE;
+ st->codec->extradata = av_malloc(IFF_EXTRA_VIDEO_SIZE + FF_INPUT_BUFFER_PADDING_SIZE);
+ if (!st->codec->extradata)
+ return AVERROR(ENOMEM);
+ }
+ buf = st->codec->extradata;
+ bytestream_put_be16(&buf, IFF_EXTRA_VIDEO_SIZE);
+ bytestream_put_byte(&buf, iff->compression);
+ bytestream_put_byte(&buf, iff->bpp);
+ bytestream_put_byte(&buf, iff->ham);
+ bytestream_put_byte(&buf, iff->flags);
+ bytestream_put_be16(&buf, iff->transparency);
+ bytestream_put_byte(&buf, iff->masking);
+
switch (compression) {
case BITMAP_RAW:
st->codec->codec_id = CODEC_ID_IFF_ILBM;
@@ -293,7 +349,15 @@ static int iff_read_packet(AVFormatContext *s,
}
interleave_stereo(sample_buffer, pkt->data, PACKET_SIZE);
} else if (st->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
- ret = av_get_packet(pb, pkt, iff->body_size);
+ uint8_t *buf;
+
+ if (av_new_packet(pkt, iff->body_size + 2) < 0) {
+ return AVERROR(ENOMEM);
+ }
+
+ buf = pkt->data;
+ bytestream_put_be16(&buf, 2);
+ ret = avio_read(pb, buf, iff->body_size);
} else {
ret = av_get_packet(pb, pkt, PACKET_SIZE);
}