static int bmp_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
{
    const uint8_t *buf  = avpkt->data;
    int buf_size        = avpkt->size;
    /* ... */
    unsigned int fsize, hsize;
    /* ... */
    int i, j, n, linesize;
    const uint8_t *buf0 = buf;
    /* ... (p below refers to the output AVFrame) */
    /* BMP file header: the stream must start with the "BM" magic */
    if (bytestream_get_byte(&buf) != 'B' ||
        bytestream_get_byte(&buf) != 'M') {
        /* ... */
    }

    fsize = bytestream_get_le32(&buf);
    if (buf_size < fsize) {
        /* ... */
    }
    /* ... (two reserved 16-bit fields are skipped here) */
    hsize  = bytestream_get_le32(&buf); /* header size: offset to the pixel data */
    ihsize = bytestream_get_le32(&buf); /* DIB (info) header size */
    if (ihsize + 14 > hsize) {
        /* ... */
    }

    /* some files store only a header size in the file-size field */
    if (fsize == 14 || fsize == ihsize + 14)
        /* ... */

    /* ihsize selects between the Windows and the OS/2 header layout */
    width  = bytestream_get_le32(&buf);
    height = bytestream_get_le32(&buf);
    /* ... */
    width  = bytestream_get_le16(&buf);
    height = bytestream_get_le16(&buf);
    /* ... */

    /* the plane count must be 1 */
    if (bytestream_get_le16(&buf) != 1) {
        /* ... */
    }

    depth = bytestream_get_le16(&buf);
    /* ... */
    comp = bytestream_get_le32(&buf);
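For reference, the fixed byte offsets that the bytestream calls above walk through can be parsed outside libavcodec as well. The following standalone sketch is illustrative only, not FFmpeg code; the struct, field and helper names (bmp_hdr, rd_le32, parse_bmp_header) are my own, and it only handles the Windows-style DIB layout:

#include <stdint.h>
#include <stddef.h>

/* Illustrative sketch of the fields read above; names are hypothetical. */
struct bmp_hdr {
    uint32_t fsize;   /* declared file size    (offset  2) */
    uint32_t hsize;   /* offset to pixel data  (offset 10) */
    uint32_t ihsize;  /* DIB header size       (offset 14) */
    int32_t  width;
    int32_t  height;  /* positive: bottom-up, negative: top-down */
    uint16_t planes;  /* must be 1 */
    uint16_t depth;   /* bits per pixel: 1, 4, 8, 16, 24 or 32 */
    uint32_t comp;    /* BMP_RGB, BMP_RLE4/8 or BMP_BITFIELDS */
};

static uint32_t rd_le16(const uint8_t *p) { return p[0] | p[1] << 8; }
static uint32_t rd_le32(const uint8_t *p)
{
    return p[0] | p[1] << 8 | p[2] << 16 | (uint32_t)p[3] << 24;
}

/* Returns 0 on success, -1 on a malformed header (Windows-style DIB only). */
static int parse_bmp_header(const uint8_t *buf, size_t size, struct bmp_hdr *h)
{
    if (size < 54 || buf[0] != 'B' || buf[1] != 'M')
        return -1;
    h->fsize  = rd_le32(buf + 2);
    h->hsize  = rd_le32(buf + 10);
    h->ihsize = rd_le32(buf + 14);
    if (h->ihsize + 14 > h->hsize)
        return -1;
    h->width  = (int32_t)rd_le32(buf + 18);
    h->height = (int32_t)rd_le32(buf + 22);
    h->planes = rd_le16(buf + 26);
    h->depth  = rd_le16(buf + 28);
    h->comp   = rd_le32(buf + 30);
    return h->planes == 1 ? 0 : -1;
}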
    /* BMP_BITFIELDS stores one 32-bit channel mask per component */
    if (comp == BMP_BITFIELDS) {
        rgb[0] = bytestream_get_le32(&buf);
        rgb[1] = bytestream_get_le32(&buf);
        rgb[2] = bytestream_get_le32(&buf);
    }
    /* ... */
    switch (depth) {
    case 32:
        if (comp == BMP_BITFIELDS) {
            /* turn the byte-aligned channel masks into byte indices 0..2 */
            rgb[0] = (rgb[0] >> 15) & 3;
            rgb[1] = (rgb[1] >> 15) & 3;
            rgb[2] = (rgb[2] >> 15) & 3;
            /* the indices must form a permutation of {0, 1, 2} */
            if (rgb[0] + rgb[1] + rgb[2] != 3 ||
                rgb[0] == rgb[1] || rgb[0] == rgb[2] || rgb[1] == rgb[2]) {
                /* ... */
            }
        }
        /* ... */
        break;
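The shift by 15 is a compact way to turn byte-aligned 32-bit channel masks into little-endian byte positions, which is all the 32-bit path needs later. A quick standalone check, illustrative only and not part of the decoder:

#include <stdio.h>
#include <stdint.h>

/* Illustrative only: show how (mask >> 15) & 3 maps the usual byte-aligned
 * 32-bit channel masks onto little-endian byte indices 0..2. */
int main(void)
{
    const uint32_t masks[3] = { 0x00FF0000, 0x0000FF00, 0x000000FF };  /* R, G, B */
    for (int i = 0; i < 3; i++)
        printf("mask 0x%08X -> byte index %u\n",
               (unsigned)masks[i], (unsigned)((masks[i] >> 15) & 3));
    /* prints 2, 1 and 0: the indices used later as src[rgb[0..2]]
     * when the pixel bytes are reordered */
    return 0;
}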
    case 16:
        /* ... */
        /* map the three masks onto a known packed-RGB pixel format */
        if (rgb[0] == 0xF800 && rgb[1] == 0x07E0 && rgb[2] == 0x001F)
            avctx->pix_fmt = AV_PIX_FMT_RGB565;
        else if (rgb[0] == 0x7C00 && rgb[1] == 0x03E0 && rgb[2] == 0x001F)
            avctx->pix_fmt = AV_PIX_FMT_RGB555;
        else if (rgb[0] == 0x0F00 && rgb[1] == 0x00F0 && rgb[2] == 0x000F)
            avctx->pix_fmt = AV_PIX_FMT_RGB444;
        else {
            av_log(avctx, AV_LOG_ERROR,
                   "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
            /* ... */
        }
        break;
    case 8:
        /* with no palette after the headers, treat the data as grayscale */
        if (hsize - ihsize - 14 > 0)
            avctx->pix_fmt = AV_PIX_FMT_PAL8;
        else
            avctx->pix_fmt = AV_PIX_FMT_GRAY8;
        break;
    /* ... */
        if (hsize - ihsize - 14 > 0) {
            avctx->pix_fmt = AV_PIX_FMT_PAL8;
        } else {
            /* ... */
        }
        break;
    /* ... */
    }

    /* ... */
    buf   = buf0 + hsize;
    dsize = buf_size - hsize;

    /* in the file, every row is padded to a multiple of 4 bytes */
    n = ((avctx->width * depth) / 8 + 3) & ~3;

    if (n * avctx->height > dsize && comp != BMP_RLE4 && comp != BMP_RLE8) {
        av_log(avctx, AV_LOG_ERROR, "not enough data (%d < %d)\n",
               dsize, n * avctx->height);
        /* ... */
    }
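The "& ~3" line implements the 4-byte row alignment required by the format; the padded size is what both the bounds check and the per-row "buf += n" advance rely on. A tiny standalone illustration (not decoder code; the helper name is mine):

#include <stdio.h>

/* Illustrative only: same rounding as n = ((width * depth) / 8 + 3) & ~3,
 * i.e. the byte count of one row padded up to the next multiple of 4. */
static int bmp_stride(int width, int depth)
{
    return ((width * depth) / 8 + 3) & ~3;
}

int main(void)
{
    printf("%d\n", bmp_stride(7, 24));   /* 21 payload bytes -> 24 in the file */
    printf("%d\n", bmp_stride(10, 8));   /* 10 payload bytes -> 12 */
    printf("%d\n", bmp_stride(3, 32));   /* 12 payload bytes -> 12 (already aligned) */
    return 0;
}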
    if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
        int colors = 1 << depth;
        /* ... */
        memset(p->data[1], 0, 1024);

        /* ... */
        t = bytestream_get_le32(&buf);   /* declared palette size */
        if (t < 0 || t > (1 << depth)) {
            av_log(avctx, AV_LOG_ERROR,
                   "Incorrect number of colors - %X for bitdepth %d\n", t, depth);
            /* ... */
        }
        /* ... */
        buf = buf0 + 14 + ihsize;        /* the palette follows the DIB header */
        /* OS/2 bitmaps store 3 bytes per palette entry, Windows bitmaps 4 */
        if ((hsize - ihsize - 14) < (colors << 2)) {
            for (i = 0; i < colors; i++)
                ((uint32_t *)p->data[1])[i] = bytestream_get_le24(&buf);
        } else {
            for (i = 0; i < colors; i++)
                ((uint32_t *)p->data[1])[i] = bytestream_get_le32(&buf);
        }
        /* ... */
    }
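The palette loops rely on BMP storing each entry as B, G, R (and optionally a padding byte), so a little-endian read already yields the RGB32-style palette word that PAL8 frames carry in data[1]. A standalone illustration (not decoder code; le24 is my own helper):

#include <stdio.h>
#include <stdint.h>

/* Illustrative only: a BMP palette entry is stored as B, G, R bytes, so a
 * little-endian 24-bit read produces a 0x00RRGGBB word like the ones the
 * loops above store into the PAL8 palette. */
static uint32_t le24(const uint8_t *p)
{
    return p[0] | (uint32_t)p[1] << 8 | (uint32_t)p[2] << 16;
}

int main(void)
{
    const uint8_t entry[3] = { 0x80, 0x40, 0xC0 };  /* B=0x80, G=0x40, R=0xC0 */
    printf("0x%06X\n", (unsigned)le24(entry));      /* prints 0xC04080 */
    return 0;
}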
    /* ... (BMP_RLE4/BMP_RLE8 packets are handed to ff_msrle_decode(); raw data
     *      is unpacked below, with ptr/linesize set up so that bottom-up files
     *      are written starting from the last row) */
    switch (depth) {
    case 1:
        /* expand each bit into one PAL8 byte */
        for (i = 0; i < avctx->height; i++) {
            for (j = 0; j < n; j++) {
                ptr[j*8+0] =  buf[j] >> 7;
                ptr[j*8+1] = (buf[j] >> 6) & 1;
                ptr[j*8+2] = (buf[j] >> 5) & 1;
                ptr[j*8+3] = (buf[j] >> 4) & 1;
                ptr[j*8+4] = (buf[j] >> 3) & 1;
                ptr[j*8+5] = (buf[j] >> 2) & 1;
                ptr[j*8+6] = (buf[j] >> 1) & 1;
                ptr[j*8+7] =  buf[j]       & 1;
            }
            buf += n;
            ptr += linesize;
        }
        break;
    case 8:
    case 24:
        /* these rows can be copied verbatim */
        for (i = 0; i < avctx->height; i++) {
            memcpy(ptr, buf, n);
            buf += n;
            ptr += linesize;
        }
        break;
    case 4:
        /* split each byte into two PAL8 pixels */
        for (i = 0; i < avctx->height; i++) {
            for (j = 0; j < n; j++) {
                ptr[j*2+0] = (buf[j] >> 4) & 0xF;
                ptr[j*2+1] =  buf[j]       & 0xF;
            }
            buf += n;
            ptr += linesize;
        }
        break;
    case 16:
        /* 16-bit samples only need a little-endian to native-endian pass */
        for (i = 0; i < avctx->height; i++) {
            const uint16_t *src = (const uint16_t *) buf;
            uint16_t *dst       = (uint16_t *) ptr;
            for (j = 0; j < avctx->width; j++)
                *dst++ = av_le2ne16(*src++);
            buf += n;
            ptr += linesize;
        }
        break;
    case 32:
        /* reorder channels using the byte indices derived from the bitfields */
        for (i = 0; i < avctx->height; i++) {
            const uint8_t *src = buf;
            uint8_t *dst       = ptr;
            for (j = 0; j < avctx->width; j++) {
                dst[0] = src[rgb[2]];
                dst[1] = src[rgb[1]];
                dst[2] = src[rgb[0]];
                /* ... */
            }
            /* ... */
        }
        break;
    }
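Because every loop above advances with "ptr += linesize" regardless of orientation, the bottom-up case is handled entirely by the starting pointer and the sign of the stride. A generic sketch of that idiom (illustrative only, not the decoder's code; copy_rows is my own name):

#include <stdint.h>
#include <string.h>

/* Illustrative only: copy rows with a signed stride.  For a bottom-up BMP the
 * caller passes dst pointing at the last row and a negative stride, so the
 * same loop fills the frame from top to bottom. */
static void copy_rows(uint8_t *dst, int dst_stride,
                      const uint8_t *src, int row_bytes, int rows)
{
    for (int i = 0; i < rows; i++) {
        memcpy(dst, src, row_bytes);
        src += row_bytes;
        dst += dst_stride;   /* may be negative */
    }
}

/* e.g. for a bottom-up image:
 *   copy_rows(frame + (height - 1) * stride, -stride, filedata, row_bytes, height);
 */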