43          uint8_t *d = frame->data[0] + *y * frame->linesize[0];
44          if (*x + run >= s->width) {
45              int n = s->width - *x;
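The fragment above fills a horizontal run of 8bpp pixels, clamping the first write to the end of the current row and carrying the remainder over to the next row. A minimal standalone sketch of that row-wrapping fill, under the assumption of a plain byte buffer with `stride`-byte rows and a y that decreases (PIC stores rows bottom-up); it is not the decoder's own helper:

    #include <stdint.h>
    #include <string.h>

    /* Sketch: fill `run` pixels with `value` starting at (*x, *y), moving to
     * the previous row (y decreases) whenever a row is completed. */
    static void fill_run_8bpp(uint8_t *buf, int width, int stride,
                              int value, int run, int *x, int *y)
    {
        while (run > 0) {
            uint8_t *d = buf + *y * stride;
            if (*x + run >= width) {          /* run reaches the right edge */
                int n = width - *x;
                memset(d + *x, value, n);
                run -= n;
                *x   = 0;
                if (--*y < 0)                 /* no rows left: drop the rest */
                    return;
            } else {
                memset(d + *x, value, run);   /* run ends inside the row */
                *x += run;
                return;
            }
        }
    }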
61                        int *x, int *y, int *plane, int bits_per_plane)
64      int shift = *plane * bits_per_plane;
65      unsigned mask = ((1U << bits_per_plane) - 1) << shift;
69      int pixels_per_value = 8 / bits_per_plane;
75          for (j = 8 - bits_per_plane; j >= 0; j -= bits_per_plane) {
78              while (xl == s->width) {
84                      if (planel >= s->nb_planes)
86                      value <<= bits_per_plane;
87                      mask  <<= bits_per_plane;
90                  if (s->nb_planes == 1 &&
91                      run*pixels_per_value >= s->width &&
92                      pixels_per_value < (s->width / pixels_per_value * pixels_per_value)
94                      for (; xl < pixels_per_value; xl++) {
95                          j = (j < bits_per_plane ? 8 : j) - bits_per_plane;
99                      run -= s->width / pixels_per_value;
100                     xl = s->width / pixels_per_value * pixels_per_value;
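At depths below 8 bits per pixel each source byte packs 8/bits_per_plane pixels of a single plane, and plane number *plane owns bits [shift, shift + bits_per_plane) of every output palette index, which is what the shifted mask above expresses. A sketch of that merge in isolation (hypothetical helper using equivalent shift/mask arithmetic, assuming 1, 2 or 4 bits per plane):

    #include <stdint.h>

    /* Sketch: OR one packed source byte of ONE plane into the destination
     * palette indices.  Pixels are packed MSB-first inside `src`; plane
     * `plane` contributes bits [shift, shift + bits_per_plane) of each index. */
    static void merge_plane_byte(uint8_t *dst, int x, uint8_t src,
                                 int plane, int bits_per_plane)
    {
        int      shift = plane * bits_per_plane;
        unsigned mask  = (1U << bits_per_plane) - 1;
        int j;

        for (j = 8 - bits_per_plane; j >= 0; j -= bits_per_plane)
            dst[x++] |= ((src >> j) & mask) << shift;
    }

With bits_per_plane = 1 and four planes, for example, four such passes over the image assemble 4-bit indices in the 8-bit palettized output.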
113     [0] = { 0,  3,  5,  7 },
114     [1] = { 0,  2,  4,  6 },
115     [2] = { 0,  3,  4,  7 },
116     [3] = { 0, 11, 13, 15 },
117     [4] = { 0, 10, 12, 14 },
118     [5] = { 0, 11, 12, 15 },
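Each row of cga_mode45_index selects the four entries of the 16-colour CGA palette that a 2bpp CGA mode 4/5 screen can display; the first three rows use the low-intensity colours and the last three their bright counterparts. A sketch of expanding a 2-bit pixel through such a row, where cga16 stands in for a 16-entry CGA colour table such as ff_cga_palette:

    #include <stdint.h>

    /* Sketch: map a 2-bit CGA pixel to ARGB via one row of the mode 4/5
     * index table and a 16-entry CGA colour list. */
    static uint32_t cga_2bpp_to_argb(const uint32_t cga16[16],
                                     const uint8_t mode45_row[4],
                                     unsigned pix2)   /* 0..3 */
    {
        return 0xFF000000u | cga16[mode45_row[pix2 & 3]];
    }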
122                         void *data, int *got_frame,
128     int bits_per_plane, bpp, etype, esize, npal, pos_after_pal;
136     if (bytestream2_get_le16u(&s->g) != 0x1234)
139     s->width       = bytestream2_get_le16u(&s->g);
140     s->height      = bytestream2_get_le16u(&s->g);
142     tmp            = bytestream2_get_byteu(&s->g);
143     bits_per_plane = tmp & 0xF;
144     s->nb_planes   = (tmp >> 4) + 1;
145     bpp            = bits_per_plane * s->nb_planes;
146     if (bits_per_plane > 8 || bpp < 1 || bpp > 32) {
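The header checks shown here are: a little-endian 0x1234 magic, 16-bit width and height, then one byte whose low nibble is the bits per plane and whose high nibble is the plane count minus one, with depths above 8 bits per plane or outside 1..32 total bits rejected. A sketch of the same parsing against a plain byte buffer; the cursor type and the 4-byte skip for an assumed origin field are illustrative, not the decoder's API:

    #include <stdint.h>

    /* Minimal byte cursor standing in for a real byte reader (assumption). */
    struct cursor { const uint8_t *p, *end; };

    static unsigned get_u8(struct cursor *c)   { return c->p < c->end ? *c->p++ : 0; }
    static unsigned get_le16(struct cursor *c) { unsigned v = get_u8(c); return v | get_u8(c) << 8; }
    static void     skip(struct cursor *c, int n)
    {
        c->p += (c->end - c->p < n) ? (int)(c->end - c->p) : n;
    }

    static int parse_pic_header(struct cursor *c, int *w, int *h,
                                int *bits_per_plane, int *nb_planes)
    {
        int depth, bpp;

        if (get_le16(c) != 0x1234)           /* PC Paint magic */
            return -1;
        *w = get_le16(c);
        *h = get_le16(c);
        skip(c, 4);                          /* assumed origin/offset field */
        depth           = get_u8(c);
        *bits_per_plane = depth & 0xF;
        *nb_planes      = (depth >> 4) + 1;
        bpp             = *bits_per_plane * *nb_planes;
        if (*bits_per_plane > 8 || bpp < 1 || bpp > 32)
            return -1;                       /* depth the decoder rejects */
        return 0;
    }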
151     if (bytestream2_peek_byte(&s->g) == 0xFF || bpp == 1 || bpp == 4 || bpp == 8) {
153         etype = bytestream2_get_le16(&s->g);
154         esize = bytestream2_get_le16(&s->g);
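When the extension header is present (next byte 0xFF, or a bpp of 1, 4 or 8), two little-endian 16-bit fields follow: the extension type etype and its payload size esize, which must still fit in the remaining input before the palette is read. A sketch of that read and bounds check over a raw buffer (hypothetical helper, not the decoder's reader):

    #include <stdint.h>
    #include <stddef.h>

    /* Sketch: read etype/esize and verify the payload fits in `left` bytes. */
    static int read_extension_header(const uint8_t *p, size_t left,
                                     int *etype, int *esize)
    {
        if (left < 4)
            return -1;
        *etype = p[0] | p[1] << 8;
        *esize = p[2] | p[3] << 8;
        return (left - 4 >= (size_t)*esize) ? 0 : -1;   /* payload must fit */
    }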
166     if (s->width != avctx->width || s->height != avctx->height) {
174     memset(frame->data[0], 0, s->height * frame->linesize[0]);
176     frame->palette_has_changed = 1;
179     palette = (uint32_t*)frame->data[1];
180     if (etype == 1 && esize > 1 && bytestream2_peek_byte(&s->g) < 6) {
181         int idx = bytestream2_get_byte(&s->g);
183         for (i = 0; i < npal; i++)
185     } else if (etype == 2) {
186         npal = FFMIN(esize, 16);
187         for (i = 0; i < npal; i++) {
188             int pal_idx = bytestream2_get_byte(&s->g);
191     } else if (etype == 3) {
192         npal = FFMIN(esize, 16);
193         for (i = 0; i < npal; i++) {
194             int pal_idx = bytestream2_get_byte(&s->g);
197     } else if (etype == 4 || etype == 5) {
198         npal = FFMIN(esize / 3, 256);
199         for (i = 0; i < npal; i++) {
200             palette[i]  = bytestream2_get_be24(&s->g) << 2;
201             palette[i] |= 0xFFU << 24 | palette[i] >> 6 & 0x30303;
206             palette[0] = 0xFF000000;
207             palette[1] = 0xFFFFFFFF;
208         } else if (bpp == 2) {
210             for (i = 0; i < npal; i++)
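For etype 4 and 5 the extension payload is raw RGB triplets with 6-bit components, widened to 8 bits by the `<< 2` plus `>> 6 & 0x30303` pair on lines 200-201: each component v becomes (v << 2) | (v >> 4), so 0 maps to 0 and 63 to 255. A sketch of that scaling in isolation (hypothetical helper):

    #include <stdint.h>

    /* Sketch: widen a packed 00RRGGBB value with 6-bit components to 8-bit
     * components and add an opaque alpha. */
    static uint32_t scale_6bit_rgb(uint32_t rgb24)
    {
        uint32_t c = rgb24 << 2;        /* v * 4 for every component          */
        c |= (c >> 6) & 0x30303;        /* add back the top 2 bits (v / 16)   */
        return 0xFF000000u | c;
    }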
224     if (bytestream2_get_le16(&s->g)) {
228         int stop_size, marker, t1, t2;
231         t2 = bytestream2_get_le16(&s->g);
235         marker = bytestream2_get_byte(&s->g);
237         while (plane < s->nb_planes &&
240             val = bytestream2_get_byte(&s->g);
242                 run = bytestream2_get_byte(&s->g);
244                     run = bytestream2_get_le16(&s->g);
245                 val = bytestream2_get_byte(&s->g);
250             if (bits_per_plane == 8) {
260         if (s->nb_planes - plane > 1)
263         if (plane < s->nb_planes && x < avctx->width) {
264             int run = (y + 1) * avctx->width - x;
265             if (bits_per_plane == 8)
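The loop above walks the run-length stream one plane at a time: a plain byte is a literal with run 1, while a byte equal to the block's marker introduces an explicit run count (one byte, or a 16-bit little-endian count when that byte is 0) followed by the value to repeat. A sketch of that extraction step over a raw buffer (hypothetical helper, not the decoder's byte reader):

    #include <stdint.h>
    #include <stddef.h>

    /* Sketch: decode one (value, run) pair; returns the advanced pointer,
     * or NULL when the input is exhausted. */
    static const uint8_t *next_run(const uint8_t *p, const uint8_t *end,
                                   int marker, int *run, int *val)
    {
        if (p >= end)
            return NULL;
        *run = 1;
        *val = *p++;
        if (*val == marker && p < end) {
            *run = *p++;
            if (*run == 0 && end - p >= 2) {
                *run = p[0] | p[1] << 8;     /* 16-bit run count */
                p += 2;
            }
            if (p < end)
                *val = *p++;
        }
        return p;
    }

Each decoded pair is then handed to the 8bpp or planar fill routine depending on bits_per_plane, as the tail of the listing shows.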