#define BITSTREAM_READER_LE
#define RUNTIME_GAMMA 0

#define VGA__TAG MKTAG('V', 'G', 'A', ' ')
#define PALT_TAG MKTAG('P', 'A', 'L', 'T')
#define SHOT_TAG MKTAG('S', 'H', 'O', 'T')

#define PALETTE_COUNT 256
#define PALETTE_SIZE  (PALETTE_COUNT * 3)
#define PALETTES_MAX  256

/* from xan_huffman_decode() */
    const uint8_t * ptr = src + byte*2;
    int ptr_len = src_len - 1 - byte*2;
    uint8_t *dest_end = dest + dest_len;
    /* ... */
    while (val != 0x16) {
        idx = val - 0x17 + get_bits1(&gb) * byte;
        /* ... */
        if (dest >= dest_end)
    /* ... */
    return dest - dest_start;
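The excerpt above is a table-driven prefix-code walk: val holds the current node, each input bit selects one of the two table halves via idx = val - 0x17 + get_bits1(&gb) * byte, values below 0x16 are leaves that get emitted, and 0x16 terminates the stream. Below is a minimal standalone sketch of the same walk, reading bits LSB-first to match the BITSTREAM_READER_LE define; the layout assumptions (src[0] holds the node count, followed by the 2*count node table and then the packed bitstream) and the function name are mine, not taken from this listing.

#include <stdint.h>

/* Sketch of the table-walk decode: returns the number of bytes written,
 * dest_len if the output buffer fills, or -1 on malformed input. */
static int xan_huff_decode_sketch(uint8_t *dest, int dest_len,
                                  const uint8_t *src, int src_len)
{
    uint8_t node_count    = src[0];
    const uint8_t *table  = src + 1;                 /* 2 * node_count entries */
    const uint8_t *bits   = table + node_count * 2;  /* packed bitstream */
    int bits_len = src_len - 1 - node_count * 2;
    int bit_pos  = 0;
    unsigned val = node_count + 0x16;                /* root node */
    uint8_t *dest_start = dest, *dest_end = dest + dest_len;

    if (bits_len < 0)
        return -1;

    while (val != 0x16) {
        unsigned idx;
        int b;
        if (bit_pos >= bits_len * 8)
            return -1;
        b = (bits[bit_pos >> 3] >> (bit_pos & 7)) & 1;  /* LSB-first bit */
        bit_pos++;

        idx = val - 0x17 + b * node_count;              /* follow the edge */
        if (idx >= 2u * node_count)                     /* guards a malformed table */
            return -1;
        val = table[idx];

        if (val < 0x16) {                               /* leaf: emit byte */
            if (dest >= dest_end)
                return dest_len;
            *dest++ = (uint8_t)val;
            val = node_count + 0x16;                    /* restart at the root */
        }
    }
    return dest - dest_start;
}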
/* from xan_unpack() */
    uint8_t *dest_end = dest + dest_len;
    /* ... */
    opcode = bytestream2_get_byte(&ctx);
    /* ... */
    if ((opcode & 0x80) == 0) {
        /* ... */
        back  = ((opcode & 0x60) << 3) + bytestream2_get_byte(&ctx) + 1;
        size2 = ((opcode & 0x1c) >> 2) + 3;
    } else if ((opcode & 0x40) == 0) {
        size  = bytestream2_peek_byte(&ctx) >> 6;
        back  = (bytestream2_get_be16(&ctx) & 0x3fff) + 1;
        size2 = (opcode & 0x3f) + 4;
    } else {
        /* ... */
        back  = ((opcode & 0x10) << 12) + bytestream2_get_be16(&ctx) + 1;
        size2 = ((opcode & 0x0c) << 6) + bytestream2_get_byte(&ctx) + 5;
    }
    /* ... */
    if (dest_end - dest < size + size2 ||
        dest + size - dest_org < back ||
    /* ... */
        int finish = opcode >= 0xfc;
        size = finish ? opcode & 3 : ((opcode & 0x1f) << 2) + 4;
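Each opcode byte in xan_unpack() encodes up to three fields: a count of literal bytes to copy from the input (size), a back-reference distance into the already-decoded output (back), and a back-reference length (size2). The sketch below isolates just the field extraction visible above; the helper name, the struct, and the next8/next16 parameters (standing in for the bytestream2_get_byte / bytestream2_get_be16 reads) are illustrative, and the 0xe0 boundary for the literal-run branch is an assumption based on the opcode >= 0xfc test.

#include <stdint.h>

struct lz_fields { int size; int back; int size2; };

/* Split one opcode into (literal count, back distance, copy length). */
static struct lz_fields split_opcode_sketch(unsigned opcode,
                                            unsigned next8, unsigned next16)
{
    struct lz_fields f = { 0, 0, 0 };

    if ((opcode & 0x80) == 0) {            /* short back-reference */
        f.back  = ((opcode & 0x60) << 3) + next8 + 1;
        f.size2 = ((opcode & 0x1c) >> 2) + 3;
    } else if ((opcode & 0x40) == 0) {     /* medium back-reference */
        f.size  = next16 >> 14;            /* == peeked high byte >> 6 */
        f.back  = (next16 & 0x3fff) + 1;
        f.size2 = (opcode & 0x3f) + 4;
    } else if (opcode < 0xe0) {            /* long back-reference (assumed range) */
        f.back  = ((opcode & 0x10) << 12) + next16 + 1;
        f.size2 = ((opcode & 0x0c) << 6) + next8 + 5;
    } else {                               /* literal run; >= 0xfc also ends the stream */
        int finish = opcode >= 0xfc;
        f.size = finish ? opcode & 3 : ((opcode & 0x1f) << 2) + 4;
    }
    return f;
}

For example, split_opcode_sketch(0x6e, 0x12, 0) yields back = ((0x6e & 0x60) << 3) + 0x12 + 1 = 787 and size2 = (0x0c >> 2) + 3 = 6: copy 6 bytes starting 787 bytes behind the current write position (the literal count for this branch comes from opcode bits not shown in the excerpt). The back copy itself is presumably performed with av_memcpy_backptr(), the overlapping-copy helper listed in the reference section below.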
static void xan_wc3_output_pixel_run(XanContext *s, AVFrame *frame,
                                     const uint8_t *pixel_buffer,
                                     int x, int y, int pixel_count)
{
    /* ... */
    palette_plane = frame->data[0];
    /* ... */
    line_inc = stride - width;
    index = y * stride + x;
    /* ... */
    while (pixel_count && index < s->frame_size) {
        int count = FFMIN(pixel_count, width - current_x);
        memcpy(palette_plane + index, pixel_buffer, count);
        pixel_count  -= count;
        /* ... */
        pixel_buffer += count;
        /* ... */
        if (current_x >= width) {
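The run writer walks a padded plane: line_inc = stride - width is the padding skipped at the end of each row, and each iteration copies at most up to the right edge (width - current_x) before wrapping. Below is a minimal self-contained sketch of that pattern; the function name is mine, the excerpt's FFMIN() is spelled out as a ternary, and an extra clamp keeps the sketch from writing past the plane.

#include <string.h>
#include <stdint.h>

/* Write 'pixel_count' bytes starting at (x, y) into a plane whose rows are
 * 'stride' bytes apart, wrapping to the next row whenever the run crosses
 * the right edge. */
static void copy_run_strided(uint8_t *plane, int width, int stride,
                             int plane_size, const uint8_t *src,
                             int x, int y, int pixel_count)
{
    int line_inc  = stride - width;   /* padding to skip at each row end */
    int index     = y * stride + x;
    int current_x = x;

    while (pixel_count > 0 && index < plane_size) {
        int count = pixel_count < width - current_x ? pixel_count
                                                    : width - current_x;
        if (count > plane_size - index)          /* never write past the plane */
            count = plane_size - index;

        memcpy(plane + index, src, count);
        pixel_count -= count;
        src         += count;
        index       += count;
        current_x   += count;

        if (current_x >= width) {                /* crossed the right edge */
            index    += line_inc;
            current_x = 0;
        }
    }
}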
static void xan_wc3_copy_pixel_run(XanContext *s, AVFrame *frame,
                                   int x, int y, int pixel_count,
                                   int motion_x, int motion_y)
{
    /* ... */
    int curframe_index, prevframe_index;
    int curframe_x, prevframe_x;
    /* ... */
    uint8_t *palette_plane, *prev_palette_plane;

    if (y + motion_y < 0 || y + motion_y >= s->avctx->height ||
        x + motion_x < 0 || x + motion_x >= s->avctx->width)
    /* ... */
    palette_plane = frame->data[0];
    /* ... */
    if (!prev_palette_plane)
        prev_palette_plane = palette_plane;
    /* ... */
    line_inc = stride - width;
    curframe_index = y * stride + x;
    /* ... */
    prevframe_index = (y + motion_y) * stride + x + motion_x;
    prevframe_x = x + motion_x;

    if (prev_palette_plane == palette_plane &&
        FFABS(motion_x + width*motion_y) < pixel_count) {
    /* ... */
    while (pixel_count &&
        /* ... */
        int count = FFMIN3(pixel_count, width - curframe_x,
                           width - prevframe_x);

        memcpy(palette_plane + curframe_index,
               prev_palette_plane + prevframe_index, count);
        pixel_count     -= count;
        curframe_index  += count;
        prevframe_index += count;
        /* ... */
        prevframe_x     += count;
        /* ... */
        if (curframe_x >= width) {
            curframe_index += line_inc;
        /* ... */
        if (prevframe_x >= width) {
            prevframe_index += line_inc;
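xan_wc3_copy_pixel_run() does the same row-wrapping walk with two cursors: one in the destination frame and one displaced by (motion_x, motion_y) in the source plane, so a run can be copied from the previous frame or from earlier in the current one. The FFABS(motion_x + width*motion_y) < pixel_count test above detects the case where, within the same plane, the source and destination runs would overlap. Below is a sketch of the non-overlapping dual-cursor copy; the names are illustrative and the excerpt's bounds checks are reduced to plane-size tests, assuming the caller has validated the motion vector as the excerpt does.

#include <string.h>
#include <stdint.h>

/* Copy 'pixel_count' bytes into (x, y) of dst_plane from the position
 * displaced by (motion_x, motion_y) in src_plane, letting the source and
 * destination cursors wrap rows independently. */
static void copy_run_motion(uint8_t *dst_plane, const uint8_t *src_plane,
                            int width, int stride, int plane_size,
                            int x, int y, int pixel_count,
                            int motion_x, int motion_y)
{
    int line_inc        = stride - width;
    int curframe_index  = y * stride + x;
    int curframe_x      = x;
    int prevframe_index = (y + motion_y) * stride + x + motion_x;
    int prevframe_x     = x + motion_x;

    while (pixel_count > 0 &&
           curframe_index < plane_size && prevframe_index < plane_size) {
        /* stop at whichever edge (source or destination) comes first */
        int count = width - curframe_x;
        if (width - prevframe_x < count) count = width - prevframe_x;
        if (pixel_count < count)         count = pixel_count;

        memcpy(dst_plane + curframe_index, src_plane + prevframe_index, count);
        pixel_count     -= count;
        curframe_index  += count;
        prevframe_index += count;
        curframe_x      += count;
        prevframe_x     += count;

        if (curframe_x >= width) {        /* destination row finished */
            curframe_index += line_inc;
            curframe_x      = 0;
        }
        if (prevframe_x >= width) {       /* source row finished */
            prevframe_index += line_inc;
            prevframe_x      = 0;
        }
    }
}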
/* from xan_wc3_decode_frame() */
    int total_pixels = width * height;
    /* ... */
    int motion_x, motion_y;
    /* ... */
    const uint8_t *huffman_segment;
    /* ... */
    const uint8_t *imagedata_segment;
    int huffman_offset, size_offset, vector_offset, imagedata_offset,
    /* ... */
    if (huffman_offset   >= s->size ||
        size_offset      >= s->size ||
        vector_offset    >= s->size ||
        imagedata_offset >= s->size)
    /* ... */
    huffman_segment   = s->buf + huffman_offset;
    /* ... */
    imagedata_segment = s->buf + imagedata_offset;

    /* ... tail of the xan_huffman_decode() call */
                                  huffman_segment, s->size - huffman_offset)) < 0)
    /* ... */
    opcode_buffer_end = opcode_buffer + ret;

    if (imagedata_segment[0] == 2) {
        /* ... tail of the xan_unpack() call */
                   &imagedata_segment[1], s->size - imagedata_offset - 1);
    /* ... */
        imagedata_size   = s->size - imagedata_offset - 1;
        imagedata_buffer = &imagedata_segment[1];
    /* ... */
    while (total_pixels && opcode_buffer < opcode_buffer_end) {

        opcode = *opcode_buffer++;
        /* ... */
            size += (opcode - 10);
        /* ... */
            size = bytestream2_get_byte(&size_segment);
        /* ... */
            size = bytestream2_get_be16(&size_segment);
        /* ... */
            size = bytestream2_get_be24(&size_segment);
        /* ... */
        if (size > total_pixels)
        /* ... */
            if (imagedata_size < size)
        /* ... */
            imagedata_buffer += size;
            imagedata_size   -= size;
        /* ... */
            vector = bytestream2_get_byte(&vector_segment);
        /* ... */
        total_pixels -= size;
        y += (x + size) / width;
        x  = (x + size) % width;
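Some opcodes derive the run length from the opcode itself (size += opcode - 10 above); the rest pull 8-, 16- or 24-bit counts from the separate size segment, as the three bytestream2_get_* lines show. After each run the raster position advances by size pixels, wrapping over the right edge, which the final three lines implement. A worked instance of that update, with illustrative numbers:

/* Worked example of the position update at the bottom of the loop,
 * assuming width = 320, current x = 300, and a run of size = 100
 * (values chosen for illustration):
 *
 *   y += (x + size) / width   ->   y += (300 + 100) / 320   ->   y += 1
 *   x  = (x + size) % width   ->   x  =  400 % 320          ->   x  = 80
 *
 * so a run may spill across the right edge; the decoder simply advances
 * the raster position by 'size' pixels and lets the run writers handle
 * the per-row wrapping.
 */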
static inline unsigned mul(unsigned a, unsigned b)
{
    return (a * b) >> 16;
}

static inline unsigned pow4(unsigned a)
{
    unsigned square = mul(a, a);
    return mul(square, square);
}

static inline unsigned pow5(unsigned a)
{
    return mul(pow4(a), a);
}

/* from gamma_corr() (the RUNTIME_GAMMA path) */
    unsigned lo, hi = 0xff40, target;
    /* ... */
    in = (in << 2) | (in >> 6);
    /* ... */
    lo = target = in << 8;
    /* ... */
        unsigned mid = (lo + hi) >> 1;
        unsigned pow = pow5(mid);
        if (pow > target) hi = mid;
    /* ... */
    return (pow4((lo + hi) >> 1) + 0x80) >> 8;
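Reading mul() as a 16.16 fixed-point multiply, pow5(mid) is mid^5, so the loop binary-searches the fifth root of target = in/256 between lo and hi (hi is capped at 0xff40), and the final pow4() of that root gives target^(4/5): the runtime path applies a gamma of roughly 0.8 to the rotated input byte. A floating-point counterpart is sketched below, assuming the input is a 6-bit VGA component, for which the (in << 2) | (in >> 6) rotation is just a scale to 8 bits; the function name is illustrative.

#include <math.h>

/* Approximate floating-point counterpart of the fixed-point gamma_corr()
 * above, for a 6-bit VGA palette component v (0..63).  The fixed-point
 * version saturates slightly below 255 because of the 0xff40 cap on hi. */
static unsigned gamma_corr_float(unsigned v)
{
    unsigned in = v << 2;   /* 6-bit value scaled to 8 bits */
    return (unsigned)(pow(in / 256.0, 0.8) * 256.0 + 0.5);
}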
static const uint8_t gamma_lookup[256] = {
    0x00, 0x09, 0x10, 0x16, 0x1C, 0x21, 0x27, 0x2C,
    0x31, 0x35, 0x3A, 0x3F, 0x43, 0x48, 0x4C, 0x50,
    0x54, 0x59, 0x5D, 0x61, 0x65, 0x69, 0x6D, 0x71,
    0x75, 0x79, 0x7D, 0x80, 0x84, 0x88, 0x8C, 0x8F,
    0x93, 0x97, 0x9A, 0x9E, 0xA2, 0xA5, 0xA9, 0xAC,
    0xB0, 0xB3, 0xB7, 0xBA, 0xBE, 0xC1, 0xC5, 0xC8,
    0xCB, 0xCF, 0xD2, 0xD5, 0xD9, 0xDC, 0xDF, 0xE3,
    0xE6, 0xE9, 0xED, 0xF0, 0xF3, 0xF6, 0xFA, 0xFD,
    0x03, 0x0B, 0x12, 0x18, 0x1D, 0x23, 0x28, 0x2D,
    0x32, 0x36, 0x3B, 0x40, 0x44, 0x49, 0x4D, 0x51,
    0x56, 0x5A, 0x5E, 0x62, 0x66, 0x6A, 0x6E, 0x72,
    0x76, 0x7A, 0x7D, 0x81, 0x85, 0x89, 0x8D, 0x90,
    0x94, 0x98, 0x9B, 0x9F, 0xA2, 0xA6, 0xAA, 0xAD,
    0xB1, 0xB4, 0xB8, 0xBB, 0xBF, 0xC2, 0xC5, 0xC9,
    0xCC, 0xD0, 0xD3, 0xD6, 0xDA, 0xDD, 0xE0, 0xE4,
    0xE7, 0xEA, 0xED, 0xF1, 0xF4, 0xF7, 0xFA, 0xFD,
    0x05, 0x0D, 0x13, 0x19, 0x1F, 0x24, 0x29, 0x2E,
    0x33, 0x38, 0x3C, 0x41, 0x45, 0x4A, 0x4E, 0x52,
    0x57, 0x5B, 0x5F, 0x63, 0x67, 0x6B, 0x6F, 0x73,
    0x77, 0x7B, 0x7E, 0x82, 0x86, 0x8A, 0x8D, 0x91,
    0x95, 0x99, 0x9C, 0xA0, 0xA3, 0xA7, 0xAA, 0xAE,
    0xB2, 0xB5, 0xB9, 0xBC, 0xBF, 0xC3, 0xC6, 0xCA,
    0xCD, 0xD0, 0xD4, 0xD7, 0xDA, 0xDE, 0xE1, 0xE4,
    0xE8, 0xEB, 0xEE, 0xF1, 0xF5, 0xF8, 0xFB, 0xFD,
    0x07, 0x0E, 0x15, 0x1A, 0x20, 0x25, 0x2A, 0x2F,
    0x34, 0x39, 0x3D, 0x42, 0x46, 0x4B, 0x4F, 0x53,
    0x58, 0x5C, 0x60, 0x64, 0x68, 0x6C, 0x70, 0x74,
    0x78, 0x7C, 0x7F, 0x83, 0x87, 0x8B, 0x8E, 0x92,
    0x96, 0x99, 0x9D, 0xA1, 0xA4, 0xA8, 0xAB, 0xAF,
    0xB2, 0xB6, 0xB9, 0xBD, 0xC0, 0xC4, 0xC7, 0xCB,
    0xCE, 0xD1, 0xD5, 0xD8, 0xDB, 0xDF, 0xE2, 0xE5,
    0xE9, 0xEC, 0xEF, 0xF2, 0xF6, 0xF9, 0xFC, 0xFD
};
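Since RUNTIME_GAMMA is defined to 0 at the top of the file, this table presumably serves as the precomputed form of the correction above, indexed directly by the raw palette byte (the low six bits being the VGA component, the top two acting as the fine adjustment produced by the rotation in gamma_corr()). A hypothetical helper showing that use; the wiring between the two paths is not part of this listing.

#include <stdint.h>

/* Hypothetical lookup helper: with RUNTIME_GAMMA == 0, a palette byte is
 * corrected by indexing the 256-entry table instead of computing the
 * fixed-point power.  'table' would be gamma_lookup. */
static uint8_t corrected_component(const uint8_t table[256], uint8_t pal_byte)
{
    return table[pal_byte];   /* the table covers all 256 byte values */
}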
static int xan_decode_frame(AVCodecContext *avctx, void *data,
                            int *got_frame, AVPacket *avpkt)
{
    /* ... */
    int ret, buf_size = avpkt->size;
    /* ... */
    tag  = bytestream2_get_le32(&ctx);
    size = bytestream2_get_be32(&ctx);
    /* ... */
        int r = gamma_corr(bytestream2_get_byteu(&ctx));
        int g = gamma_corr(bytestream2_get_byteu(&ctx));
        int b = gamma_corr(bytestream2_get_byteu(&ctx));
        /* ... */
        *tmpptr++ = (0xFFU << 24) | (r << 16) | (g << 8) | b;
    /* ... */
        new_pal = bytestream2_get_le32(&ctx);
    /* ... */
    memcpy(frame->data[1],
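The palette chunk handling reads PALETTE_COUNT (256) R,G,B byte triplets, gamma-corrects each component, and packs it into an opaque 32-bit ARGB word; the finished 256-entry table is what later gets copied into frame->data[1] for the paletted frame (see AV_PIX_FMT_PAL8 in the reference list below). A self-contained sketch of that packing follows; the function name and the 'corr' table parameter (standing in for gamma_corr()/gamma_lookup) are illustrative.

#include <stdint.h>

/* Build the 256-entry ARGB palette from 768 raw palette bytes
 * (PALETTE_COUNT / PALETTE_SIZE in the defines at the top). */
static void build_pal8_palette_sketch(uint32_t out[256],
                                      const uint8_t rgb[256 * 3],
                                      const uint8_t corr[256])
{
    for (int i = 0; i < 256; i++) {
        unsigned r = corr[rgb[3 * i + 0]];
        unsigned g = corr[rgb[3 * i + 1]];
        unsigned b = corr[rgb[3 * i + 2]];
        out[i] = (0xFFU << 24) | (r << 16) | (g << 8) | b;   /* opaque ARGB */
    }
}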
static int xan_wc3_decode_frame(XanContext *s, AVFrame *frame)
#define AVERROR_INVALIDDATA
Invalid data found when processing input.
AVFrame
This structure describes decoded (raw) audio or video data.
mem.h
Memory handling functions.
static av_cold int init(AVCodecContext *avctx)
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
static av_always_inline void bytestream2_init(GetByteContext *g, const uint8_t *buf, int buf_size)
static void xan_unpack(uint8_t *dest, int dest_len, const uint8_t *src, int src_len)
unpack simple compression
static const uint8_t gamma_lookup[256]
This is a gamma correction that xan3 applies to all palette entries.
static void decode(AVCodecContext *dec_ctx, AVPacket *pkt, AVFrame *frame, FILE *outfile)
void avpriv_request_sample(void *avc, const char *msg, ...)
Log a generic warning message about a missing feature.
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
AV_PIX_FMT_PAL8
8 bits with AV_PIX_FMT_RGB32 palette
int av_frame_ref(AVFrame *dst, const AVFrame *src)
Set up a new reference to the data described by the source frame.
AVCodec ff_xan_wc3_decoder
void av_memcpy_backptr(uint8_t *dst, int back, int cnt)
Overlapping memcpy() implementation.
get_bits.h
bitstream reader API header.
void * av_realloc_array(void *ptr, size_t nmemb, size_t size)
Allocate, reallocate, or free an array.
static int get_bits_left(GetBitContext *gb)
static int xan_huffman_decode(uint8_t *dest, int dest_len, const uint8_t *src, int src_len)
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
static av_cold int xan_decode_init(AVCodecContext *avctx)
static av_always_inline void bytestream2_skip(GetByteContext *g, unsigned int size)
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g. extended_data.
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification. ...
static av_always_inline unsigned int bytestream2_get_buffer(GetByteContext *g, uint8_t *dst, unsigned int size)
static av_always_inline unsigned int bytestream2_get_bytes_left(GetByteContext *g)
const char * name
Name of the codec implementation.
int width
picture width / height.
#define FFABS(a)
Absolute value, Note, INT_MIN / INT64_MIN result in undefined behavior as they are not representable ...
static void xan_wc3_output_pixel_run(XanContext *s, AVFrame *frame, const uint8_t *pixel_buffer, int x, int y, int pixel_count)
avcodec.h
Libavcodec external API header.
static av_cold int xan_decode_end(AVCodecContext *avctx)
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
static int init_get_bits8(GetBitContext *s, const uint8_t *buffer, int byte_size)
Initialize GetBitContext.
AVCodecContext
main external API structure.
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
static unsigned int get_bits1(GetBitContext *s)
static av_const int sign_extend(int val, unsigned bits)
void av_frame_unref(AVFrame *frame)
Unreference all the buffers referenced by frame and reset the frame fields.
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
common internal api header.
static int xan_decode_frame(AVCodecContext *avctx, void *data, int *got_frame, AVPacket *avpkt)
static void xan_wc3_copy_pixel_run(XanContext *s, AVFrame *frame, int x, int y, int pixel_count, int motion_x, int motion_y)
AVPacket
This structure stores compressed data.
#define AV_GET_BUFFER_FLAG_REF
The decoder will keep a reference to the frame and may reuse it later.
#define AV_CODEC_CAP_DR1
Codec uses get_buffer() for allocating buffers and supports custom allocators.