                  UINT32_MAX > INT_MAX ? UINT32_MAX : INT_MAX);
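This stray line is the last argument of an av_reduce() call: the bound applied when the user-supplied tick rate is reduced before being written into the 32-bit VPS/VUI timing fields. A plausible reconstruction of the surrounding call (the num/den temporaries and the ctx->tick_rate field are assumptions, not part of this excerpt):

        int num, den;

        av_reduce(&num, &den, ctx->tick_rate.num, ctx->tick_rate.den,
                  UINT32_MAX > INT_MAX ? UINT32_MAX : INT_MAX);
        /* num and den then become vps_num_units_in_tick / vps_time_scale
           (and the corresponding vui_* fields), which are 32-bit syntax
           elements. */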
    int crop_unit_x, crop_unit_y;
            { 1, 1 },    { 12, 11 }, { 10, 11 }, { 16, 11 },
            { 40, 33 },  { 24, 11 }, { 20, 11 }, { 32, 11 },
            { 80, 33 },  { 18, 11 }, { 15, 11 }, { 64, 33 },
            { 160, 99 }, { 4, 3 },   { 3, 2 },   { 2, 1 },

        /* ... */

            if (num == sar_idc[i].num &&
                den == sar_idc[i].den)
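The rows above are the table E-1 sample aspect ratios that the reduced user-supplied ratio is matched against. For orientation, here is a minimal, self-contained sketch of that lookup; it is not the filter's own code, and the helper name is made up:

    #include <libavutil/rational.h>

    /* Hypothetical helper: return the aspect_ratio_idc for a SAR, or 255
     * (EXTENDED_SAR) if it matches no table E-1 entry. */
    static int find_aspect_ratio_idc(AVRational sar)
    {
        static const AVRational sar_idc[] = {
            { 0, 0 }, /* unspecified */
            { 1, 1 },    { 12, 11 }, { 10, 11 }, { 16, 11 },
            { 40, 33 },  { 24, 11 }, { 20, 11 }, { 32, 11 },
            { 80, 33 },  { 18, 11 }, { 15, 11 }, { 64, 33 },
            { 160, 99 }, { 4, 3 },   { 3, 2 },   { 2, 1 },
        };
        int num, den, i;

        /* Reduce to lowest terms; 65535 is the limit of the 16-bit
         * sar_width/sar_height fields used when the extended SAR is
         * written instead of a table index. */
        av_reduce(&num, &den, sar.num, sar.den, 65535);

        for (i = 1; i < (int)(sizeof(sar_idc) / sizeof(sar_idc[0])); i++) {
            if (num == sar_idc[i].num && den == sar_idc[i].den)
                return i;
        }
        return 255; /* EXTENDED_SAR */
    }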
#define SET_OR_INFER(field, value, present_flag, infer) do { \

        /* ... */

        } else if (!present_flag) \

    /* ... */

                  UINT32_MAX > INT_MAX ? UINT32_MAX : INT_MAX);
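Only the opening line and the else branch of SET_OR_INFER appear above (the trailing line belongs to a separate av_reduce() call). Judging by its name and arguments, the full macro presumably looks something like this; the need_vui bookkeeping is an assumption, not taken from the file:

    #define SET_OR_INFER(field, value, present_flag, infer) do { \
            if (value >= 0) {                                     \
                field = value;                                    \
                need_vui = 1; /* assumed: mark VUI as required */ \
            } else if (!present_flag)                             \
                field = infer;                                    \
        } while (0)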
#define CROP(border, unit) do { \
        if (ctx->crop_ ## border >= 0) { \
            if (ctx->crop_ ## border % unit != 0) { \
                av_log(bsf, AV_LOG_ERROR, "Invalid value for crop_%s: " \
                       "must be a multiple of %d.\n", #border, unit); \
                return AVERROR(EINVAL); \
            } \
            sps->conf_win_ ## border ## _offset = \
                ctx->crop_ ## border / unit; \
            sps->conformance_window_flag = 1; \
        } \
    } while (0)

    CROP(left,   crop_unit_x);
    CROP(right,  crop_unit_x);
    CROP(top,    crop_unit_y);
    CROP(bottom, crop_unit_y);
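crop_unit_x and crop_unit_y, declared near the top of this function, express the conformance-window offsets in chroma sample units. Their derivation is not part of this excerpt; a plausible sketch based on the H.265 SubWidthC/SubHeightC tables would be:

    if (sps->separate_colour_plane_flag || sps->chroma_format_idc == 0) {
        crop_unit_x = 1;
        crop_unit_y = 1;
    } else {
        crop_unit_x = 1 + (sps->chroma_format_idc < 3); /* 2 for 4:2:0 and 4:2:2 */
        crop_unit_y = 1 + (sps->chroma_format_idc < 2); /* 2 for 4:2:0 only */
    }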
        int pic_type = 0, temporal_id = 8, layer_id = 0;

        for (i = 0; i < au->nb_units; i++) {

        /* ... */

                .nuh_layer_id          = layer_id,
                .nuh_temporal_id_plus1 = temporal_id + 1,

    /* ... */

    for (i = 0; i < au->nb_units; i++) {

    /* ... */

    for (i = 0; i < au->nb_units; i++) {
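The two designated initializers above set the NAL unit header of the access unit delimiter that the filter can insert when aud=insert. A hedged sketch of how such an AUD might be assembled and inserted with the CBS helpers (variable names and error handling are illustrative, not copied from the file):

        H265RawAUD aud = {
            .nal_unit_header = {
                .nal_unit_type         = HEVC_NAL_AUD,
                .nuh_layer_id          = layer_id,
                .nuh_temporal_id_plus1 = temporal_id + 1,
            },
            .pic_type = pic_type,
        };

        err = ff_cbs_insert_unit_content(ctx->cbc, au, 0, HEVC_NAL_AUD,
                                         &aud, NULL);
        if (err < 0) {
            av_log(bsf, AV_LOG_ERROR, "Failed to insert AUD.\n");
            goto fail;
        }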
#define OFFSET(x) offsetof(H265MetadataContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_BSF_PARAM)

    { "aud", "Access Unit Delimiter NAL units",
        OFFSET(aud), AV_OPT_TYPE_INT,
        { .i64 = PASS }, PASS, REMOVE, FLAGS, "aud" },
    { "pass",   NULL, 0, AV_OPT_TYPE_CONST,
        { .i64 = PASS   }, .flags = FLAGS, .unit = "aud" },
    { "insert", NULL, 0, AV_OPT_TYPE_CONST,
        { .i64 = INSERT }, .flags = FLAGS, .unit = "aud" },
    { "remove", NULL, 0, AV_OPT_TYPE_CONST,
        { .i64 = REMOVE }, .flags = FLAGS, .unit = "aud" },

    { "sample_aspect_ratio", "Set sample aspect ratio (table E-1)",
        OFFSET(sample_aspect_ratio), AV_OPT_TYPE_RATIONAL,
        { .dbl = 0.0 }, 0, 65535, FLAGS },
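The PASS, INSERT and REMOVE constants used by the aud option above are not part of this excerpt; presumably they come from a small enum near the top of the file, along the lines of:

    enum {
        PASS,
        INSERT,
        REMOVE,
    };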
    { "video_format", "Set video format (table E-2)",
        OFFSET(video_format), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 7, FLAGS },
    { "video_full_range_flag", "Set video full range flag",
        OFFSET(video_full_range_flag), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 1, FLAGS },
    { "colour_primaries", "Set colour primaries (table E-3)",
        OFFSET(colour_primaries), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 255, FLAGS },
    { "transfer_characteristics", "Set transfer characteristics (table E-4)",
        OFFSET(transfer_characteristics), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 255, FLAGS },
    { "matrix_coefficients", "Set matrix coefficients (table E-5)",
        OFFSET(matrix_coefficients), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 255, FLAGS },

    { "chroma_sample_loc_type", "Set chroma sample location type (figure E-1)",
        OFFSET(chroma_sample_loc_type), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, 6, FLAGS },

    { "tick_rate",
        "Set VPS and VUI tick rate (num_units_in_tick / time_scale)",
        OFFSET(tick_rate), AV_OPT_TYPE_RATIONAL,
        { .dbl = 0.0 }, 0, UINT_MAX, FLAGS },
    { "num_ticks_poc_diff_one",
        "Set VPS and VUI number of ticks per POC increment",
        OFFSET(num_ticks_poc_diff_one), AV_OPT_TYPE_INT,
        { .i64 = -1 }, -1, INT_MAX, FLAGS },
    { "crop_left", "Set left border crop offset",
        /* ... */ },
    { "crop_right", "Set right border crop offset",
        /* ... */ },
    { "crop_top", "Set top border crop offset",
        /* ... */ },
    { "crop_bottom", "Set bottom border crop offset",
        /* ... */ },
    .name       = "hevc_metadata",
    /* ... */
    .priv_class = &h265_metadata_class,
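Since the listing ends with the bitstream filter's registration, here is a hedged sketch of how the filter could be driven from the libavcodec BSF API; the option values are arbitrary examples and are not taken from the file:

    #include <libavcodec/avcodec.h>
    #include <libavcodec/bsf.h>   /* newer trees; older ones declare av_bsf_* in avcodec.h */
    #include <libavutil/opt.h>

    /* Hypothetical driver: run one packet of an HEVC stream through
     * hevc_metadata with a few of the options from the table above. */
    static int apply_hevc_metadata(const AVCodecParameters *par_in, AVPacket *pkt)
    {
        const AVBitStreamFilter *f = av_bsf_get_by_name("hevc_metadata");
        AVBSFContext *bsf = NULL;
        int err;

        if (!f)
            return AVERROR_BSF_NOT_FOUND;
        if ((err = av_bsf_alloc(f, &bsf)) < 0)
            return err;

        if ((err = avcodec_parameters_copy(bsf->par_in, par_in)) < 0)
            goto end;

        /* Option names correspond to the AVOption table above. */
        av_opt_set(bsf->priv_data, "aud", "insert", 0);
        av_opt_set(bsf->priv_data, "sample_aspect_ratio", "16/11", 0);
        av_opt_set(bsf->priv_data, "crop_right", "8", 0);

        if ((err = av_bsf_init(bsf)) < 0)
            goto end;

        /* One packet in, one packet out; real code must also handle
         * AVERROR(EAGAIN) and AVERROR_EOF when draining. */
        if ((err = av_bsf_send_packet(bsf, pkt)) >= 0)
            err = av_bsf_receive_packet(bsf, pkt);

    end:
        av_bsf_free(&bsf);
        return err;
    }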