diff --git a/lib/upipe-av/upipe_av.c b/lib/upipe-av/upipe_av.c
index 11756db9f..1883bded6 100644
--- a/lib/upipe-av/upipe_av.c
+++ b/lib/upipe-av/upipe_av.c
@@ -29,10 +29,14 @@
 #include "upipe/udeal.h"
 #include "upipe/uprobe.h"
+#include "upipe/upipe.h"
+#include "upipe/uref_pic.h"
+#include "upipe/uref_pic_flow.h"
 #include "upipe-av/upipe_av.h"
 
 #include
 #include
+#include <libavutil/mastering_display_metadata.h>
 
 #include "upipe_av_internal.h"
 
@@ -119,3 +123,71 @@ void upipe_av_clean(void)
     if (logprobe)
         uprobe_release(logprobe);
 }
+
+/** @This sets frame properties from flow definition and uref packets.
+ *
+ * @param upipe upipe used for logging
+ * @param frame av frame to setup
+ * @param flow_def flow definition packet
+ * @param uref uref structure
+ * @return an error code
+ */
+int upipe_av_set_frame_properties(struct upipe *upipe,
+                                  AVFrame *frame,
+                                  struct uref *flow_def,
+                                  struct uref *uref)
+{
+    frame->key_frame = ubase_check(uref_pic_get_key(uref));
+    frame->interlaced_frame = !ubase_check(uref_pic_get_progressive(uref));
+    frame->top_field_first = ubase_check(uref_pic_get_tff(uref));
+    frame->color_range = ubase_check(uref_pic_flow_get_full_range(
+            flow_def)) ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG;
+
+    int val;
+    if (ubase_check(uref_pic_flow_get_colour_primaries_val(flow_def, &val)))
+        frame->color_primaries = val;
+    if (ubase_check(uref_pic_flow_get_transfer_characteristics_val(flow_def, &val)))
+        frame->color_trc = val;
+    if (ubase_check(uref_pic_flow_get_matrix_coefficients_val(flow_def, &val)))
+        frame->colorspace = val;
+
+    uint64_t max_cll;
+    uint64_t max_fall;
+    if (ubase_check(uref_pic_flow_get_max_cll(flow_def, &max_cll)) &&
+        ubase_check(uref_pic_flow_get_max_fall(flow_def, &max_fall))) {
+        AVContentLightMetadata *clm =
+            av_content_light_metadata_create_side_data(frame);
+        if (!clm) {
+            upipe_err(upipe, "unable to create content light metadata");
+            return UBASE_ERR_EXTERNAL;
+        }
+        clm->MaxCLL = max_cll;
+        clm->MaxFALL = max_fall;
+    }
+
+    struct uref_pic_mastering_display mdcv;
+    if (ubase_check(uref_pic_flow_get_mastering_display(flow_def, &mdcv))) {
+        AVMasteringDisplayMetadata *mdm =
+            av_mastering_display_metadata_create_side_data(frame);
+        if (!mdm) {
+            upipe_err(upipe, "unable to create mastering display metadata");
+            return UBASE_ERR_EXTERNAL;
+        }
+        int chroma = 50000;
+        int luma = 10000;
+        mdm->display_primaries[0][0] = av_make_q(mdcv.red_x, chroma);
+        mdm->display_primaries[0][1] = av_make_q(mdcv.red_y, chroma);
+        mdm->display_primaries[1][0] = av_make_q(mdcv.green_x, chroma);
+        mdm->display_primaries[1][1] = av_make_q(mdcv.green_y, chroma);
+        mdm->display_primaries[2][0] = av_make_q(mdcv.blue_x, chroma);
+        mdm->display_primaries[2][1] = av_make_q(mdcv.blue_y, chroma);
+        mdm->white_point[0] = av_make_q(mdcv.white_x, chroma);
+        mdm->white_point[1] = av_make_q(mdcv.white_y, chroma);
+        mdm->has_primaries = 1;
+        mdm->max_luminance = av_make_q(mdcv.max_luminance, luma);
+        mdm->min_luminance = av_make_q(mdcv.min_luminance, luma);
+        mdm->has_luminance = 1;
+    }
+
+    return UBASE_ERR_NONE;
+}
diff --git a/lib/upipe-av/upipe_av_internal.h b/lib/upipe-av/upipe_av_internal.h
index d333be123..dc48e3d71 100644
--- a/lib/upipe-av/upipe_av_internal.h
+++ b/lib/upipe-av/upipe_av_internal.h
@@ -42,6 +42,11 @@
 /** extra hardware frames for decode and filter */
 #define UPIPE_AV_EXTRA_HW_FRAMES 32
 
+/** @hidden */
+struct uref;
+/** @hidden */
+struct upipe;
+
 /** @hidden */
 enum AVCodecID;
 #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(54, 51, 100)
@@ -124,4 +129,17 @@ const char *upipe_av_to_flow_def(enum AVCodecID id);
 */
 enum AVCodecID upipe_av_from_flow_def(const char *flow_def);
 
+/** @This sets frame properties from flow definition and uref packets.
+ *
+ * @param upipe upipe used for logging
+ * @param frame av frame to setup
+ * @param flow_def flow definition packet
+ * @param uref uref structure
+ * @return an error code
+ */
+int upipe_av_set_frame_properties(struct upipe *upipe,
+                                  AVFrame *frame,
+                                  struct uref *flow_def,
+                                  struct uref *uref);
+
 #endif
diff --git a/lib/upipe-av/upipe_avcodec_decode.c b/lib/upipe-av/upipe_avcodec_decode.c
index ba232f3f0..f6bd26ada 100644
--- a/lib/upipe-av/upipe_avcodec_decode.c
+++ b/lib/upipe-av/upipe_avcodec_decode.c
@@ -69,6 +69,7 @@
 #include
 #include
 #include
+#include <libavutil/mastering_display_metadata.h>
 #include "upipe-av/upipe_av_pixfmt.h"
 #include "upipe-av/upipe_av_samplefmt.h"
 #include "upipe_av_internal.h"
@@ -395,6 +396,59 @@ static int upipe_avcdec_get_buffer_pic(struct AVCodecContext *context,
     if (context->color_range == AVCOL_RANGE_JPEG)
         uref_pic_flow_set_full_range(flow_def_attr);
 
+    AVFrameSideData *sd = av_frame_get_side_data(
+        frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
+    if (sd) {
+        AVContentLightMetadata *clm = (AVContentLightMetadata *)sd->data;
+        UBASE_FATAL(upipe, uref_pic_flow_set_max_cll(
+                flow_def_attr, clm->MaxCLL))
+        UBASE_FATAL(upipe, uref_pic_flow_set_max_fall(
+                flow_def_attr, clm->MaxFALL))
+    } else if (!frame->key_frame) {
+        uint64_t max_cll;
+        if (ubase_check(uref_pic_flow_get_max_cll(
+                upipe_avcdec->flow_def_format, &max_cll))) {
+            UBASE_FATAL(upipe, uref_pic_flow_set_max_cll(
+                    flow_def_attr, max_cll))
+        }
+        uint64_t max_fall;
+        if (ubase_check(uref_pic_flow_get_max_fall(
+                upipe_avcdec->flow_def_format, &max_fall))) {
+            UBASE_FATAL(upipe, uref_pic_flow_set_max_fall(
+                    flow_def_attr, max_fall))
+        }
+    }
+
+    sd = av_frame_get_side_data(
+        frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
+    if (sd) {
+        AVMasteringDisplayMetadata *mdcv =
+            (AVMasteringDisplayMetadata *)sd->data;
+        AVRational chroma = { 1, 50000 };
+        AVRational luma = { 1, 10000 };
+        UBASE_FATAL(upipe, uref_pic_flow_set_mastering_display(flow_def_attr,
+            &(struct uref_pic_mastering_display){
+                .red_x = av_rescale_q(1, mdcv->display_primaries[0][0], chroma),
+                .red_y = av_rescale_q(1, mdcv->display_primaries[0][1], chroma),
+                .green_x = av_rescale_q(1, mdcv->display_primaries[1][0], chroma),
+                .green_y = av_rescale_q(1, mdcv->display_primaries[1][1], chroma),
+                .blue_x = av_rescale_q(1, mdcv->display_primaries[2][0], chroma),
+                .blue_y = av_rescale_q(1, mdcv->display_primaries[2][1], chroma),
+                .white_x = av_rescale_q(1, mdcv->white_point[0], chroma),
+                .white_y = av_rescale_q(1, mdcv->white_point[1], chroma),
+                .min_luminance = av_rescale_q(1, mdcv->min_luminance, luma),
+                .max_luminance = av_rescale_q(1, mdcv->max_luminance, luma),
+            }))
+    } else if (!frame->key_frame) {
+        const uint8_t *mdcv;
+        size_t size;
+        if (ubase_check(uref_pic_flow_get_mdcv(upipe_avcdec->flow_def_format,
+                                               &mdcv, &size))) {
+            UBASE_FATAL(upipe, uref_pic_flow_set_mdcv(
+                    flow_def_attr, mdcv, size))
+        }
+    }
+
     if (unlikely(upipe_avcdec->ubuf_mgr != NULL &&
                  udict_cmp(upipe_avcdec->flow_def_format->udict,
                            flow_def_attr->udict))) {
diff --git a/lib/upipe-av/upipe_avcodec_encode.c b/lib/upipe-av/upipe_avcodec_encode.c
index 94873a5d2..ddad62ba5 100644
--- a/lib/upipe-av/upipe_avcodec_encode.c
+++ b/lib/upipe-av/upipe_avcodec_encode.c
@@ -74,6 +74,7 @@
 #include
 #include
 #include
+#include <libavutil/mastering_display_metadata.h>
 #include
 #include
 #include
@@ -879,8 +880,12 @@ static void upipe_avcenc_encode_video(struct upipe *upipe,
         frame->format = context->pix_fmt;
         frame->width = hsize;
         frame->height = vsize;
-        frame->interlaced_frame = !ubase_check(uref_pic_get_progressive(uref));
-        frame->top_field_first = ubase_check(uref_pic_get_tff(uref));
+
+        if (!ubase_check(upipe_av_set_frame_properties(
+                    upipe, frame, upipe_avcenc->flow_def_attr, uref))) {
+            uref_free(uref);
+            return;
+        }
     }
 
     /* set picture type */
@@ -1402,6 +1407,12 @@ static int upipe_avcenc_set_flow_def(struct upipe *upipe, struct uref *flow_def)
                  !ubase_check(uref_pic_flow_copy_transfer_characteristics(
                          flow_def_check, flow_def)) ||
                  !ubase_check(uref_pic_flow_copy_matrix_coefficients(
+                         flow_def_check, flow_def)) ||
+                 !ubase_check(uref_pic_flow_copy_mdcv(
+                         flow_def_check, flow_def)) ||
+                 !ubase_check(uref_pic_flow_copy_max_cll(
+                         flow_def_check, flow_def)) ||
+                 !ubase_check(uref_pic_flow_copy_max_fall(
                          flow_def_check, flow_def)))) {
         uref_free(flow_def_check);
         upipe_throw_fatal(upipe, UBASE_ERR_ALLOC);
@@ -1534,6 +1545,58 @@ static int upipe_avcenc_set_flow_def(struct upipe *upipe, struct uref *flow_def)
                 context->field_order = AV_FIELD_BB;
         }
 
+#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(61, 2, 100)
+        uint64_t max_cll;
+        uint64_t max_fall;
+        if (ubase_check(uref_pic_flow_get_max_cll(flow_def, &max_cll)) &&
+            ubase_check(uref_pic_flow_get_max_fall(flow_def, &max_fall))) {
+            AVFrameSideData *sd = av_frame_side_data_new(
+                &context->decoded_side_data,
+                &context->nb_decoded_side_data,
+                AV_FRAME_DATA_CONTENT_LIGHT_LEVEL,
+                sizeof(AVContentLightMetadata),
+                AV_FRAME_SIDE_DATA_FLAG_UNIQUE);
+            if (!sd) {
+                uref_free(flow_def_check);
+                return UBASE_ERR_EXTERNAL;
+            }
+            AVContentLightMetadata *clm =
+                (AVContentLightMetadata *)sd->data;
+            clm->MaxCLL = max_cll;
+            clm->MaxFALL = max_fall;
+        }
+
+        struct uref_pic_mastering_display mdcv;
+        if (ubase_check(uref_pic_flow_get_mastering_display(flow_def, &mdcv))) {
+            AVFrameSideData *sd = av_frame_side_data_new(
+                &context->decoded_side_data,
+                &context->nb_decoded_side_data,
+                AV_FRAME_DATA_MASTERING_DISPLAY_METADATA,
+                sizeof(AVMasteringDisplayMetadata),
+                AV_FRAME_SIDE_DATA_FLAG_UNIQUE);
+            if (!sd) {
+                uref_free(flow_def_check);
+                return UBASE_ERR_EXTERNAL;
+            }
+            AVMasteringDisplayMetadata *mdm =
+                (AVMasteringDisplayMetadata *)sd->data;
+            int chroma = 50000;
+            int luma = 10000;
+            mdm->display_primaries[0][0] = av_make_q(mdcv.red_x, chroma);
+            mdm->display_primaries[0][1] = av_make_q(mdcv.red_y, chroma);
+            mdm->display_primaries[1][0] = av_make_q(mdcv.green_x, chroma);
+            mdm->display_primaries[1][1] = av_make_q(mdcv.green_y, chroma);
+            mdm->display_primaries[2][0] = av_make_q(mdcv.blue_x, chroma);
+            mdm->display_primaries[2][1] = av_make_q(mdcv.blue_y, chroma);
+            mdm->white_point[0] = av_make_q(mdcv.white_x, chroma);
+            mdm->white_point[1] = av_make_q(mdcv.white_y, chroma);
+            mdm->has_primaries = 1;
+            mdm->max_luminance = av_make_q(mdcv.max_luminance, luma);
+            mdm->min_luminance = av_make_q(mdcv.min_luminance, luma);
+            mdm->has_luminance = 1;
+        }
+#endif
+
         upipe_avcenc_store_flow_def_check(upipe, flow_def_check);
     } else {
diff --git a/lib/upipe-av/upipe_avfilter.c b/lib/upipe-av/upipe_avfilter.c
index 50af13661..80ec7a64c 100644
--- a/lib/upipe-av/upipe_avfilter.c
+++ b/lib/upipe-av/upipe_avfilter.c
@@ -59,6 +59,7 @@
 #include
 #include
 #include
+#include <libavutil/mastering_display_metadata.h>
 #include
 
 #include "upipe_av_internal.h"
@@ -496,6 +497,35 @@ static int build_video_flow_def(struct uref *flow_def,
     UBASE_RETURN(uref_pic_flow_set_matrix_coefficients_val(
             flow_def, matrix_coefficients))
 
+    AVFrameSideData *sd = av_frame_get_side_data(
+        frame, AV_FRAME_DATA_CONTENT_LIGHT_LEVEL);
+    if (sd) {
+        AVContentLightMetadata *clm = (AVContentLightMetadata *)sd->data;
+        UBASE_RETURN(uref_pic_flow_set_max_cll(flow_def, clm->MaxCLL))
+        UBASE_RETURN(uref_pic_flow_set_max_fall(flow_def, clm->MaxFALL))
+    }
+    sd = av_frame_get_side_data(
+        frame, AV_FRAME_DATA_MASTERING_DISPLAY_METADATA);
+    if (sd) {
+        AVMasteringDisplayMetadata *mdcv =
+            (AVMasteringDisplayMetadata *)sd->data;
+        AVRational chroma = { 1, 50000 };
+        AVRational luma = { 1, 10000 };
+        UBASE_RETURN(uref_pic_flow_set_mastering_display(flow_def,
+            &(struct uref_pic_mastering_display){
+                .red_x = av_rescale_q(1, mdcv->display_primaries[0][0], chroma),
+                .red_y = av_rescale_q(1, mdcv->display_primaries[0][1], chroma),
+                .green_x = av_rescale_q(1, mdcv->display_primaries[1][0], chroma),
+                .green_y = av_rescale_q(1, mdcv->display_primaries[1][1], chroma),
+                .blue_x = av_rescale_q(1, mdcv->display_primaries[2][0], chroma),
+                .blue_y = av_rescale_q(1, mdcv->display_primaries[2][1], chroma),
+                .white_x = av_rescale_q(1, mdcv->white_point[0], chroma),
+                .white_y = av_rescale_q(1, mdcv->white_point[1], chroma),
+                .min_luminance = av_rescale_q(1, mdcv->min_luminance, luma),
+                .max_luminance = av_rescale_q(1, mdcv->max_luminance, luma),
+            }))
+    }
+
     return UBASE_ERR_NONE;
 }
 
@@ -1108,20 +1138,13 @@ static int upipe_avfilt_avframe_from_uref_pic(struct upipe *upipe,
     frame->extended_data = frame->data;
     frame->width = hsize;
     frame->height = vsize;
-    frame->key_frame = ubase_check(uref_pic_get_key(uref));
     frame->format = video->pix_fmt;
-    frame->interlaced_frame = !ubase_check(uref_pic_get_progressive(uref));
-    frame->top_field_first = ubase_check(uref_pic_get_tff(uref));
-    frame->color_range = ubase_check(uref_pic_flow_get_full_range(
-        flow_def)) ? AVCOL_RANGE_JPEG : AVCOL_RANGE_MPEG;
-
-    int val;
-    if (ubase_check(uref_pic_flow_get_colour_primaries_val(flow_def, &val)))
-        frame->color_primaries = val;
-    if (ubase_check(uref_pic_flow_get_transfer_characteristics_val(flow_def, &val)))
-        frame->color_trc = val;
-    if (ubase_check(uref_pic_flow_get_matrix_coefficients_val(flow_def, &val)))
-        frame->colorspace = val;
+
+    int err = upipe_av_set_frame_properties(upipe, frame, flow_def, uref);
+    if (!ubase_check(err)) {
+        uref_free(uref);
+        return err;
+    }
 
     uint64_t pts = UINT64_MAX;
     if (ubase_check(uref_clock_get_pts_prog(uref, &pts)))
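
Note (illustration, not part of the patch): the mastering display values travel through the flow definition as integers in the units of the ST 2086 mastering display colour volume SEI, chromaticity coordinates in 1/50000 steps and luminance in 1/10000 cd/m2 steps. That is why the patch builds AVRational values with av_make_q(x, 50000) and av_make_q(x, 10000) when filling AVFrame side data, and uses av_rescale_q() against the fixed { 1, 50000 } and { 1, 10000 } rationals when reading side data back into a flow definition. The small standalone program below sketches the round trip with made-up sample values; it depends only on libavutil and is not part of the Upipe tree.

/* hdr_units_demo.c - illustration only; build with:
 *   cc hdr_units_demo.c -o hdr_units_demo $(pkg-config --cflags --libs libavutil)
 */
#include <stdio.h>
#include <inttypes.h>
#include <libavutil/rational.h>
#include <libavutil/mathematics.h>

int main(void)
{
    /* Values as carried in the flow definition (illustrative numbers):
     * chromaticities in 1/50000 units, luminance in 1/10000 cd/m2 units. */
    int white_x = 15635;            /* D65 white point x = 0.3127 */
    int max_luminance = 10000000;   /* 1000 cd/m2 */

    /* Flow definition -> AVFrame side data: build rationals with the
     * fixed denominators, as upipe_av_set_frame_properties() does. */
    AVRational wx = av_make_q(white_x, 50000);
    AVRational maxl = av_make_q(max_luminance, 10000);

    /* AVFrame side data -> flow definition: rescale the rationals back to
     * integer units, as upipe_avcdec_get_buffer_pic() and
     * build_video_flow_def() do. */
    int64_t white_x_back = av_rescale_q(1, wx, (AVRational){ 1, 50000 });
    int64_t max_luminance_back = av_rescale_q(1, maxl, (AVRational){ 1, 10000 });

    printf("white_x: %d -> %d/%d -> %" PRId64 "\n",
           white_x, wx.num, wx.den, white_x_back);
    printf("max_luminance: %d -> %d/%d -> %" PRId64 "\n",
           max_luminance, maxl.num, maxl.den, max_luminance_back);
    return 0;
}

Both directions are exact for values representable in these units, since av_rescale_q() multiplies by the same denominator that av_make_q() introduced.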