tkurbad-overlay/www-client/thorium/files/ffmpeg-nb_coded_side_data-r1.patch
From 84d1d7adcd1697348e3f821811179c525d3b5717 Mon Sep 17 00:00:00 2001
From: Dale Curtis <dalecurtis@chromium.org>
Date: Tue, 28 Nov 2023 00:41:42 +0000
Subject: [PATCH] Roll src/third_party/ffmpeg/ c1d0dc413..866768f35 (492 commits)
---
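Note: this patch ports media/ffmpeg/ffmpeg_common.cc and
media/filters/audio_video_metadata_extractor.cc away from the AVStream
side-data API (av_stream_get_side_data() / AVStream::side_data, deprecated in
FFmpeg 6.1 and removed in 7.0) to iterating
AVCodecParameters::coded_side_data / nb_coded_side_data instead. The lookup
pattern it adopts looks roughly like the sketch below (illustrative only, not
part of the patch; it assumes FFmpeg 6.1+ headers, and StreamRotationDegrees
is a made-up helper name):

    extern "C" {
    #include <libavformat/avformat.h>  // AVStream, AVCodecParameters
    #include <libavutil/display.h>     // av_display_rotation_get()
    }

    // Illustrative helper: returns the counter-clockwise rotation in degrees
    // recorded in the stream's AV_PKT_DATA_DISPLAYMATRIX side data, or 0.0 if
    // the stream carries no display matrix.
    static double StreamRotationDegrees(const AVStream* stream) {
      const AVCodecParameters* par = stream->codecpar;
      for (int i = 0; i < par->nb_coded_side_data; ++i) {
        const AVPacketSideData& sd = par->coded_side_data[i];
        if (sd.type == AV_PKT_DATA_DISPLAYMATRIX &&
            sd.size >= 9 * sizeof(int32_t)) {
          // The payload is a 3x3 matrix of int32_t fixed-point values;
          // av_display_rotation_get() extracts its rotation component
          // (NaN if the matrix is degenerate).
          return av_display_rotation_get(
              reinterpret_cast<const int32_t*>(sd.data));
        }
      }
      return 0.0;
    }

FFmpeg 6.1+ also offers av_packet_side_data_get() for this kind of lookup; the
patch keeps an explicit loop so a single pass can also pick up the
mastering-display, content-light-level and Dolby Vision side data below.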
diff --git a/media/ffmpeg/ffmpeg_common.cc b/media/ffmpeg/ffmpeg_common.cc
index 3c97e4a6cf4c781..a55e4ab51d5b353 100644
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -653,15 +653,6 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
profile = ProfileIDToVideoCodecProfile(codec_context->profile);
}
- void* display_matrix =
- av_stream_get_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, nullptr);
-
- VideoTransformation video_transformation = VideoTransformation();
- if (display_matrix) {
- video_transformation = VideoTransformation::FromFFmpegDisplayMatrix(
- static_cast<int32_t*>(display_matrix));
- }
-
if (!color_space.IsSpecified()) {
// VP9 frames may have color information, but that information cannot
// express new color spaces, like HDR. For that reason, color space
@@ -718,88 +709,92 @@ bool AVStreamToVideoDecoderConfig(const AVStream* stream,
codec_context->extradata + codec_context->extradata_size);
}
- if (stream->nb_side_data) {
- for (int i = 0; i < stream->nb_side_data; ++i) {
- AVPacketSideData side_data = stream->side_data[i];
- switch (side_data.type) {
- case AV_PKT_DATA_MASTERING_DISPLAY_METADATA: {
- AVMasteringDisplayMetadata* mdcv =
- reinterpret_cast<AVMasteringDisplayMetadata*>(side_data.data);
- gfx::HdrMetadataSmpteSt2086 smpte_st_2086;
- if (mdcv->has_primaries) {
- smpte_st_2086.primaries = {
- static_cast<float>(av_q2d(mdcv->display_primaries[0][0])),
- static_cast<float>(av_q2d(mdcv->display_primaries[0][1])),
- static_cast<float>(av_q2d(mdcv->display_primaries[1][0])),
- static_cast<float>(av_q2d(mdcv->display_primaries[1][1])),
- static_cast<float>(av_q2d(mdcv->display_primaries[2][0])),
- static_cast<float>(av_q2d(mdcv->display_primaries[2][1])),
- static_cast<float>(av_q2d(mdcv->white_point[0])),
- static_cast<float>(av_q2d(mdcv->white_point[1])),
- };
- }
- if (mdcv->has_luminance) {
- smpte_st_2086.luminance_max = av_q2d(mdcv->max_luminance);
- smpte_st_2086.luminance_min = av_q2d(mdcv->min_luminance);
- }
-
- // TODO(https://crbug.com/1446302): Consider rejecting metadata that
- // does not specify all values.
- if (mdcv->has_primaries || mdcv->has_luminance) {
- hdr_metadata.smpte_st_2086 = smpte_st_2086;
- }
- break;
+ VideoTransformation video_transformation = VideoTransformation();
+ for (int i = 0; i < stream->codecpar->nb_coded_side_data; ++i) {
+ const auto& side_data = stream->codecpar->coded_side_data[i];
+ switch (side_data.type) {
+ case AV_PKT_DATA_DISPLAYMATRIX: {
+ CHECK_EQ(side_data.size, sizeof(int32_t) * 3 * 3);
+ video_transformation = VideoTransformation::FromFFmpegDisplayMatrix(
+ reinterpret_cast<int32_t*>(side_data.data));
+ break;
+ }
+ case AV_PKT_DATA_MASTERING_DISPLAY_METADATA: {
+ AVMasteringDisplayMetadata* mdcv =
+ reinterpret_cast<AVMasteringDisplayMetadata*>(side_data.data);
+ gfx::HdrMetadataSmpteSt2086 smpte_st_2086;
+ if (mdcv->has_primaries) {
+ smpte_st_2086.primaries = {
+ static_cast<float>(av_q2d(mdcv->display_primaries[0][0])),
+ static_cast<float>(av_q2d(mdcv->display_primaries[0][1])),
+ static_cast<float>(av_q2d(mdcv->display_primaries[1][0])),
+ static_cast<float>(av_q2d(mdcv->display_primaries[1][1])),
+ static_cast<float>(av_q2d(mdcv->display_primaries[2][0])),
+ static_cast<float>(av_q2d(mdcv->display_primaries[2][1])),
+ static_cast<float>(av_q2d(mdcv->white_point[0])),
+ static_cast<float>(av_q2d(mdcv->white_point[1])),
+ };
}
- case AV_PKT_DATA_CONTENT_LIGHT_LEVEL: {
- AVContentLightMetadata* clli =
- reinterpret_cast<AVContentLightMetadata*>(side_data.data);
- hdr_metadata.cta_861_3 =
- gfx::HdrMetadataCta861_3(clli->MaxCLL, clli->MaxFALL);
- break;
+ if (mdcv->has_luminance) {
+ smpte_st_2086.luminance_max = av_q2d(mdcv->max_luminance);
+ smpte_st_2086.luminance_min = av_q2d(mdcv->min_luminance);
+ }
+
+ // TODO(https://crbug.com/1446302): Consider rejecting metadata that
+ // does not specify all values.
+ if (mdcv->has_primaries || mdcv->has_luminance) {
+ hdr_metadata.smpte_st_2086 = smpte_st_2086;
}
+ break;
+ }
+ case AV_PKT_DATA_CONTENT_LIGHT_LEVEL: {
+ AVContentLightMetadata* clli =
+ reinterpret_cast<AVContentLightMetadata*>(side_data.data);
+ hdr_metadata.cta_861_3 =
+ gfx::HdrMetadataCta861_3(clli->MaxCLL, clli->MaxFALL);
+ break;
+ }
#if BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
- case AV_PKT_DATA_DOVI_CONF: {
- AVDOVIDecoderConfigurationRecord* dovi =
- reinterpret_cast<AVDOVIDecoderConfigurationRecord*>(
- side_data.data);
- VideoType type;
- type.codec = VideoCodec::kDolbyVision;
- type.level = dovi->dv_level;
- type.color_space = color_space;
- type.hdr_metadata_type = gfx::HdrMetadataType::kSmpteSt2094_10;
- switch (dovi->dv_profile) {
- case 0:
- type.profile = VideoCodecProfile::DOLBYVISION_PROFILE0;
- break;
- case 5:
- type.profile = VideoCodecProfile::DOLBYVISION_PROFILE5;
- break;
- case 7:
- type.profile = VideoCodecProfile::DOLBYVISION_PROFILE7;
- break;
- case 8:
- type.profile = VideoCodecProfile::DOLBYVISION_PROFILE8;
- break;
- case 9:
- type.profile = VideoCodecProfile::DOLBYVISION_PROFILE9;
- break;
- default:
- type.profile = VideoCodecProfile::VIDEO_CODEC_PROFILE_UNKNOWN;
- break;
- }
- // Treat dolby vision contents as dolby vision codec only if the
- // device support clear DV decoding, otherwise use the original
- // HEVC or AVC codec and profile.
- if (media::IsSupportedVideoType(type)) {
- codec = type.codec;
- profile = type.profile;
- }
- break;
+ case AV_PKT_DATA_DOVI_CONF: {
+ AVDOVIDecoderConfigurationRecord* dovi =
+ reinterpret_cast<AVDOVIDecoderConfigurationRecord*>(side_data.data);
+ VideoType type;
+ type.codec = VideoCodec::kDolbyVision;
+ type.level = dovi->dv_level;
+ type.color_space = color_space;
+ type.hdr_metadata_type = gfx::HdrMetadataType::kSmpteSt2094_10;
+ switch (dovi->dv_profile) {
+ case 0:
+ type.profile = VideoCodecProfile::DOLBYVISION_PROFILE0;
+ break;
+ case 5:
+ type.profile = VideoCodecProfile::DOLBYVISION_PROFILE5;
+ break;
+ case 7:
+ type.profile = VideoCodecProfile::DOLBYVISION_PROFILE7;
+ break;
+ case 8:
+ type.profile = VideoCodecProfile::DOLBYVISION_PROFILE8;
+ break;
+ case 9:
+ type.profile = VideoCodecProfile::DOLBYVISION_PROFILE9;
+ break;
+ default:
+ type.profile = VideoCodecProfile::VIDEO_CODEC_PROFILE_UNKNOWN;
+ break;
+ }
+ // Treat dolby vision contents as dolby vision codec only if the
+ // device support clear DV decoding, otherwise use the original
+ // HEVC or AVC codec and profile.
+ if (media::IsSupportedVideoType(type)) {
+ codec = type.codec;
+ profile = type.profile;
}
+ break;
+ }
#endif // BUILDFLAG(ENABLE_PLATFORM_DOLBY_VISION)
default:
break;
- }
}
}
diff --git a/media/filters/audio_video_metadata_extractor.cc b/media/filters/audio_video_metadata_extractor.cc
index cb002490a6322bf..e841b4ad13383ac 100644
--- a/media/filters/audio_video_metadata_extractor.cc
+++ b/media/filters/audio_video_metadata_extractor.cc
@@ -113,13 +113,16 @@ bool AudioVideoMetadataExtractor::Extract(DataSource* source,
if (!stream)
continue;
- void* display_matrix =
- av_stream_get_side_data(stream, AV_PKT_DATA_DISPLAYMATRIX, nullptr);
- if (display_matrix) {
- rotation_ = VideoTransformation::FromFFmpegDisplayMatrix(
- static_cast<int32_t*>(display_matrix))
- .rotation;
- info.tags["rotate"] = base::NumberToString(rotation_);
+ for (int j = 0; j < stream->codecpar->nb_coded_side_data; j++) {
+ const AVPacketSideData& sd = stream->codecpar->coded_side_data[j];
+ if (sd.type == AV_PKT_DATA_DISPLAYMATRIX) {
+ CHECK_EQ(sd.size, sizeof(int32_t) * 3 * 3);
+ rotation_ = VideoTransformation::FromFFmpegDisplayMatrix(
+ reinterpret_cast<int32_t*>(sd.data))
+ .rotation;
+ info.tags["rotate"] = base::NumberToString(rotation_);
+ break;
+ }
}
// Extract dictionary from streams also. Needed for containers that attach