Add VideoRangeType to video conditions

This is used to indicate whether the client supports a specific VideoRangeType,
such as SDR, HDR10, HLG or DOVI. Usage is similar to the Video Profile condition.
nyanmisaka 2022-06-16 21:32:54 +08:00
parent fd4ffc6ba3
commit be72001ff9
14 changed files with 191 additions and 30 deletions
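For illustration, a client opts in the same way it would for a Video Profile condition: by adding a CodecProfile condition to its device profile. The sketch below is hypothetical (the "hevc" codec and the range-type values are examples only), but the DTO types are the ones touched by this commit in MediaBrowser.Model.Dlna:

    using MediaBrowser.Model.Dlna;

    // Example only: accept HEVC video whose range type is SDR, HDR10 or HLG.
    // Anything else (e.g. DOVI) fails the condition and is reported as
    // TranscodeReason.VideoRangeTypeNotSupported.
    var codecProfile = new CodecProfile
    {
        Type = CodecType.Video,
        Codec = "hevc",
        Conditions = new[]
        {
            new ProfileCondition
            {
                Condition = ProfileConditionType.EqualsAny,
                Property = ProfileConditionValue.VideoRangeType,
                Value = "SDR|HDR10|HLG",
                IsRequired = false
            }
        }
    };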


@@ -221,6 +221,7 @@ namespace Emby.Dlna.Didl
streamInfo.IsDirectStream,
streamInfo.RunTimeTicks ?? 0,
streamInfo.TargetVideoProfile,
streamInfo.TargetVideoRangeType,
streamInfo.TargetVideoLevel,
streamInfo.TargetFramerate ?? 0,
streamInfo.TargetPacketLength,
@@ -376,6 +377,7 @@ namespace Emby.Dlna.Didl
targetHeight,
streamInfo.TargetVideoBitDepth,
streamInfo.TargetVideoProfile,
streamInfo.TargetVideoRangeType,
streamInfo.TargetVideoLevel,
streamInfo.TargetFramerate ?? 0,
streamInfo.TargetPacketLength,


@@ -561,6 +561,7 @@ namespace Emby.Dlna.PlayTo
streamInfo.IsDirectStream,
streamInfo.RunTimeTicks ?? 0,
streamInfo.TargetVideoProfile,
streamInfo.TargetVideoRangeType,
streamInfo.TargetVideoLevel,
streamInfo.TargetFramerate ?? 0,
streamInfo.TargetPacketLength,


@@ -312,7 +312,7 @@ namespace Jellyfin.Api.Helpers
responseHeaders.Add(
"contentFeatures.dlna.org",
ContentFeatureBuilder.BuildVideoHeader(profile, state.OutputContainer, videoCodec, audioCodec, state.OutputWidth, state.OutputHeight, state.TargetVideoBitDepth, state.OutputVideoBitrate, state.TargetTimestamp, isStaticallyStreamed, state.RunTimeTicks, state.TargetVideoProfile, state.TargetVideoLevel, state.TargetFramerate, state.TargetPacketLength, state.TranscodeSeekInfo, state.IsTargetAnamorphic, state.IsTargetInterlaced, state.TargetRefFrames, state.TargetVideoStreamCount, state.TargetAudioStreamCount, state.TargetVideoCodecTag, state.IsTargetAVC).FirstOrDefault() ?? string.Empty);
ContentFeatureBuilder.BuildVideoHeader(profile, state.OutputContainer, videoCodec, audioCodec, state.OutputWidth, state.OutputHeight, state.TargetVideoBitDepth, state.OutputVideoBitrate, state.TargetTimestamp, isStaticallyStreamed, state.RunTimeTicks, state.TargetVideoProfile, state.TargetVideoRangeType, state.TargetVideoLevel, state.TargetFramerate, state.TargetPacketLength, state.TranscodeSeekInfo, state.IsTargetAnamorphic, state.IsTargetInterlaced, state.TargetRefFrames, state.TargetVideoStreamCount, state.TargetAudioStreamCount, state.TargetVideoCodecTag, state.IsTargetAVC).FirstOrDefault() ?? string.Empty);
}
}
@@ -533,6 +533,7 @@ namespace Jellyfin.Api.Helpers
state.TargetVideoBitDepth,
state.OutputVideoBitrate,
state.TargetVideoProfile,
state.TargetVideoRangeType,
state.TargetVideoLevel,
state.TargetFramerate,
state.TargetPacketLength,


@@ -75,6 +75,12 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <value>The profile.</value>
public string Profile { get; set; }
/// <summary>
/// Gets or sets the video range type.
/// </summary>
/// <value>The video range type.</value>
public string VideoRangeType { get; set; }
/// <summary>
/// Gets or sets the level.
/// </summary>


@@ -1753,6 +1753,21 @@ namespace MediaBrowser.Controller.MediaEncoding
}
}
var requestedRangeTypes = state.GetRequestedRangeTypes(videoStream.Codec);
if (requestedRangeTypes.Length > 0)
{
if (string.IsNullOrEmpty(videoStream.VideoRangeType))
{
return false;
}
if (!string.IsNullOrEmpty(videoStream.VideoRangeType)
&& !requestedRangeTypes.Contains(videoStream.VideoRangeType, StringComparison.OrdinalIgnoreCase))
{
return false;
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue
&& (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value))


@@ -366,6 +366,28 @@ namespace MediaBrowser.Controller.MediaEncoding
}
}
/// <summary>
/// Gets the target video range type.
/// </summary>
public string TargetVideoRangeType
{
get
{
if (BaseRequest.Static || EncodingHelper.IsCopyCodec(OutputVideoCodec))
{
return VideoStream?.VideoRangeType;
}
var requestedRangeType = GetRequestedRangeTypes(ActualOutputVideoCodec).FirstOrDefault();
if (!string.IsNullOrEmpty(requestedRangeType))
{
return requestedRangeType;
}
return null;
}
}
public string TargetVideoCodecTag
{
get
@@ -579,6 +601,26 @@ namespace MediaBrowser.Controller.MediaEncoding
return Array.Empty<string>();
}
public string[] GetRequestedRangeTypes(string codec)
{
if (!string.IsNullOrEmpty(BaseRequest.VideoRangeType))
{
return BaseRequest.VideoRangeType.Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries);
}
if (!string.IsNullOrEmpty(codec))
{
var rangetype = BaseRequest.GetOption(codec, "rangetype");
if (!string.IsNullOrEmpty(rangetype))
{
return rangetype.Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries);
}
}
return Array.Empty<string>();
}
public string GetRequestedLevel(string codec)
{
if (!string.IsNullOrEmpty(BaseRequest.Level))

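A side note on the accepted formats: the '|' separated form matches how profile condition values are written, while the ',' separated form is what ApplyTranscodingConditions stores in the per-codec "rangetype" option (see the StreamBuilder hunk further down), so GetRequestedRangeTypes splits on both. A tiny illustration (the input string is made up):

    // Both delimiters are accepted and empty entries are dropped.
    var requested = "SDR|HDR10,HLG"
        .Split(new[] { '|', ',' }, StringSplitOptions.RemoveEmptyEntries);
    // requested => { "SDR", "HDR10", "HLG" }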

@@ -16,6 +16,7 @@ namespace MediaBrowser.Model.Dlna
int? videoBitDepth,
int? videoBitrate,
string? videoProfile,
string? videoRangeType,
double? videoLevel,
float? videoFramerate,
int? packetLength,
@@ -42,6 +43,8 @@ namespace MediaBrowser.Model.Dlna
return IsConditionSatisfied(condition, videoLevel);
case ProfileConditionValue.VideoProfile:
return IsConditionSatisfied(condition, videoProfile);
case ProfileConditionValue.VideoRangeType:
return IsConditionSatisfied(condition, videoRangeType);
case ProfileConditionValue.VideoCodecTag:
return IsConditionSatisfied(condition, videoCodecTag);
case ProfileConditionValue.PacketLength:


@@ -128,6 +128,7 @@ namespace MediaBrowser.Model.Dlna
bool isDirectStream,
long? runtimeTicks,
string videoProfile,
string videoRangeType,
double? videoLevel,
float? videoFramerate,
int? packetLength,
@@ -176,6 +177,7 @@ namespace MediaBrowser.Model.Dlna
bitDepth,
videoBitrate,
videoProfile,
videoRangeType,
videoLevel,
videoFramerate,
packetLength,


@@ -423,6 +423,7 @@ namespace MediaBrowser.Model.Dlna
/// <param name="bitDepth">The bit depth.</param>
/// <param name="videoBitrate">The video bitrate.</param>
/// <param name="videoProfile">The video profile.</param>
/// <param name="videoRangeType">The video range type.</param>
/// <param name="videoLevel">The video level.</param>
/// <param name="videoFramerate">The video framerate.</param>
/// <param name="packetLength">The packet length.</param>
@@ -444,6 +445,7 @@ namespace MediaBrowser.Model.Dlna
int? bitDepth,
int? videoBitrate,
string videoProfile,
string videoRangeType,
double? videoLevel,
float? videoFramerate,
int? packetLength,
@@ -483,7 +485,7 @@ namespace MediaBrowser.Model.Dlna
var anyOff = false;
foreach (ProfileCondition c in i.Conditions)
{
if (!ConditionProcessor.IsVideoConditionSatisfied(GetModelProfileCondition(c), width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc))
if (!ConditionProcessor.IsVideoConditionSatisfied(GetModelProfileCondition(c), width, height, bitDepth, videoBitrate, videoProfile, videoRangeType, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc))
{
anyOff = true;
break;


@@ -26,6 +26,7 @@ namespace MediaBrowser.Model.Dlna
IsAvc = 20,
IsInterlaced = 21,
AudioSampleRate = 22,
AudioBitDepth = 23
AudioBitDepth = 23,
VideoRangeType = 24
}
}


@@ -221,6 +221,9 @@ namespace MediaBrowser.Model.Dlna
case ProfileConditionValue.VideoProfile:
return TranscodeReason.VideoProfileNotSupported;
case ProfileConditionValue.VideoRangeType:
return TranscodeReason.VideoRangeTypeNotSupported;
case ProfileConditionValue.VideoTimestamp:
// TODO
return 0;
@@ -748,9 +751,9 @@ namespace MediaBrowser.Model.Dlna
var appliedVideoConditions = options.Profile.CodecProfiles
.Where(i => i.Type == CodecType.Video &&
i.ContainsAnyCodec(videoCodec, container) &&
i.ApplyConditions.All(applyCondition => ConditionProcessor.IsVideoConditionSatisfied(applyCondition, videoStream?.Width, videoStream?.Height, videoStream?.BitDepth, videoStream?.BitRate, videoStream?.Profile, videoStream?.Level, videoFramerate, videoStream?.PacketLength, timestamp, videoStream?.IsAnamorphic, videoStream?.IsInterlaced, videoStream?.RefFrames, numVideoStreams, numAudioStreams, videoStream?.CodecTag, videoStream?.IsAVC)))
i.ApplyConditions.All(applyCondition => ConditionProcessor.IsVideoConditionSatisfied(applyCondition, videoStream?.Width, videoStream?.Height, videoStream?.BitDepth, videoStream?.BitRate, videoStream?.Profile, videoStream?.VideoRangeType, videoStream?.Level, videoFramerate, videoStream?.PacketLength, timestamp, videoStream?.IsAnamorphic, videoStream?.IsInterlaced, videoStream?.RefFrames, numVideoStreams, numAudioStreams, videoStream?.CodecTag, videoStream?.IsAVC)))
.Select(i =>
i.Conditions.All(condition => ConditionProcessor.IsVideoConditionSatisfied(condition, videoStream?.Width, videoStream?.Height, videoStream?.BitDepth, videoStream?.BitRate, videoStream?.Profile, videoStream?.Level, videoFramerate, videoStream?.PacketLength, timestamp, videoStream?.IsAnamorphic, videoStream?.IsInterlaced, videoStream?.RefFrames, numVideoStreams, numAudioStreams, videoStream?.CodecTag, videoStream?.IsAVC)));
i.Conditions.All(condition => ConditionProcessor.IsVideoConditionSatisfied(condition, videoStream?.Width, videoStream?.Height, videoStream?.BitDepth, videoStream?.BitRate, videoStream?.Profile, videoStream?.VideoRangeType, videoStream?.Level, videoFramerate, videoStream?.PacketLength, timestamp, videoStream?.IsAnamorphic, videoStream?.IsInterlaced, videoStream?.RefFrames, numVideoStreams, numAudioStreams, videoStream?.CodecTag, videoStream?.IsAVC)));
// An empty appliedVideoConditions means that the codec has no conditions for the current video stream
var conditionsSatisfied = appliedVideoConditions.All(satisfied => satisfied);
@@ -834,6 +837,7 @@ namespace MediaBrowser.Model.Dlna
int? videoBitrate = videoStream?.BitRate;
double? videoLevel = videoStream?.Level;
string videoProfile = videoStream?.Profile;
string videoRangeType = videoStream?.VideoRangeType;
float videoFramerate = videoStream == null ? 0 : videoStream.AverageFrameRate ?? videoStream.AverageFrameRate ?? 0;
bool? isAnamorphic = videoStream?.IsAnamorphic;
bool? isInterlaced = videoStream?.IsInterlaced;
@@ -850,7 +854,7 @@ namespace MediaBrowser.Model.Dlna
var appliedVideoConditions = options.Profile.CodecProfiles
.Where(i => i.Type == CodecType.Video &&
i.ContainsAnyCodec(videoCodec, container) &&
i.ApplyConditions.All(applyCondition => ConditionProcessor.IsVideoConditionSatisfied(applyCondition, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc)));
i.ApplyConditions.All(applyCondition => ConditionProcessor.IsVideoConditionSatisfied(applyCondition, width, height, bitDepth, videoBitrate, videoProfile, videoRangeType, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc)));
var isFirstAppliedCodecProfile = true;
foreach (var i in appliedVideoConditions)
{
@@ -1081,6 +1085,7 @@ namespace MediaBrowser.Model.Dlna
int? videoBitrate = videoStream?.BitRate;
double? videoLevel = videoStream?.Level;
string videoProfile = videoStream?.Profile;
string videoRangeType = videoStream?.VideoRangeType;
float videoFramerate = videoStream == null ? 0 : videoStream.AverageFrameRate ?? videoStream.AverageFrameRate ?? 0;
bool? isAnamorphic = videoStream?.IsAnamorphic;
bool? isInterlaced = videoStream?.IsInterlaced;
@@ -1098,7 +1103,7 @@ namespace MediaBrowser.Model.Dlna
int? numVideoStreams = mediaSource.GetStreamCount(MediaStreamType.Video);
var checkVideoConditions = (ProfileCondition[] conditions) =>
conditions.Where(applyCondition => !ConditionProcessor.IsVideoConditionSatisfied(applyCondition, width, height, bitDepth, videoBitrate, videoProfile, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc));
conditions.Where(applyCondition => !ConditionProcessor.IsVideoConditionSatisfied(applyCondition, width, height, bitDepth, videoBitrate, videoProfile, videoRangeType, videoLevel, videoFramerate, packetLength, timestamp, isAnamorphic, isInterlaced, refFrames, numVideoStreams, numAudioStreams, videoCodecTag, isAvc));
// Check container conditions
var containerProfileReasons = AggregateFailureConditions(
@@ -1852,6 +1857,38 @@ namespace MediaBrowser.Model.Dlna
break;
}
case ProfileConditionValue.VideoRangeType:
{
if (string.IsNullOrEmpty(qualifier))
{
continue;
}
// change from split by | to comma
// strip spaces to avoid having to encode
var values = value
.Split('|', StringSplitOptions.RemoveEmptyEntries);
if (condition.Condition == ProfileConditionType.Equals)
{
item.SetOption(qualifier, "rangetype", string.Join(',', values));
}
else if (condition.Condition == ProfileConditionType.EqualsAny)
{
var currentValue = item.GetOption(qualifier, "rangetype");
if (!string.IsNullOrEmpty(currentValue) && values.Any(value => value == currentValue))
{
item.SetOption(qualifier, "rangetype", currentValue);
}
else
{
item.SetOption(qualifier, "rangetype", string.Join(',', values));
}
}
break;
}
case ProfileConditionValue.Height:
{
if (!enableNonQualifiedConditions)

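In other words, a qualified VideoRangeType condition is folded into a per-codec "rangetype" option on the StreamInfo being built (the "item" in the hunk above), which is what TargetVideoRangeType and GetRequestedRangeTypes later read back. A rough example, with the "hevc" qualifier and values purely illustrative:

    // An EqualsAny condition with Value "SDR|HDR10|HLG" qualified to "hevc"
    // ends up stored with ',' as the separator:
    item.SetOption("hevc", "rangetype", "SDR,HDR10,HLG");

    // ...and is read back the same way:
    var rangeTypes = item.GetOption("hevc", "rangetype"); // "SDR,HDR10,HLG"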

@@ -280,6 +280,29 @@ namespace MediaBrowser.Model.Dlna
}
}
/// <summary>
/// Gets the target video range type that will be in the output stream.
/// </summary>
public string TargetVideoRangeType
{
get
{
if (IsDirectStream)
{
return TargetVideoStream?.VideoRangeType;
}
var targetVideoCodecs = TargetVideoCodec;
var videoCodec = targetVideoCodecs.Length == 0 ? null : targetVideoCodecs[0];
if (!string.IsNullOrEmpty(videoCodec))
{
return GetOption(videoCodec, "rangetype");
}
return TargetVideoStream?.VideoRangeType;
}
}
/// <summary>
/// Gets the target video codec tag.
/// </summary>


@@ -104,32 +104,23 @@ namespace MediaBrowser.Model.Entities
{
get
{
if (Type != MediaStreamType.Video)
{
return null;
}
var (videoRange, videoRangeType) = getVideoColorRange();
var colorTransfer = ColorTransfer;
return videoRange;
}
}
if (string.Equals(colorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase)
|| string.Equals(colorTransfer, "arib-std-b67", StringComparison.OrdinalIgnoreCase))
{
return "HDR";
}
/// <summary>
/// Gets the video range type.
/// </summary>
/// <value>The video range type.</value>
public string VideoRangeType
{
get
{
var (videoRange, videoRangeType) = getVideoColorRange();
// For some Dolby Vision files, no color transfer is provided, so check the codec
var codecTag = CodecTag;
if (string.Equals(codecTag, "dovi", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dvh1", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dvhe", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dav1", StringComparison.OrdinalIgnoreCase))
{
return "HDR";
}
return "SDR";
return videoRangeType;
}
}
@@ -571,5 +562,39 @@ namespace MediaBrowser.Model.Entities
return true;
}
public (string VideoRange, string VideoRangeType) getVideoColorRange()
{
if (Type != MediaStreamType.Video)
{
return (null, null);
}
var colorTransfer = ColorTransfer;
if (string.Equals(colorTransfer, "smpte2084", StringComparison.OrdinalIgnoreCase))
{
return ("HDR", "HDR10");
}
if (string.Equals(colorTransfer, "arib-std-b67", StringComparison.OrdinalIgnoreCase))
{
return ("HDR", "HLG");
}
// For some Dolby Vision files, no color transfer is provided, so check the codec
var codecTag = CodecTag;
if (string.Equals(codecTag, "dovi", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dvh1", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dvhe", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codecTag, "dav1", StringComparison.OrdinalIgnoreCase))
{
return ("HDR", "DOVI");
}
return ("SDR", "SDR");
}
}
}

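For reference, the new helper yields roughly the following mappings. This is a hedged sketch: the streams are fabricated test values, while the types come from MediaBrowser.Model.Entities as changed above.

    using MediaBrowser.Model.Entities;

    var hdr10 = new MediaStream { Type = MediaStreamType.Video, ColorTransfer = "smpte2084" };
    // hdr10.VideoRange == "HDR", hdr10.VideoRangeType == "HDR10"

    var dovi = new MediaStream { Type = MediaStreamType.Video, CodecTag = "dvhe" };
    // No color transfer reported, so the codec tag decides:
    // dovi.VideoRange == "HDR", dovi.VideoRangeType == "DOVI"

    var audio = new MediaStream { Type = MediaStreamType.Audio };
    // Non-video streams report (null, null).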

@@ -17,6 +17,7 @@ namespace MediaBrowser.Model.Session
// Video Constraints
VideoProfileNotSupported = 1 << 6,
VideoRangeTypeNotSupported = 1 << 24,
VideoLevelNotSupported = 1 << 7,
VideoResolutionNotSupported = 1 << 8,
VideoBitDepthNotSupported = 1 << 9,