Merge pull request #6934 from nyanmisaka/hwa

HWA pipeline refactor, AMD/Intel/Nvidia full hardware filtering support, AV1 hwdec
Commit 78bb581f0c, authored by Cody Robibero on 2021-12-26 17:32:06 +00:00 and committed by GitHub.
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
10 changed files with 3307 additions and 2447 deletions


@ -39,7 +39,8 @@ namespace Jellyfin.Api.Controllers
[Authorize(Policy = Policies.DefaultAuthorization)]
public class DynamicHlsController : BaseJellyfinApiController
{
private const string DefaultEncoderPreset = "veryfast";
private const string DefaultVodEncoderPreset = "veryfast";
private const string DefaultEventEncoderPreset = "superfast";
private const TranscodingJobType TranscodingJobType = MediaBrowser.Controller.MediaEncoding.TranscodingJobType.Hls;
private readonly ILibraryManager _libraryManager;
@ -105,6 +106,253 @@ namespace Jellyfin.Api.Controllers
_encodingOptions = serverConfigurationManager.GetEncodingOptions();
}
/// <summary>
/// Gets an hls live stream.
/// </summary>
/// <param name="itemId">The item id.</param>
/// <param name="container">The audio container.</param>
/// <param name="static">Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false.</param>
/// <param name="params">The streaming parameters.</param>
/// <param name="tag">The tag.</param>
/// <param name="deviceProfileId">Optional. The dlna device profile id to utilize.</param>
/// <param name="playSessionId">The play session id.</param>
/// <param name="segmentContainer">The segment container.</param>
/// <param name="segmentLength">The segment length.</param>
/// <param name="minSegments">The minimum number of segments.</param>
/// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
/// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
/// <param name="audioCodec">Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma.</param>
/// <param name="enableAutoStreamCopy">Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.</param>
/// <param name="allowVideoStreamCopy">Whether or not to allow copying of the video stream url.</param>
/// <param name="allowAudioStreamCopy">Whether or not to allow copying of the audio stream url.</param>
/// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
/// <param name="audioSampleRate">Optional. Specify a specific audio sample rate, e.g. 44100.</param>
/// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
/// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
/// <param name="audioChannels">Optional. Specify a specific number of audio channels to encode to, e.g. 2.</param>
/// <param name="maxAudioChannels">Optional. Specify a maximum number of audio channels to encode to, e.g. 2.</param>
/// <param name="profile">Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high.</param>
/// <param name="level">Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1.</param>
/// <param name="framerate">Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
/// <param name="maxFramerate">Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
/// <param name="copyTimestamps">Whether or not to copy timestamps when transcoding with an offset. Defaults to false.</param>
/// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000 ticks = 1 ms).</param>
/// <param name="width">Optional. The fixed horizontal resolution of the encoded video.</param>
/// <param name="height">Optional. The fixed vertical resolution of the encoded video.</param>
/// <param name="videoBitRate">Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.</param>
/// <param name="subtitleStreamIndex">Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.</param>
/// <param name="subtitleMethod">Optional. Specify the subtitle delivery method.</param>
/// <param name="maxRefFrames">Optional.</param>
/// <param name="maxVideoBitDepth">Optional. The maximum video bit depth.</param>
/// <param name="requireAvc">Optional. Whether to require avc.</param>
/// <param name="deInterlace">Optional. Whether to deinterlace the video.</param>
/// <param name="requireNonAnamorphic">Optional. Whether to require a non anamorphic stream.</param>
/// <param name="transcodingMaxAudioChannels">Optional. The maximum number of audio channels to transcode.</param>
/// <param name="cpuCoreLimit">Optional. The limit of how many cpu cores to use.</param>
/// <param name="liveStreamId">The live stream id.</param>
/// <param name="enableMpegtsM2TsMode">Optional. Whether to enable the MpegtsM2Ts mode.</param>
/// <param name="videoCodec">Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vp8, vp9, vpx (deprecated), wmv.</param>
/// <param name="subtitleCodec">Optional. Specify a subtitle codec to encode to.</param>
/// <param name="transcodeReasons">Optional. The transcoding reason.</param>
/// <param name="audioStreamIndex">Optional. The index of the audio stream to use. If omitted the first audio stream will be used.</param>
/// <param name="videoStreamIndex">Optional. The index of the video stream to use. If omitted the first video stream will be used.</param>
/// <param name="context">Optional. The <see cref="EncodingContext"/>.</param>
/// <param name="streamOptions">Optional. The streaming options.</param>
/// <param name="maxWidth">Optional. The max width.</param>
/// <param name="maxHeight">Optional. The max height.</param>
/// <param name="enableSubtitlesInManifest">Optional. Whether to enable subtitles in the manifest.</param>
/// <response code="200">Hls live stream retrieved.</response>
/// <returns>A <see cref="FileResult"/> containing the hls file.</returns>
[HttpGet("Videos/{itemId}/live.m3u8")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesPlaylistFile]
public async Task<ActionResult> GetLiveHlsStream(
[FromRoute, Required] Guid itemId,
[FromQuery] string? container,
[FromQuery] bool? @static,
[FromQuery] string? @params,
[FromQuery] string? tag,
[FromQuery] string? deviceProfileId,
[FromQuery] string? playSessionId,
[FromQuery] string? segmentContainer,
[FromQuery] int? segmentLength,
[FromQuery] int? minSegments,
[FromQuery] string? mediaSourceId,
[FromQuery] string? deviceId,
[FromQuery] string? audioCodec,
[FromQuery] bool? enableAutoStreamCopy,
[FromQuery] bool? allowVideoStreamCopy,
[FromQuery] bool? allowAudioStreamCopy,
[FromQuery] bool? breakOnNonKeyFrames,
[FromQuery] int? audioSampleRate,
[FromQuery] int? maxAudioBitDepth,
[FromQuery] int? audioBitRate,
[FromQuery] int? audioChannels,
[FromQuery] int? maxAudioChannels,
[FromQuery] string? profile,
[FromQuery] string? level,
[FromQuery] float? framerate,
[FromQuery] float? maxFramerate,
[FromQuery] bool? copyTimestamps,
[FromQuery] long? startTimeTicks,
[FromQuery] int? width,
[FromQuery] int? height,
[FromQuery] int? videoBitRate,
[FromQuery] int? subtitleStreamIndex,
[FromQuery] SubtitleDeliveryMethod? subtitleMethod,
[FromQuery] int? maxRefFrames,
[FromQuery] int? maxVideoBitDepth,
[FromQuery] bool? requireAvc,
[FromQuery] bool? deInterlace,
[FromQuery] bool? requireNonAnamorphic,
[FromQuery] int? transcodingMaxAudioChannels,
[FromQuery] int? cpuCoreLimit,
[FromQuery] string? liveStreamId,
[FromQuery] bool? enableMpegtsM2TsMode,
[FromQuery] string? videoCodec,
[FromQuery] string? subtitleCodec,
[FromQuery] string? transcodeReasons,
[FromQuery] int? audioStreamIndex,
[FromQuery] int? videoStreamIndex,
[FromQuery] EncodingContext? context,
[FromQuery] Dictionary<string, string> streamOptions,
[FromQuery] int? maxWidth,
[FromQuery] int? maxHeight,
[FromQuery] bool? enableSubtitlesInManifest)
{
VideoRequestDto streamingRequest = new VideoRequestDto
{
Id = itemId,
Container = container,
Static = @static ?? false,
Params = @params,
Tag = tag,
DeviceProfileId = deviceProfileId,
PlaySessionId = playSessionId,
SegmentContainer = segmentContainer,
SegmentLength = segmentLength,
MinSegments = minSegments,
MediaSourceId = mediaSourceId,
DeviceId = deviceId,
AudioCodec = audioCodec,
EnableAutoStreamCopy = enableAutoStreamCopy ?? true,
AllowAudioStreamCopy = allowAudioStreamCopy ?? true,
AllowVideoStreamCopy = allowVideoStreamCopy ?? true,
BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
AudioSampleRate = audioSampleRate,
MaxAudioChannels = maxAudioChannels,
AudioBitRate = audioBitRate,
MaxAudioBitDepth = maxAudioBitDepth,
AudioChannels = audioChannels,
Profile = profile,
Level = level,
Framerate = framerate,
MaxFramerate = maxFramerate,
CopyTimestamps = copyTimestamps ?? false,
StartTimeTicks = startTimeTicks,
Width = width,
Height = height,
VideoBitRate = videoBitRate,
SubtitleStreamIndex = subtitleStreamIndex,
SubtitleMethod = subtitleMethod ?? SubtitleDeliveryMethod.Encode,
MaxRefFrames = maxRefFrames,
MaxVideoBitDepth = maxVideoBitDepth,
RequireAvc = requireAvc ?? false,
DeInterlace = deInterlace ?? false,
RequireNonAnamorphic = requireNonAnamorphic ?? false,
TranscodingMaxAudioChannels = transcodingMaxAudioChannels,
CpuCoreLimit = cpuCoreLimit,
LiveStreamId = liveStreamId,
EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? false,
VideoCodec = videoCodec,
SubtitleCodec = subtitleCodec,
TranscodeReasons = transcodeReasons,
AudioStreamIndex = audioStreamIndex,
VideoStreamIndex = videoStreamIndex,
Context = context ?? EncodingContext.Streaming,
StreamOptions = streamOptions,
MaxHeight = maxHeight,
MaxWidth = maxWidth,
EnableSubtitlesInManifest = enableSubtitlesInManifest ?? true
};
// CTS lifecycle is managed internally.
var cancellationTokenSource = new CancellationTokenSource();
// Due to CTS.Token calling ThrowIfDisposed (https://github.com/dotnet/runtime/issues/29970) we have to "cache" the token
// since it gets disposed when ffmpeg exits
var cancellationToken = cancellationTokenSource.Token;
using var state = await StreamingHelpers.GetStreamingState(
streamingRequest,
Request,
_authContext,
_mediaSourceManager,
_userManager,
_libraryManager,
_serverConfigurationManager,
_mediaEncoder,
_encodingHelper,
_dlnaManager,
_deviceManager,
_transcodingJobHelper,
TranscodingJobType,
cancellationToken)
.ConfigureAwait(false);
TranscodingJobDto? job = null;
var playlistPath = Path.ChangeExtension(state.OutputFilePath, ".m3u8");
if (!System.IO.File.Exists(playlistPath))
{
var transcodingLock = _transcodingJobHelper.GetTranscodingLock(playlistPath);
await transcodingLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (!System.IO.File.Exists(playlistPath))
{
// If the playlist doesn't already exist, start up ffmpeg
try
{
job = await _transcodingJobHelper.StartFfMpeg(
state,
playlistPath,
GetCommandLineArguments(playlistPath, state, true, 0),
Request,
TranscodingJobType,
cancellationTokenSource)
.ConfigureAwait(false);
job.IsLiveOutput = true;
}
catch
{
state.Dispose();
throw;
}
minSegments = state.MinSegments;
if (minSegments > 0)
{
await HlsHelpers.WaitForMinimumSegmentCount(playlistPath, minSegments, _logger, cancellationToken).ConfigureAwait(false);
}
}
}
finally
{
transcodingLock.Release();
}
}
job ??= _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, TranscodingJobType);
if (job != null)
{
_transcodingJobHelper.OnTranscodeEndRequest(job);
}
var playlistText = HlsHelpers.GetLivePlaylistText(playlistPath, state);
return Content(playlistText, MimeTypes.GetMimeType("playlist.m3u8"));
}
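
For context, a minimal client sketch of how this endpoint might be exercised; the base address, token header, and item id below are illustrative placeholders and are not part of this change.

using System;
using System.Net.Http;
using System.Threading.Tasks;

// Hypothetical caller of GET Videos/{itemId}/live.m3u8. The server address and the
// auth header used here are assumptions; all optional query parameters are omitted.
internal static class LiveHlsClientSketch
{
    public static async Task<string> FetchLivePlaylistAsync(Guid itemId)
    {
        using var client = new HttpClient { BaseAddress = new Uri("http://localhost:8096/") };
        client.DefaultRequestHeaders.Add("X-Emby-Token", "<api-key>"); // placeholder credentials

        using var response = await client.GetAsync($"Videos/{itemId}/live.m3u8").ConfigureAwait(false);
        response.EnsureSuccessStatusCode();

        // The body is the EVENT playlist text returned by HlsHelpers.GetLivePlaylistText.
        return await response.Content.ReadAsStringAsync().ConfigureAwait(false);
    }
}
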
/// <summary>
/// Gets a video hls playlist stream.
/// </summary>
@ -1149,7 +1397,7 @@ namespace Jellyfin.Api.Controllers
.AppendLine("#EXT-X-MEDIA-SEQUENCE:0");
var index = 0;
var segmentExtension = GetSegmentFileExtension(streamingRequest.SegmentContainer);
var segmentExtension = EncodingHelper.GetSegmentFileExtension(streamingRequest.SegmentContainer);
var queryString = Request.QueryString;
if (isHlsInFmp4)
@ -1214,7 +1462,7 @@ namespace Jellyfin.Api.Controllers
var segmentPath = GetSegmentPath(state, playlistPath, segmentId);
var segmentExtension = GetSegmentFileExtension(state.Request.SegmentContainer);
var segmentExtension = EncodingHelper.GetSegmentFileExtension(state.Request.SegmentContainer);
TranscodingJobDto? job;
@ -1286,7 +1534,7 @@ namespace Jellyfin.Api.Controllers
job = await _transcodingJobHelper.StartFfMpeg(
state,
playlistPath,
GetCommandLineArguments(playlistPath, state, true, segmentId),
GetCommandLineArguments(playlistPath, state, false, segmentId),
Request,
TranscodingJobType,
cancellationTokenSource).ConfigureAwait(false);
@ -1346,7 +1594,7 @@ namespace Jellyfin.Api.Controllers
return segments;
}
private string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding, int startNumber)
private string GetCommandLineArguments(string outputPath, StreamState state, bool isEventPlaylist, int startNumber)
{
var videoCodec = _encodingHelper.GetVideoEncoder(state, _encodingOptions);
var threads = EncodingHelper.GetNumberOfThreads(state, _encodingOptions, videoCodec);
@ -1361,15 +1609,13 @@ namespace Jellyfin.Api.Controllers
state.BaseRequest.BreakOnNonKeyFrames = false;
}
// If isEncoding is true we're actually starting ffmpeg
var startNumberParam = isEncoding ? startNumber.ToString(CultureInfo.InvariantCulture) : "0";
var inputModifier = _encodingHelper.GetInputModifier(state, _encodingOptions);
var mapArgs = state.IsOutputVideo ? _encodingHelper.GetMapArgs(state) : string.Empty;
var directory = Path.GetDirectoryName(outputPath) ?? throw new ArgumentException($"Provided path ({outputPath}) is not valid.", nameof(outputPath));
var outputFileNameWithoutExtension = Path.GetFileNameWithoutExtension(outputPath);
var outputPrefix = Path.Combine(directory, outputFileNameWithoutExtension);
var outputExtension = GetSegmentFileExtension(state.Request.SegmentContainer);
var outputExtension = EncodingHelper.GetSegmentFileExtension(state.Request.SegmentContainer);
var outputTsArg = outputPrefix + "%d" + outputExtension;
var segmentFormat = outputExtension.TrimStart('.');
@ -1398,19 +1644,30 @@ namespace Jellyfin.Api.Controllers
? _encodingOptions.MaxMuxingQueueSize.ToString(CultureInfo.InvariantCulture)
: "128";
var baseUrlParam = string.Empty;
if (isEventPlaylist)
{
baseUrlParam = string.Format(
CultureInfo.InvariantCulture,
" -hls_base_url \"hls/{0}/\"",
Path.GetFileNameWithoutExtension(outputPath));
}
return string.Format(
CultureInfo.InvariantCulture,
"{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -copyts -avoid_negative_ts disabled -max_muxing_queue_size {6} -f hls -max_delay 5000000 -hls_time {7} -hls_segment_type {8} -start_number {9} -hls_segment_filename \"{10}\" -hls_playlist_type vod -hls_list_size 0 -y \"{11}\"",
"{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -copyts -avoid_negative_ts disabled -max_muxing_queue_size {6} -f hls -max_delay 5000000 -hls_time {7} -hls_segment_type {8} -start_number {9}{10} -hls_segment_filename \"{12}\" -hls_playlist_type {11} -hls_list_size 0 -y \"{13}\"",
inputModifier,
_encodingHelper.GetInputArgument(state, _encodingOptions),
threads,
mapArgs,
GetVideoArguments(state, startNumber),
GetVideoArguments(state, startNumber, isEventPlaylist),
GetAudioArguments(state),
maxMuxingQueueSize,
state.SegmentLength.ToString(CultureInfo.InvariantCulture),
segmentFormat,
startNumberParam,
startNumber.ToString(CultureInfo.InvariantCulture),
baseUrlParam,
isEventPlaylist ? "event" : "vod",
outputTsArg,
outputPath).Trim();
}
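
To make the new placeholders concrete, here is a small sketch (not code from the PR) showing how isEventPlaylist changes only the -hls_base_url and -hls_playlist_type portions of the command line; the playlist path in the usage comment is made up.

using System.Globalization;
using System.IO;

// Sketch only: live streams get an EVENT playlist whose segment URIs are rooted at
// hls/<name>/, while seek-into-VOD requests keep a plain VOD playlist and the real
// start number.
internal static class HlsPlaylistFlagSketch
{
    public static string GetPlaylistFlags(string outputPath, bool isEventPlaylist, int startNumber)
    {
        var baseUrlParam = isEventPlaylist
            ? string.Format(
                CultureInfo.InvariantCulture,
                " -hls_base_url \"hls/{0}/\"",
                Path.GetFileNameWithoutExtension(outputPath))
            : string.Empty;

        return string.Format(
            CultureInfo.InvariantCulture,
            "-start_number {0}{1} -hls_playlist_type {2}",
            startNumber,
            baseUrlParam,
            isEventPlaylist ? "event" : "vod");
    }
}

// GetPlaylistFlags("/cache/transcodes/abc123.m3u8", true, 0)
//   => -start_number 0 -hls_base_url "hls/abc123/" -hls_playlist_type event
// GetPlaylistFlags("/cache/transcodes/abc123.m3u8", false, 42)
//   => -start_number 42 -hls_playlist_type vod
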
@ -1505,8 +1762,9 @@ namespace Jellyfin.Api.Controllers
/// </summary>
/// <param name="state">The <see cref="StreamState"/>.</param>
/// <param name="startNumber">The first number in the hls sequence.</param>
/// <param name="isEventPlaylist">Whether the playlist is EVENT or VOD.</param>
/// <returns>The command line arguments for video transcoding.</returns>
private string GetVideoArguments(StreamState state, int startNumber)
private string GetVideoArguments(StreamState state, int startNumber, bool isEventPlaylist)
{
if (state.VideoStream == null)
{
@ -1539,6 +1797,7 @@ namespace Jellyfin.Api.Controllers
// See if we can save some cpu cycles by avoiding encoding.
if (EncodingHelper.IsCopyCodec(codec))
{
// If h264_mp4toannexb is ever added, do not use it for live tv.
if (state.VideoStream != null && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
string bitStreamArgs = EncodingHelper.GetBitStreamArgs(state.VideoStream);
@ -1549,15 +1808,13 @@ namespace Jellyfin.Api.Controllers
}
args += " -start_at_zero";
// args += " -flags -global_header";
}
else
{
args += _encodingHelper.GetVideoQualityParam(state, codec, _encodingOptions, DefaultEncoderPreset);
args += _encodingHelper.GetVideoQualityParam(state, codec, _encodingOptions, isEventPlaylist ? DefaultEventEncoderPreset : DefaultVodEncoderPreset);
// Set the key frame params for video encoding to match the hls segment time.
args += _encodingHelper.GetHlsVideoKeyFrameArguments(state, codec, state.SegmentLength, false, startNumber);
args += _encodingHelper.GetHlsVideoKeyFrameArguments(state, codec, state.SegmentLength, isEventPlaylist, startNumber);
// Currently b-frames in libx265 break the FMP4-HLS playback on iOS, disable it for now.
if (string.Equals(codec, "libx265", StringComparison.OrdinalIgnoreCase))
@ -1567,27 +1824,25 @@ namespace Jellyfin.Api.Controllers
// args += " -mixed-refs 0 -refs 3 -x264opts b_pyramid=0:weightb=0:weightp=0";
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
if (hasGraphicalSubs)
{
// Graphical subs overlay and resolution params.
args += _encodingHelper.GetGraphicalSubtitleParam(state, _encodingOptions, codec);
}
else
{
// Resolution params.
args += _encodingHelper.GetOutputSizeParam(state, _encodingOptions, codec);
}
// video processing filters.
args += _encodingHelper.GetVideoProcessingFilterParam(state, _encodingOptions, codec);
// -start_at_zero is necessary to use with -ss when seeking,
// otherwise the target position cannot be determined.
if (!(state.SubtitleStream != null && state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
if (state.SubtitleStream != null)
{
args += " -start_at_zero";
// Disable start_at_zero for external graphical subs
if (!(state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream))
{
args += " -start_at_zero";
}
}
}
// args += " -flags -global_header";
// TODO why was this not enabled for VOD?
if (isEventPlaylist)
{
args += " -flags -global_header";
}
if (!string.IsNullOrEmpty(state.OutputVideoSync))
@ -1600,22 +1855,12 @@ namespace Jellyfin.Api.Controllers
return args;
}
private string GetSegmentFileExtension(string? segmentContainer)
{
if (!string.IsNullOrWhiteSpace(segmentContainer))
{
return "." + segmentContainer;
}
return ".ts";
}
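
This private helper is removed in favour of the shared EncodingHelper.GetSegmentFileExtension used above. The full EncodingHelper diff is suppressed in this view, so the following is only an assumed sketch of the moved method, mirroring the removed body.

// Assumed shape of the shared helper; the real implementation lives in the
// (suppressed) EncodingHelper diff. Falls back to MPEG-TS segments by default.
public static string GetSegmentFileExtension(string? segmentContainer)
{
    if (!string.IsNullOrWhiteSpace(segmentContainer))
    {
        return "." + segmentContainer;
    }

    return ".ts";
}
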
private string GetSegmentPath(StreamState state, string playlist, int index)
{
var folder = Path.GetDirectoryName(playlist) ?? throw new ArgumentException($"Provided path ({playlist}) is not valid.", nameof(playlist));
var filename = Path.GetFileNameWithoutExtension(playlist);
return Path.Combine(folder, filename + index.ToString(CultureInfo.InvariantCulture) + GetSegmentFileExtension(state.Request.SegmentContainer));
return Path.Combine(folder, filename + index.ToString(CultureInfo.InvariantCulture) + EncodingHelper.GetSegmentFileExtension(state.Request.SegmentContainer));
}
private async Task<ActionResult> GetSegmentResult(


@ -1,586 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Jellyfin.Api.Attributes;
using Jellyfin.Api.Constants;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.Models.PlaybackDtos;
using Jellyfin.Api.Models.StreamingDtos;
using MediaBrowser.Common.Configuration;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Devices;
using MediaBrowser.Controller.Dlna;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Net;
using MediaBrowser.Model.Configuration;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Net;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
namespace Jellyfin.Api.Controllers
{
/// <summary>
/// The video hls controller.
/// </summary>
[Route("")]
[Authorize(Policy = Policies.DefaultAuthorization)]
public class VideoHlsController : BaseJellyfinApiController
{
private const string DefaultEncoderPreset = "superfast";
private const TranscodingJobType TranscodingJobType = MediaBrowser.Controller.MediaEncoding.TranscodingJobType.Hls;
private readonly EncodingHelper _encodingHelper;
private readonly IDlnaManager _dlnaManager;
private readonly IAuthorizationContext _authContext;
private readonly IUserManager _userManager;
private readonly ILibraryManager _libraryManager;
private readonly IMediaSourceManager _mediaSourceManager;
private readonly IServerConfigurationManager _serverConfigurationManager;
private readonly IMediaEncoder _mediaEncoder;
private readonly IDeviceManager _deviceManager;
private readonly TranscodingJobHelper _transcodingJobHelper;
private readonly ILogger<VideoHlsController> _logger;
private readonly EncodingOptions _encodingOptions;
/// <summary>
/// Initializes a new instance of the <see cref="VideoHlsController"/> class.
/// </summary>
/// <param name="mediaEncoder">Instance of the <see cref="IMediaEncoder"/> interface.</param>
/// <param name="dlnaManager">Instance of the <see cref="IDlnaManager"/> interface.</param>
/// <param name="userManger">Instance of the <see cref="IUserManager"/> interface.</param>
/// <param name="authorizationContext">Instance of the <see cref="IAuthorizationContext"/> interface.</param>
/// <param name="libraryManager">Instance of the <see cref="ILibraryManager"/> interface.</param>
/// <param name="mediaSourceManager">Instance of the <see cref="IMediaSourceManager"/> interface.</param>
/// <param name="serverConfigurationManager">Instance of the <see cref="IServerConfigurationManager"/> interface.</param>
/// <param name="deviceManager">Instance of the <see cref="IDeviceManager"/> interface.</param>
/// <param name="transcodingJobHelper">The <see cref="TranscodingJobHelper"/> singleton.</param>
/// <param name="logger">Instance of the <see cref="ILogger{VideoHlsController}"/>.</param>
/// <param name="encodingHelper">Instance of <see cref="EncodingHelper"/>.</param>
public VideoHlsController(
IMediaEncoder mediaEncoder,
IDlnaManager dlnaManager,
IUserManager userManger,
IAuthorizationContext authorizationContext,
ILibraryManager libraryManager,
IMediaSourceManager mediaSourceManager,
IServerConfigurationManager serverConfigurationManager,
IDeviceManager deviceManager,
TranscodingJobHelper transcodingJobHelper,
ILogger<VideoHlsController> logger,
EncodingHelper encodingHelper)
{
_dlnaManager = dlnaManager;
_authContext = authorizationContext;
_userManager = userManger;
_libraryManager = libraryManager;
_mediaSourceManager = mediaSourceManager;
_serverConfigurationManager = serverConfigurationManager;
_mediaEncoder = mediaEncoder;
_deviceManager = deviceManager;
_transcodingJobHelper = transcodingJobHelper;
_logger = logger;
_encodingHelper = encodingHelper;
_encodingOptions = serverConfigurationManager.GetEncodingOptions();
}
/// <summary>
/// Gets an hls live stream.
/// </summary>
/// <param name="itemId">The item id.</param>
/// <param name="container">The audio container.</param>
/// <param name="static">Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false.</param>
/// <param name="params">The streaming parameters.</param>
/// <param name="tag">The tag.</param>
/// <param name="deviceProfileId">Optional. The dlna device profile id to utilize.</param>
/// <param name="playSessionId">The play session id.</param>
/// <param name="segmentContainer">The segment container.</param>
/// <param name="segmentLength">The segment length.</param>
/// <param name="minSegments">The minimum number of segments.</param>
/// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
/// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
/// <param name="audioCodec">Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma.</param>
/// <param name="enableAutoStreamCopy">Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.</param>
/// <param name="allowVideoStreamCopy">Whether or not to allow copying of the video stream url.</param>
/// <param name="allowAudioStreamCopy">Whether or not to allow copying of the audio stream url.</param>
/// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
/// <param name="audioSampleRate">Optional. Specify a specific audio sample rate, e.g. 44100.</param>
/// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
/// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
/// <param name="audioChannels">Optional. Specify a specific number of audio channels to encode to, e.g. 2.</param>
/// <param name="maxAudioChannels">Optional. Specify a maximum number of audio channels to encode to, e.g. 2.</param>
/// <param name="profile">Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high.</param>
/// <param name="level">Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1.</param>
/// <param name="framerate">Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
/// <param name="maxFramerate">Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
/// <param name="copyTimestamps">Whether or not to copy timestamps when transcoding with an offset. Defaults to false.</param>
/// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000 ticks = 1 ms).</param>
/// <param name="width">Optional. The fixed horizontal resolution of the encoded video.</param>
/// <param name="height">Optional. The fixed vertical resolution of the encoded video.</param>
/// <param name="videoBitRate">Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.</param>
/// <param name="subtitleStreamIndex">Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.</param>
/// <param name="subtitleMethod">Optional. Specify the subtitle delivery method.</param>
/// <param name="maxRefFrames">Optional.</param>
/// <param name="maxVideoBitDepth">Optional. The maximum video bit depth.</param>
/// <param name="requireAvc">Optional. Whether to require avc.</param>
/// <param name="deInterlace">Optional. Whether to deinterlace the video.</param>
/// <param name="requireNonAnamorphic">Optional. Whether to require a non anamorphic stream.</param>
/// <param name="transcodingMaxAudioChannels">Optional. The maximum number of audio channels to transcode.</param>
/// <param name="cpuCoreLimit">Optional. The limit of how many cpu cores to use.</param>
/// <param name="liveStreamId">The live stream id.</param>
/// <param name="enableMpegtsM2TsMode">Optional. Whether to enable the MpegtsM2Ts mode.</param>
/// <param name="videoCodec">Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vp8, vp9, vpx (deprecated), wmv.</param>
/// <param name="subtitleCodec">Optional. Specify a subtitle codec to encode to.</param>
/// <param name="transcodeReasons">Optional. The transcoding reason.</param>
/// <param name="audioStreamIndex">Optional. The index of the audio stream to use. If omitted the first audio stream will be used.</param>
/// <param name="videoStreamIndex">Optional. The index of the video stream to use. If omitted the first video stream will be used.</param>
/// <param name="context">Optional. The <see cref="EncodingContext"/>.</param>
/// <param name="streamOptions">Optional. The streaming options.</param>
/// <param name="maxWidth">Optional. The max width.</param>
/// <param name="maxHeight">Optional. The max height.</param>
/// <param name="enableSubtitlesInManifest">Optional. Whether to enable subtitles in the manifest.</param>
/// <response code="200">Hls live stream retrieved.</response>
/// <returns>A <see cref="FileResult"/> containing the hls file.</returns>
[HttpGet("Videos/{itemId}/live.m3u8")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesPlaylistFile]
public async Task<ActionResult> GetLiveHlsStream(
[FromRoute, Required] Guid itemId,
[FromQuery] string? container,
[FromQuery] bool? @static,
[FromQuery] string? @params,
[FromQuery] string? tag,
[FromQuery] string? deviceProfileId,
[FromQuery] string? playSessionId,
[FromQuery] string? segmentContainer,
[FromQuery] int? segmentLength,
[FromQuery] int? minSegments,
[FromQuery] string? mediaSourceId,
[FromQuery] string? deviceId,
[FromQuery] string? audioCodec,
[FromQuery] bool? enableAutoStreamCopy,
[FromQuery] bool? allowVideoStreamCopy,
[FromQuery] bool? allowAudioStreamCopy,
[FromQuery] bool? breakOnNonKeyFrames,
[FromQuery] int? audioSampleRate,
[FromQuery] int? maxAudioBitDepth,
[FromQuery] int? audioBitRate,
[FromQuery] int? audioChannels,
[FromQuery] int? maxAudioChannels,
[FromQuery] string? profile,
[FromQuery] string? level,
[FromQuery] float? framerate,
[FromQuery] float? maxFramerate,
[FromQuery] bool? copyTimestamps,
[FromQuery] long? startTimeTicks,
[FromQuery] int? width,
[FromQuery] int? height,
[FromQuery] int? videoBitRate,
[FromQuery] int? subtitleStreamIndex,
[FromQuery] SubtitleDeliveryMethod? subtitleMethod,
[FromQuery] int? maxRefFrames,
[FromQuery] int? maxVideoBitDepth,
[FromQuery] bool? requireAvc,
[FromQuery] bool? deInterlace,
[FromQuery] bool? requireNonAnamorphic,
[FromQuery] int? transcodingMaxAudioChannels,
[FromQuery] int? cpuCoreLimit,
[FromQuery] string? liveStreamId,
[FromQuery] bool? enableMpegtsM2TsMode,
[FromQuery] string? videoCodec,
[FromQuery] string? subtitleCodec,
[FromQuery] string? transcodeReasons,
[FromQuery] int? audioStreamIndex,
[FromQuery] int? videoStreamIndex,
[FromQuery] EncodingContext? context,
[FromQuery] Dictionary<string, string> streamOptions,
[FromQuery] int? maxWidth,
[FromQuery] int? maxHeight,
[FromQuery] bool? enableSubtitlesInManifest)
{
VideoRequestDto streamingRequest = new VideoRequestDto
{
Id = itemId,
Container = container,
Static = @static ?? false,
Params = @params,
Tag = tag,
DeviceProfileId = deviceProfileId,
PlaySessionId = playSessionId,
SegmentContainer = segmentContainer,
SegmentLength = segmentLength,
MinSegments = minSegments,
MediaSourceId = mediaSourceId,
DeviceId = deviceId,
AudioCodec = audioCodec,
EnableAutoStreamCopy = enableAutoStreamCopy ?? true,
AllowAudioStreamCopy = allowAudioStreamCopy ?? true,
AllowVideoStreamCopy = allowVideoStreamCopy ?? true,
BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
AudioSampleRate = audioSampleRate,
MaxAudioChannels = maxAudioChannels,
AudioBitRate = audioBitRate,
MaxAudioBitDepth = maxAudioBitDepth,
AudioChannels = audioChannels,
Profile = profile,
Level = level,
Framerate = framerate,
MaxFramerate = maxFramerate,
CopyTimestamps = copyTimestamps ?? false,
StartTimeTicks = startTimeTicks,
Width = width,
Height = height,
VideoBitRate = videoBitRate,
SubtitleStreamIndex = subtitleStreamIndex,
SubtitleMethod = subtitleMethod ?? SubtitleDeliveryMethod.Encode,
MaxRefFrames = maxRefFrames,
MaxVideoBitDepth = maxVideoBitDepth,
RequireAvc = requireAvc ?? false,
DeInterlace = deInterlace ?? false,
RequireNonAnamorphic = requireNonAnamorphic ?? false,
TranscodingMaxAudioChannels = transcodingMaxAudioChannels,
CpuCoreLimit = cpuCoreLimit,
LiveStreamId = liveStreamId,
EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? false,
VideoCodec = videoCodec,
SubtitleCodec = subtitleCodec,
TranscodeReasons = transcodeReasons,
AudioStreamIndex = audioStreamIndex,
VideoStreamIndex = videoStreamIndex,
Context = context ?? EncodingContext.Streaming,
StreamOptions = streamOptions,
MaxHeight = maxHeight,
MaxWidth = maxWidth,
EnableSubtitlesInManifest = enableSubtitlesInManifest ?? true
};
// CTS lifecycle is managed internally.
var cancellationTokenSource = new CancellationTokenSource();
// Due to CTS.Token calling ThrowIfDisposed (https://github.com/dotnet/runtime/issues/29970) we have to "cache" the token
// since it gets disposed when ffmpeg exits
var cancellationToken = cancellationTokenSource.Token;
using var state = await StreamingHelpers.GetStreamingState(
streamingRequest,
Request,
_authContext,
_mediaSourceManager,
_userManager,
_libraryManager,
_serverConfigurationManager,
_mediaEncoder,
_encodingHelper,
_dlnaManager,
_deviceManager,
_transcodingJobHelper,
TranscodingJobType,
cancellationToken)
.ConfigureAwait(false);
TranscodingJobDto? job = null;
var playlistPath = Path.ChangeExtension(state.OutputFilePath, ".m3u8");
if (!System.IO.File.Exists(playlistPath))
{
var transcodingLock = _transcodingJobHelper.GetTranscodingLock(playlistPath);
await transcodingLock.WaitAsync(cancellationToken).ConfigureAwait(false);
try
{
if (!System.IO.File.Exists(playlistPath))
{
// If the playlist doesn't already exist, start up ffmpeg
try
{
job = await _transcodingJobHelper.StartFfMpeg(
state,
playlistPath,
GetCommandLineArguments(playlistPath, state),
Request,
TranscodingJobType,
cancellationTokenSource)
.ConfigureAwait(false);
job.IsLiveOutput = true;
}
catch
{
state.Dispose();
throw;
}
minSegments = state.MinSegments;
if (minSegments > 0)
{
await HlsHelpers.WaitForMinimumSegmentCount(playlistPath, minSegments, _logger, cancellationToken).ConfigureAwait(false);
}
}
}
finally
{
transcodingLock.Release();
}
}
job ??= _transcodingJobHelper.OnTranscodeBeginRequest(playlistPath, TranscodingJobType);
if (job != null)
{
_transcodingJobHelper.OnTranscodeEndRequest(job);
}
var playlistText = HlsHelpers.GetLivePlaylistText(playlistPath, state);
return Content(playlistText, MimeTypes.GetMimeType("playlist.m3u8"));
}
/// <summary>
/// Gets the command line arguments for ffmpeg.
/// </summary>
/// <param name="outputPath">The output path of the file.</param>
/// <param name="state">The <see cref="StreamState"/>.</param>
/// <returns>The command line arguments as a string.</returns>
private string GetCommandLineArguments(string outputPath, StreamState state)
{
var videoCodec = _encodingHelper.GetVideoEncoder(state, _encodingOptions);
var threads = EncodingHelper.GetNumberOfThreads(state, _encodingOptions, videoCodec); // GetNumberOfThreads is static.
var inputModifier = _encodingHelper.GetInputModifier(state, _encodingOptions);
var mapArgs = state.IsOutputVideo ? _encodingHelper.GetMapArgs(state) : string.Empty;
var directory = Path.GetDirectoryName(outputPath) ?? throw new ArgumentException($"Provided path ({outputPath}) is not valid.", nameof(outputPath));
var outputFileNameWithoutExtension = Path.GetFileNameWithoutExtension(outputPath);
var outputPrefix = Path.Combine(directory, outputFileNameWithoutExtension);
var outputExtension = EncodingHelper.GetSegmentFileExtension(state.Request.SegmentContainer);
var outputTsArg = outputPrefix + "%d" + outputExtension;
var segmentFormat = outputExtension.TrimStart('.');
if (string.Equals(segmentFormat, "ts", StringComparison.OrdinalIgnoreCase))
{
segmentFormat = "mpegts";
}
else if (string.Equals(segmentFormat, "mp4", StringComparison.OrdinalIgnoreCase))
{
string outputFmp4HeaderArg;
if (OperatingSystem.IsWindows())
{
// On Windows, the path of the fmp4 header file needs to be configured
outputFmp4HeaderArg = " -hls_fmp4_init_filename \"" + outputPrefix + "-1" + outputExtension + "\"";
}
else
{
// On Linux/Unix, ffmpeg generates the fmp4 header file in the m3u8 output folder
outputFmp4HeaderArg = " -hls_fmp4_init_filename \"" + outputFileNameWithoutExtension + "-1" + outputExtension + "\"";
}
segmentFormat = "fmp4" + outputFmp4HeaderArg;
}
else
{
_logger.LogError("Invalid HLS segment container: {SegmentFormat}", segmentFormat);
}
var maxMuxingQueueSize = _encodingOptions.MaxMuxingQueueSize > 128
? _encodingOptions.MaxMuxingQueueSize.ToString(CultureInfo.InvariantCulture)
: "128";
var baseUrlParam = string.Format(
CultureInfo.InvariantCulture,
"\"hls/{0}/\"",
Path.GetFileNameWithoutExtension(outputPath));
return string.Format(
CultureInfo.InvariantCulture,
"{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -copyts -avoid_negative_ts disabled -max_muxing_queue_size {6} -f hls -max_delay 5000000 -hls_time {7} -hls_segment_type {8} -start_number 0 -hls_base_url {9} -hls_playlist_type event -hls_segment_filename \"{10}\" -y \"{11}\"",
inputModifier,
_encodingHelper.GetInputArgument(state, _encodingOptions),
threads,
mapArgs,
GetVideoArguments(state),
GetAudioArguments(state),
maxMuxingQueueSize,
state.SegmentLength.ToString(CultureInfo.InvariantCulture),
segmentFormat,
baseUrlParam,
outputTsArg,
outputPath).Trim();
}
/// <summary>
/// Gets the audio arguments for transcoding.
/// </summary>
/// <param name="state">The <see cref="StreamState"/>.</param>
/// <returns>The command line arguments for audio transcoding.</returns>
private string GetAudioArguments(StreamState state)
{
if (state.AudioStream == null)
{
return string.Empty;
}
var audioCodec = _encodingHelper.GetAudioEncoder(state);
if (!state.IsOutputVideo)
{
if (EncodingHelper.IsCopyCodec(audioCodec))
{
var bitStreamArgs = EncodingHelper.GetAudioBitStreamArguments(state, state.Request.SegmentContainer, state.MediaSource.Container);
return "-acodec copy -strict -2" + bitStreamArgs;
}
var audioTranscodeParams = string.Empty;
audioTranscodeParams += "-acodec " + audioCodec;
if (state.OutputAudioBitrate.HasValue)
{
audioTranscodeParams += " -ab " + state.OutputAudioBitrate.Value.ToString(CultureInfo.InvariantCulture);
}
if (state.OutputAudioChannels.HasValue)
{
audioTranscodeParams += " -ac " + state.OutputAudioChannels.Value.ToString(CultureInfo.InvariantCulture);
}
if (state.OutputAudioSampleRate.HasValue)
{
audioTranscodeParams += " -ar " + state.OutputAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture);
}
audioTranscodeParams += " -vn";
return audioTranscodeParams;
}
if (EncodingHelper.IsCopyCodec(audioCodec))
{
var bitStreamArgs = EncodingHelper.GetAudioBitStreamArguments(state, state.Request.SegmentContainer, state.MediaSource.Container);
return "-acodec copy -strict -2" + bitStreamArgs;
}
var args = "-codec:a:0 " + audioCodec;
var channels = state.OutputAudioChannels;
if (channels.HasValue)
{
args += " -ac " + channels.Value;
}
var bitrate = state.OutputAudioBitrate;
if (bitrate.HasValue)
{
args += " -ab " + bitrate.Value.ToString(CultureInfo.InvariantCulture);
}
if (state.OutputAudioSampleRate.HasValue)
{
args += " -ar " + state.OutputAudioSampleRate.Value.ToString(CultureInfo.InvariantCulture);
}
args += _encodingHelper.GetAudioFilterParam(state, _encodingOptions);
return args;
}
/// <summary>
/// Gets the video arguments for transcoding.
/// </summary>
/// <param name="state">The <see cref="StreamState"/>.</param>
/// <returns>The command line arguments for video transcoding.</returns>
private string GetVideoArguments(StreamState state)
{
if (state.VideoStream == null)
{
return string.Empty;
}
if (!state.IsOutputVideo)
{
return string.Empty;
}
var codec = _encodingHelper.GetVideoEncoder(state, _encodingOptions);
var args = "-codec:v:0 " + codec;
// Prefer hvc1 to hev1.
if (string.Equals(state.ActualOutputVideoCodec, "h265", StringComparison.OrdinalIgnoreCase)
|| string.Equals(state.ActualOutputVideoCodec, "hevc", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase)
|| string.Equals(codec, "hevc", StringComparison.OrdinalIgnoreCase))
{
args += " -tag:v:0 hvc1";
}
// if (state.EnableMpegtsM2TsMode)
// {
// args += " -mpegts_m2ts_mode 1";
// }
// See if we can save some cpu cycles by avoiding encoding.
if (EncodingHelper.IsCopyCodec(codec))
{
// If h264_mp4toannexb is ever added, do not use it for live tv.
if (state.VideoStream != null && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
string bitStreamArgs = EncodingHelper.GetBitStreamArgs(state.VideoStream);
if (!string.IsNullOrEmpty(bitStreamArgs))
{
args += " " + bitStreamArgs;
}
}
args += " -start_at_zero";
}
else
{
args += _encodingHelper.GetVideoQualityParam(state, codec, _encodingOptions, DefaultEncoderPreset);
// Set the key frame params for video encoding to match the hls segment time.
args += _encodingHelper.GetHlsVideoKeyFrameArguments(state, codec, state.SegmentLength, true, null);
// Currently b-frames in libx265 break the FMP4-HLS playback on iOS, disable it for now.
if (string.Equals(codec, "libx265", StringComparison.OrdinalIgnoreCase))
{
args += " -bf 0";
}
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Encode;
if (hasGraphicalSubs)
{
// Graphical subs overlay and resolution params.
args += _encodingHelper.GetGraphicalSubtitleParam(state, _encodingOptions, codec);
}
else
{
// Resolution params.
args += _encodingHelper.GetOutputSizeParam(state, _encodingOptions, codec);
}
if (state.SubtitleStream == null || !state.SubtitleStream.IsExternal || state.SubtitleStream.IsTextSubtitleStream)
{
args += " -start_at_zero";
}
}
args += " -flags -global_header";
if (!string.IsNullOrEmpty(state.OutputVideoSync))
{
args += " -vsync " + state.OutputVideoSync;
}
args += _encodingHelper.GetOutputFFlags(state);
return args;
}
}
}

File diff suppressed because it is too large.


@ -110,23 +110,7 @@ namespace MediaBrowser.Controller.MediaEncoding
public string OutputContainer { get; set; }
public string OutputVideoSync
{
get
{
// For live tv + in progress recordings
if (string.Equals(InputContainer, "mpegts", StringComparison.OrdinalIgnoreCase)
|| string.Equals(InputContainer, "ts", StringComparison.OrdinalIgnoreCase))
{
if (!MediaSource.RunTimeTicks.HasValue)
{
return "cfr";
}
}
return "-1";
}
}
public string OutputVideoSync { get; set; }
public string AlbumCoverPath { get; set; }


@ -18,6 +18,16 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <summary>
/// The tonemap_opencl_bt2390.
/// </summary>
TonemapOpenclBt2390 = 2
TonemapOpenclBt2390 = 2,
/// <summary>
/// The overlay_opencl_framesync.
/// </summary>
OverlayOpenclFrameSync = 3,
/// <summary>
/// The overlay_vaapi_framesync.
/// </summary>
OverlayVaapiFrameSync = 4
}
}
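
A hedged sketch of how the new enum members pair with IMediaEncoder.SupportsFilterWithOption (declared later in this diff); the helper and its names are illustrative, not taken from the PR.

using MediaBrowser.Controller.MediaEncoding;

// Illustrative only: decides whether framesync-capable hardware overlay filters
// (overlay_opencl / overlay_vaapi) are available in the configured ffmpeg build.
internal static class OverlayCapabilitySketch
{
    public static bool CanUseOpenclOverlay(IMediaEncoder mediaEncoder)
        => mediaEncoder.SupportsFilterWithOption(FilterOptionType.OverlayOpenclFrameSync);

    public static bool CanUseVaapiOverlay(IMediaEncoder mediaEncoder)
        => mediaEncoder.SupportsFilterWithOption(FilterOptionType.OverlayVaapiFrameSync);
}
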


@ -25,6 +25,30 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <value>The encoder path.</value>
string EncoderPath { get; }
/// <summary>
/// Gets the version of the encoder.
/// </summary>
/// <returns>The version of the encoder.</returns>
Version EncoderVersion { get; }
/// <summary>
/// Whether the configured Vaapi device is from AMD (radeonsi/r600 Mesa driver).
/// </summary>
/// <value><c>true</c> if the Vaapi device is an AMD (radeonsi/r600 Mesa driver) GPU, <c>false</c> otherwise.</value>
bool IsVaapiDeviceAmd { get; }
/// <summary>
/// Whether the configured Vaapi device is from Intel (iHD driver).
/// </summary>
/// <value><c>true</c> if the Vaapi device is an Intel (iHD driver) GPU, <c>false</c> otherwise.</value>
bool IsVaapiDeviceInteliHD { get; }
/// <summary>
/// Whether the configured Vaapi device is from Intel (legacy i965 driver).
/// </summary>
/// <value><c>true</c> if the Vaapi device is an Intel (legacy i965 driver) GPU, <c>false</c> otherwise.</value>
bool IsVaapiDeviceInteli965 { get; }
/// <summary>
/// Whether given encoder codec is supported.
/// </summary>
@ -60,12 +84,6 @@ namespace MediaBrowser.Controller.MediaEncoding
/// <returns><c>true</c> if the filter is supported, <c>false</c> otherwise.</returns>
bool SupportsFilterWithOption(FilterOptionType option);
/// <summary>
/// Get the version of media encoder.
/// </summary>
/// <returns>The version of media encoder.</returns>
Version GetMediaEncoderVersion();
/// <summary>
/// Extracts the audio image.
/// </summary>
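
Nothing in this excerpt consumes the new vendor flags (that logic lives in the suppressed EncodingHelper diff), so the following is a purely illustrative reader of the new IMediaEncoder members.

using MediaBrowser.Controller.MediaEncoding;

// Hypothetical consumer: only shows how the new properties can be read together.
internal static class VaapiVendorSketch
{
    public static string Describe(IMediaEncoder mediaEncoder)
    {
        if (mediaEncoder.IsVaapiDeviceAmd)
        {
            return "AMD (radeonsi/r600 Mesa driver)";
        }

        if (mediaEncoder.IsVaapiDeviceInteliHD)
        {
            return "Intel (iHD driver)";
        }

        if (mediaEncoder.IsVaapiDeviceInteli965)
        {
            return "Intel (legacy i965 driver)";
        }

        return $"Unknown VAAPI vendor (ffmpeg {mediaEncoder.EncoderVersion})";
    }
}
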


@ -16,6 +16,12 @@ namespace MediaBrowser.MediaEncoding.Encoder
{
"h264",
"hevc",
"vp8",
"libvpx",
"vp9",
"libvpx-vp9",
"av1",
"libdav1d",
"mpeg2video",
"mpeg4",
"msmpeg4",
@ -30,6 +36,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
"vc1_qsv",
"vp8_qsv",
"vp9_qsv",
"av1_qsv",
"h264_cuvid",
"hevc_cuvid",
"mpeg2_cuvid",
@ -37,16 +44,11 @@ namespace MediaBrowser.MediaEncoding.Encoder
"mpeg4_cuvid",
"vp8_cuvid",
"vp9_cuvid",
"av1_cuvid",
"h264_mmal",
"mpeg2_mmal",
"mpeg4_mmal",
"vc1_mmal",
"h264_mediacodec",
"hevc_mediacodec",
"mpeg2_mediacodec",
"mpeg4_mediacodec",
"vp8_mediacodec",
"vp9_mediacodec",
"h264_opencl",
"hevc_opencl",
"mpeg2_opencl",
@ -89,20 +91,39 @@ namespace MediaBrowser.MediaEncoding.Encoder
private static readonly string[] _requiredFilters = new[]
{
// sw
"alphasrc",
"zscale",
// qsv
"scale_qsv",
"vpp_qsv",
"deinterlace_qsv",
"overlay_qsv",
// cuda
"scale_cuda",
"yadif_cuda",
"hwupload_cuda",
"overlay_cuda",
"tonemap_cuda",
"overlay_cuda",
"hwupload_cuda",
// opencl
"scale_opencl",
"tonemap_opencl",
"overlay_opencl",
// vaapi
"scale_vaapi",
"deinterlace_vaapi",
"tonemap_vaapi",
"overlay_vaapi",
"hwupload_vaapi"
};
private static readonly IReadOnlyDictionary<int, string[]> _filterOptionsDict = new Dictionary<int, string[]>
{
{ 0, new string[] { "scale_cuda", "Output format (default \"same\")" } },
{ 1, new string[] { "tonemap_cuda", "GPU accelerated HDR to SDR tonemapping" } },
{ 2, new string[] { "tonemap_opencl", "bt2390" } }
{ 2, new string[] { "tonemap_opencl", "bt2390" } },
{ 3, new string[] { "overlay_opencl", "Action to take when encountering EOF from secondary input" } },
{ 4, new string[] { "overlay_vaapi", "Action to take when encountering EOF from secondary input" } }
};
// These are the library versions that correspond to our minimum ffmpeg version 4.x according to the version table below
@ -144,7 +165,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
string output;
try
{
output = GetProcessOutput(_encoderPath, "-version");
output = GetProcessOutput(_encoderPath, "-version", false);
}
catch (Exception ex)
{
@ -225,7 +246,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
string output;
try
{
output = GetProcessOutput(_encoderPath, "-version");
output = GetProcessOutput(_encoderPath, "-version", false);
}
catch (Exception ex)
{
@ -318,12 +339,36 @@ namespace MediaBrowser.MediaEncoding.Encoder
return map;
}
public bool CheckVaapiDeviceByDriverName(string driverName, string renderNodePath)
{
if (!OperatingSystem.IsLinux())
{
return false;
}
if (string.IsNullOrEmpty(driverName) || string.IsNullOrEmpty(renderNodePath))
{
return false;
}
try
{
var output = GetProcessOutput(_encoderPath, "-v verbose -hide_banner -init_hw_device vaapi=va:" + renderNodePath, true);
return output.Contains(driverName, StringComparison.Ordinal);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error detecting the given vaapi render node path");
return false;
}
}
private IEnumerable<string> GetHwaccelTypes()
{
string? output = null;
try
{
output = GetProcessOutput(_encoderPath, "-hwaccels");
output = GetProcessOutput(_encoderPath, "-hwaccels", false);
}
catch (Exception ex)
{
@ -351,7 +396,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
string output;
try
{
output = GetProcessOutput(_encoderPath, "-h filter=" + filter);
output = GetProcessOutput(_encoderPath, "-h filter=" + filter, false);
}
catch (Exception ex)
{
@ -375,7 +420,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
string output;
try
{
output = GetProcessOutput(_encoderPath, "-" + codecstr);
output = GetProcessOutput(_encoderPath, "-" + codecstr, false);
}
catch (Exception ex)
{
@ -406,7 +451,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
string output;
try
{
output = GetProcessOutput(_encoderPath, "-filters");
output = GetProcessOutput(_encoderPath, "-filters", false);
}
catch (Exception ex)
{
@ -444,7 +489,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
return dict;
}
private string GetProcessOutput(string path, string arguments)
private string GetProcessOutput(string path, string arguments, bool readStdErr)
{
using (var process = new Process()
{
@ -455,7 +500,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
WindowStyle = ProcessWindowStyle.Hidden,
ErrorDialog = false,
RedirectStandardOutput = true,
// ffmpeg uses stderr to log info, don't show this
RedirectStandardError = true
}
})
@ -464,7 +508,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
process.Start();
return process.StandardOutput.ReadToEnd();
return readStdErr ? process.StandardError.ReadToEnd() : process.StandardOutput.ReadToEnd();
}
}
}


@ -65,6 +65,10 @@ namespace MediaBrowser.MediaEncoding.Encoder
private List<string> _filters = new List<string>();
private IDictionary<int, bool> _filtersWithOption = new Dictionary<int, bool>();
private bool _isVaapiDeviceAmd = false;
private bool _isVaapiDeviceInteliHD = false;
private bool _isVaapiDeviceInteli965 = false;
private Version _ffmpegVersion = null;
private string _ffmpegPath = string.Empty;
private string _ffprobePath;
@ -87,6 +91,10 @@ namespace MediaBrowser.MediaEncoding.Encoder
/// <inheritdoc />
public string EncoderPath => _ffmpegPath;
public Version EncoderVersion => _ffmpegVersion;
public bool IsVaapiDeviceAmd => _isVaapiDeviceAmd;
public bool IsVaapiDeviceInteliHD => _isVaapiDeviceInteliHD;
public bool IsVaapiDeviceInteli965 => _isVaapiDeviceInteli965;
/// <summary>
/// Run at startup or if the user removes a Custom path from transcode page.
@ -114,9 +122,9 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
// Write the FFmpeg path to the config/encoding.xml file as <EncoderAppPathDisplay> so it appears in UI
var config = _configurationManager.GetEncodingOptions();
config.EncoderAppPathDisplay = _ffmpegPath ?? string.Empty;
_configurationManager.SaveConfiguration("encoding", config);
var options = _configurationManager.GetEncodingOptions();
options.EncoderAppPathDisplay = _ffmpegPath ?? string.Empty;
_configurationManager.SaveConfiguration("encoding", options);
// Only if the ffmpeg path is set, try to set the path to ffprobe
if (_ffmpegPath != null)
@ -134,7 +142,30 @@ namespace MediaBrowser.MediaEncoding.Encoder
SetAvailableHwaccels(validator.GetHwaccels());
SetMediaEncoderVersion(validator);
_threads = EncodingHelper.GetNumberOfThreads(null, _configurationManager.GetEncodingOptions(), null);
_threads = EncodingHelper.GetNumberOfThreads(null, options, null);
// Check the Vaapi device vendor
if (OperatingSystem.IsLinux()
&& SupportsHwaccel("vaapi")
&& !string.IsNullOrEmpty(options.VaapiDevice)
&& string.Equals(options.HardwareAccelerationType, "vaapi", StringComparison.OrdinalIgnoreCase))
{
_isVaapiDeviceAmd = validator.CheckVaapiDeviceByDriverName("Mesa Gallium driver", options.VaapiDevice);
_isVaapiDeviceInteliHD = validator.CheckVaapiDeviceByDriverName("Intel iHD driver", options.VaapiDevice);
_isVaapiDeviceInteli965 = validator.CheckVaapiDeviceByDriverName("Intel i965 driver", options.VaapiDevice);
if (_isVaapiDeviceAmd)
{
_logger.LogInformation("VAAPI device {RenderNodePath} is AMD GPU", options.VaapiDevice);
}
else if (_isVaapiDeviceInteliHD)
{
_logger.LogInformation("VAAPI device {RenderNodePath} is Intel GPU (iHD)", options.VaapiDevice);
}
else if (_isVaapiDeviceInteli965)
{
_logger.LogInformation("VAAPI device {RenderNodePath} is Intel GPU (i965)", options.VaapiDevice);
}
}
}
_logger.LogInformation("FFmpeg: {FfmpegPath}", _ffmpegPath ?? string.Empty);
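
A hedged sketch of the conditions that enable the VAAPI vendor probe above; the values mirror defaults that appear elsewhere in this diff, and the snippet is illustrative rather than code from the PR.

using MediaBrowser.Model.Configuration;

// Illustrative settings under which MediaEncoder runs CheckVaapiDeviceByDriverName
// at startup: a Linux host, ffmpeg reporting vaapi support, a configured render node,
// and "vaapi" selected as the hardware acceleration type.
internal static class VaapiProbePreconditionsSketch
{
    public static EncodingOptions Example()
    {
        return new EncodingOptions
        {
            HardwareAccelerationType = "vaapi",
            VaapiDevice = "/dev/dri/renderD128" // default DRM render node
        };
    }
}
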
@ -301,11 +332,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return false;
}
public Version GetMediaEncoderVersion()
{
return _ffmpegVersion;
}
public bool CanEncodeToAudioCodec(string codec)
{
if (string.Equals(codec, "opus", StringComparison.OrdinalIgnoreCase))
@ -508,36 +534,9 @@ namespace MediaBrowser.MediaEncoding.Encoder
if (!isAudio)
{
// The failure of HDR extraction usually occurs when using custom ffmpeg that does not contain the zscale filter.
try
{
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, true, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
throw;
}
catch (Exception ex)
{
_logger.LogError(ex, "I-frame or HDR image extraction failed, will attempt with I-frame extraction disabled. Input: {Arguments}", inputArgument);
}
try
{
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, true, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
throw;
}
catch (Exception ex)
{
_logger.LogError(ex, "HDR image extraction failed, will fallback to SDR image extraction. Input: {Arguments}", inputArgument);
}
try
{
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, false, targetFormat, cancellationToken).ConfigureAwait(false);
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, true, targetFormat, cancellationToken).ConfigureAwait(false);
}
catch (ArgumentException)
{
@ -549,10 +548,10 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, false, targetFormat, cancellationToken).ConfigureAwait(false);
return await ExtractImageInternal(inputArgument, container, videoStream, imageStreamIndex, threedFormat, offset, false, targetFormat, cancellationToken).ConfigureAwait(false);
}
private async Task<string> ExtractImageInternal(string inputPath, string container, MediaStream videoStream, int? imageStreamIndex, Video3DFormat? threedFormat, TimeSpan? offset, bool useIFrame, bool allowTonemap, ImageFormat? targetFormat, CancellationToken cancellationToken)
private async Task<string> ExtractImageInternal(string inputPath, string container, MediaStream videoStream, int? imageStreamIndex, Video3DFormat? threedFormat, TimeSpan? offset, bool useIFrame, ImageFormat? targetFormat, CancellationToken cancellationToken)
{
if (string.IsNullOrEmpty(inputPath))
{
@ -572,36 +571,32 @@ namespace MediaBrowser.MediaEncoding.Encoder
var tempExtractPath = Path.Combine(_configurationManager.ApplicationPaths.TempDirectory, Guid.NewGuid() + outputExtension);
Directory.CreateDirectory(Path.GetDirectoryName(tempExtractPath));
// deint -> scale -> thumbnail -> tonemap.
// put the SW tonemap right after the thumbnail to do it only once to reduce cpu usage.
var filters = new List<string>();
// deinterlace using bwdif algorithm for video stream.
if (videoStream != null && videoStream.IsInterlaced)
{
filters.Add("bwdif=0:-1:0");
}
// Apply some filters to the thumbnail extracted below: crop any black lines that we made and get the correct aspect ratio.
// This filter chain may have adverse effects on recorded tv thumbnails if the aspect ratio changes during presentation (e.g. commercials at a different aspect ratio).
var vf = threedFormat switch
var scaler = threedFormat switch
{
// hsbs: crop width in half, scale to correct size, set the display aspect, crop out any black bars we may have made. Work out the correct height based on the display aspect; it will maintain the aspect where -1 in this case (3d) may not.
Video3DFormat.HalfSideBySide => "-vf crop=iw/2:ih:0:0,scale=(iw*2):ih,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
Video3DFormat.HalfSideBySide => "crop=iw/2:ih:0:0,scale=(iw*2):ih,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
// fsbs: crop width in half, set the display aspect, crop out any black bars we may have made
Video3DFormat.FullSideBySide => "-vf crop=iw/2:ih:0:0,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
Video3DFormat.FullSideBySide => "crop=iw/2:ih:0:0,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
// htab: crop height in half, scale to correct size, set the display aspect, crop out any black bars we may have made
Video3DFormat.HalfTopAndBottom => "-vf crop=iw:ih/2:0:0,scale=(iw*2):ih),setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
Video3DFormat.HalfTopAndBottom => "crop=iw:ih/2:0:0,scale=(iw*2):ih),setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
// ftab: crop height in half, set the display aspect, crop out any black bars we may have made
Video3DFormat.FullTopAndBottom => "-vf crop=iw:ih/2:0:0,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
_ => string.Empty
Video3DFormat.FullTopAndBottom => "crop=iw:ih/2:0:0,setdar=dar=a,crop=min(iw\\,ih*dar):min(ih\\,iw/dar):(iw-min(iw\\,iw*sar))/2:(ih - min (ih\\,ih/sar))/2,setsar=sar=1",
_ => "scale=trunc(iw*sar):ih"
};
var mapArg = imageStreamIndex.HasValue ? (" -map 0:" + imageStreamIndex.Value.ToString(CultureInfo.InvariantCulture)) : string.Empty;
var enableHdrExtraction = allowTonemap && string.Equals(videoStream?.VideoRange, "HDR", StringComparison.OrdinalIgnoreCase);
if (enableHdrExtraction)
{
string tonemapFilters = "zscale=t=linear:npl=100,format=gbrpf32le,zscale=p=bt709,tonemap=tonemap=hable:desat=0:peak=100,zscale=t=bt709:m=bt709,format=yuv420p";
if (vf.Length == 0)
{
vf = "-vf " + tonemapFilters;
}
else
{
vf += "," + tonemapFilters;
}
}
filters.Add(scaler);
// Use ffmpeg to sample 100 frames (we can drop this to 50 frames if required using thumbnail=50) and pick the best thumbnail. Have a fallback just in case.
// mpegts needs a larger batch size, otherwise a corrupted thumbnail will be created. A larger batch size will lower the processing speed.
@ -609,18 +604,19 @@ namespace MediaBrowser.MediaEncoding.Encoder
if (enableThumbnail)
{
var useLargerBatchSize = string.Equals("mpegts", container, StringComparison.OrdinalIgnoreCase);
var batchSize = useLargerBatchSize ? "50" : "24";
if (string.IsNullOrEmpty(vf))
{
vf = "-vf thumbnail=" + batchSize;
}
else
{
vf += ",thumbnail=" + batchSize;
}
filters.Add("thumbnail=n=" + (useLargerBatchSize ? "50" : "24"));
}
var args = string.Format(CultureInfo.InvariantCulture, "-i {0}{3} -threads {4} -v quiet -vframes 1 {2} -f image2 \"{1}\"", inputPath, tempExtractPath, vf, mapArg, _threads);
// Use SW tonemap on HDR video stream only when the zscale filter is available.
var enableHdrExtraction = string.Equals(videoStream?.VideoRange, "HDR", StringComparison.OrdinalIgnoreCase) && SupportsFilter("zscale");
if (enableHdrExtraction)
{
filters.Add("zscale=t=linear:npl=100,format=gbrpf32le,zscale=p=bt709,tonemap=tonemap=hable:desat=0:peak=100,zscale=t=bt709:m=bt709,format=yuv420p");
}
var vf = string.Join(',', filters);
var mapArg = imageStreamIndex.HasValue ? (" -map 0:" + imageStreamIndex.Value.ToString(CultureInfo.InvariantCulture)) : string.Empty;
var args = string.Format(CultureInfo.InvariantCulture, "-i {0}{3} -threads {4} -v quiet -vframes 1 -vf {2} -f image2 \"{1}\"", inputPath, tempExtractPath, vf, mapArg, _threads);
if (offset.HasValue)
{
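
To illustrate the refactored filter assembly above, here is a sketch (not from the PR) of the -vf chain that would result for an interlaced, HDR, non-3D mpegts input with thumbnail selection enabled and the zscale filter available.

using System.Collections.Generic;

// Illustrative only: mirrors the list assembled above for one specific input.
internal static class ThumbnailFilterSketch
{
    public static string BuildExampleChain()
    {
        var filters = new List<string>
        {
            "bwdif=0:-1:0",            // deinterlace interlaced video
            "scale=trunc(iw*sar):ih",  // default (non-3D) scaler
            "thumbnail=n=50",          // mpegts uses the larger batch size
            "zscale=t=linear:npl=100,format=gbrpf32le,zscale=p=bt709,"
                + "tonemap=tonemap=hable:desat=0:peak=100,zscale=t=bt709:m=bt709,format=yuv420p"
        };

        // Joined with ',' and passed to ffmpeg as the -vf argument in the args string above.
        return string.Join(',', filters);
    }
}
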


@ -777,18 +777,23 @@ namespace MediaBrowser.MediaEncoding.Probing
if (!stream.BitDepth.HasValue)
{
if (!string.IsNullOrEmpty(streamInfo.PixelFormat)
&& streamInfo.PixelFormat.Contains("p10", StringComparison.OrdinalIgnoreCase))
if (!string.IsNullOrEmpty(streamInfo.PixelFormat))
{
stream.BitDepth = 10;
}
if (!string.IsNullOrEmpty(streamInfo.Profile)
&& (streamInfo.Profile.Contains("Main 10", StringComparison.OrdinalIgnoreCase)
|| streamInfo.Profile.Contains("High 10", StringComparison.OrdinalIgnoreCase)
|| streamInfo.Profile.Contains("Profile 2", StringComparison.OrdinalIgnoreCase)))
{
stream.BitDepth = 10;
if (string.Equals(streamInfo.PixelFormat, "yuv420p", StringComparison.OrdinalIgnoreCase)
|| string.Equals(streamInfo.PixelFormat, "yuv444p", StringComparison.OrdinalIgnoreCase))
{
stream.BitDepth = 8;
}
else if (string.Equals(streamInfo.PixelFormat, "yuv420p10le", StringComparison.OrdinalIgnoreCase)
|| string.Equals(streamInfo.PixelFormat, "yuv444p10le", StringComparison.OrdinalIgnoreCase))
{
stream.BitDepth = 10;
}
else if (string.Equals(streamInfo.PixelFormat, "yuv420p12le", StringComparison.OrdinalIgnoreCase)
|| string.Equals(streamInfo.PixelFormat, "yuv444p12le", StringComparison.OrdinalIgnoreCase))
{
stream.BitDepth = 12;
}
}
}


@ -16,12 +16,9 @@ namespace MediaBrowser.Model.Configuration
// This is a DRM device that is almost guaranteed to be there on every intel platform,
// plus it's the default one in ffmpeg if you don't specify anything
VaapiDevice = "/dev/dri/renderD128";
// This is the OpenCL device that is used for tonemapping.
// The left side of the dot is the platform number, and the right side is the device number on the platform.
OpenclDevice = "0.0";
EnableTonemapping = false;
EnableVppTonemapping = false;
TonemappingAlgorithm = "hable";
TonemappingAlgorithm = "bt2390";
TonemappingRange = "auto";
TonemappingDesat = 0;
TonemappingThreshold = 0.8;
@ -34,6 +31,9 @@ namespace MediaBrowser.Model.Configuration
EnableDecodingColorDepth10Hevc = true;
EnableDecodingColorDepth10Vp9 = true;
EnableEnhancedNvdecDecoder = true;
PreferSystemNativeHwDecoder = true;
EnableIntelLowPowerH264HwEncoder = false;
EnableIntelLowPowerHevcHwEncoder = false;
EnableHardwareEncoding = true;
AllowHevcEncoding = false;
EnableSubtitleExtraction = true;
@ -70,8 +70,6 @@ namespace MediaBrowser.Model.Configuration
public string VaapiDevice { get; set; }
public string OpenclDevice { get; set; }
public bool EnableTonemapping { get; set; }
public bool EnableVppTonemapping { get; set; }
@ -104,6 +102,12 @@ namespace MediaBrowser.Model.Configuration
public bool EnableEnhancedNvdecDecoder { get; set; }
public bool PreferSystemNativeHwDecoder { get; set; }
public bool EnableIntelLowPowerH264HwEncoder { get; set; }
public bool EnableIntelLowPowerHevcHwEncoder { get; set; }
public bool EnableHardwareEncoding { get; set; }
public bool AllowHevcEncoding { get; set; }