jellyfin/MediaBrowser.Api/Playback/BaseStreamingService.cs

using MediaBrowser.Common.Extensions;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Devices;
using MediaBrowser.Controller.Dlna;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Extensions;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Serialization;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using CommonIO;
using MediaBrowser.Common.Net;
using MediaBrowser.Controller;
namespace MediaBrowser.Api.Playback
{
/// <summary>
/// Class BaseStreamingService
/// </summary>
public abstract class BaseStreamingService : BaseApiService
{
/// <summary>
/// Gets or sets the server configuration manager.
/// </summary>
/// <value>The server configuration manager.</value>
protected IServerConfigurationManager ServerConfigurationManager { get; private set; }
/// <summary>
/// Gets or sets the user manager.
/// </summary>
/// <value>The user manager.</value>
protected IUserManager UserManager { get; private set; }
/// <summary>
/// Gets or sets the library manager.
/// </summary>
/// <value>The library manager.</value>
protected ILibraryManager LibraryManager { get; private set; }
/// <summary>
/// Gets or sets the iso manager.
/// </summary>
/// <value>The iso manager.</value>
protected IIsoManager IsoManager { get; private set; }
/// <summary>
/// Gets or sets the media encoder.
/// </summary>
/// <value>The media encoder.</value>
protected IMediaEncoder MediaEncoder { get; private set; }
protected IFileSystem FileSystem { get; private set; }
protected IDlnaManager DlnaManager { get; private set; }
protected IDeviceManager DeviceManager { get; private set; }
protected ISubtitleEncoder SubtitleEncoder { get; private set; }
protected IMediaSourceManager MediaSourceManager { get; private set; }
protected IZipClient ZipClient { get; private set; }
protected IJsonSerializer JsonSerializer { get; private set; }
public static IServerApplicationHost AppHost;
public static IHttpClient HttpClient;
/// <summary>
/// Initializes a new instance of the <see cref="BaseStreamingService" /> class.
/// </summary>
protected BaseStreamingService(IServerConfigurationManager serverConfig, IUserManager userManager, ILibraryManager libraryManager, IIsoManager isoManager, IMediaEncoder mediaEncoder, IFileSystem fileSystem, IDlnaManager dlnaManager, ISubtitleEncoder subtitleEncoder, IDeviceManager deviceManager, IMediaSourceManager mediaSourceManager, IZipClient zipClient, IJsonSerializer jsonSerializer)
{
JsonSerializer = jsonSerializer;
ZipClient = zipClient;
MediaSourceManager = mediaSourceManager;
DeviceManager = deviceManager;
SubtitleEncoder = subtitleEncoder;
DlnaManager = dlnaManager;
FileSystem = fileSystem;
ServerConfigurationManager = serverConfig;
UserManager = userManager;
LibraryManager = libraryManager;
IsoManager = isoManager;
MediaEncoder = mediaEncoder;
}
/// <summary>
/// Gets the command line arguments.
/// </summary>
/// <param name="outputPath">The output path.</param>
/// <param name="state">The state.</param>
/// <param name="isEncoding">if set to <c>true</c> [is encoding].</param>
/// <returns>System.String.</returns>
protected abstract string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding);
/// <summary>
/// Gets the type of the transcoding job.
/// </summary>
/// <value>The type of the transcoding job.</value>
protected abstract TranscodingJobType TranscodingJobType { get; }
/// <summary>
/// Gets the output file extension.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetOutputFileExtension(StreamState state)
{
return Path.GetExtension(state.RequestedUrl);
}
/// <summary>
/// Gets the output file path.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetOutputFilePath(StreamState state)
{
var folder = ServerConfigurationManager.ApplicationPaths.TranscodingTempPath;
var outputFileExtension = GetOutputFileExtension(state);
var data = GetCommandLineArguments("dummy\\dummy", state, false);
data += "-" + (state.Request.DeviceId ?? string.Empty);
data += "-" + (state.Request.PlaySessionId ?? string.Empty);
var dataHash = data.GetMD5().ToString("N");
if (EnableOutputInSubFolder)
{
return Path.Combine(folder, dataHash, dataHash + (outputFileExtension ?? string.Empty).ToLower());
}
return Path.Combine(folder, dataHash + (outputFileExtension ?? string.Empty).ToLower());
}
protected virtual bool EnableOutputInSubFolder
{
get { return false; }
}
protected readonly CultureInfo UsCulture = new CultureInfo("en-US");
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(StreamRequest request)
{
var time = request.StartTimeTicks ?? 0;
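// Illustrative example (assumed output format): a StartTimeTicks of 3,000,000,000 (five minutes) yields
// roughly "-ss 00:05:00"; the exact time format is whatever MediaEncoder.GetTimeParameter produces.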
if (time > 0)
{
return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(time));
}
return string.Empty;
}
/// <summary>
/// Gets the map args.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetMapArgs(StreamState state)
{
// If we don't have known media info
// If input is video, use -sn to drop subtitles
// Otherwise just return empty
if (state.VideoStream == null && state.AudioStream == null)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
// We have media info, but we don't know the stream indexes
if (state.VideoStream != null && state.VideoStream.Index == -1)
{
return "-sn";
}
// We have media info, but we don't know the stream indexes
if (state.AudioStream != null && state.AudioStream.Index == -1)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
var args = string.Empty;
if (state.VideoStream != null)
{
args += string.Format("-map 0:{0}", state.VideoStream.Index);
}
else
{
args += "-map -0:v";
}
if (state.AudioStream != null)
{
args += string.Format(" -map 0:{0}", state.AudioStream.Index);
}
else
{
args += " -map -0:a";
}
if (state.SubtitleStream == null || state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Hls)
{
args += " -map -0:s";
}
else if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
args += " -map 1:0 -sn";
}
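// Illustrative result (video stream index 0, audio stream index 1, no burned-in subtitles):
// "-map 0:0 -map 0:1 -map -0:s"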
return args;
}
/// <summary>
/// Determines which stream will be used for playback
/// </summary>
/// <param name="allStream">All stream.</param>
/// <param name="desiredIndex">Index of the desired.</param>
/// <param name="type">The type.</param>
/// <param name="returnFirstIfNoIndex">if set to <c>true</c> [return first if no index].</param>
/// <returns>MediaStream.</returns>
private MediaStream GetMediaStream(IEnumerable<MediaStream> allStream, int? desiredIndex, MediaStreamType type, bool returnFirstIfNoIndex = true)
{
var streams = allStream.Where(s => s.Type == type).OrderBy(i => i.Index).ToList();
if (desiredIndex.HasValue)
{
var stream = streams.FirstOrDefault(s => s.Index == desiredIndex.Value);
if (stream != null)
{
return stream;
}
}
if (type == MediaStreamType.Video)
{
streams = streams.Where(i => !string.Equals(i.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase)).ToList();
}
if (returnFirstIfNoIndex && type == MediaStreamType.Audio)
{
return streams.FirstOrDefault(i => i.Channels.HasValue && i.Channels.Value > 0) ??
streams.FirstOrDefault();
}
// Just return the first one
return returnFirstIfNoIndex ? streams.FirstOrDefault() : null;
}
/// <summary>
/// Gets the number of threads.
/// </summary>
/// <returns>System.Int32.</returns>
protected int GetNumberOfThreads(StreamState state, bool isWebm)
{
var threads = ApiEntryPoint.Instance.GetEncodingOptions().EncodingThreadCount;
if (isWebm)
{
// Recommended per docs
return Math.Max(Environment.ProcessorCount - 1, 2);
}
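// Illustrative example: on an 8-core machine the webm branch above returns 7. A configured
// EncodingThreadCount of -1 falls through to 0 below, which leaves the thread count up to ffmpeg.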
// Automatic
if (threads == -1)
{
return 0;
}
return threads;
}
protected string GetH264Encoder(StreamState state)
{
var defaultEncoder = "libx264";
// Only use alternative encoders for video files.
// When using concat with folder rips, if the mfx session fails to initialize, ffmpeg will be stuck retrying and will not exit gracefully
// Since transcoding of folder rips is experimental anyway, it's not worth adding additional variables such as this.
if (state.VideoType == VideoType.VideoFile)
{
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
var hwType = encodingOptions.HardwareAccelerationType;
if (string.Equals(hwType, "qsv", StringComparison.OrdinalIgnoreCase) ||
string.Equals(hwType, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder("h264_qsv", defaultEncoder);
}
if (string.Equals(hwType, "nvenc", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder("h264_nvenc", defaultEncoder);
}
if (string.Equals(hwType, "h264_omx", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder("h264_omx", defaultEncoder);
}
if (string.Equals(hwType, "vaapi", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrWhiteSpace(encodingOptions.VaapiDevice))
{
return GetAvailableEncoder("h264_vaapi", defaultEncoder);
}
}
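// Illustrative example: with HardwareAccelerationType set to "qsv" and an ffmpeg build that reports
// the h264_qsv encoder, this method returns "h264_qsv"; otherwise it falls back to libx264.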
return defaultEncoder;
}
private string GetAvailableEncoder(string preferredEncoder, string defaultEncoder)
{
if (MediaEncoder.SupportsEncoder(preferredEncoder))
{
return preferredEncoder;
}
return defaultEncoder;
}
/// <summary>
/// Gets the video quality parameters to specify on the command line
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoEncoder">The video encoder.</param>
/// <returns>System.String.</returns>
protected string GetVideoQualityParam(StreamState state, string videoEncoder)
{
var param = string.Empty;
var isVc1 = state.VideoStream != null &&
string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
if (string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
{
if (!string.IsNullOrWhiteSpace(encodingOptions.H264Preset))
{
param += "-preset " + encodingOptions.H264Preset;
}
else
{
param += "-preset superfast";
}
if (encodingOptions.H264Crf >= 0 && encodingOptions.H264Crf <= 51)
{
param += " -crf " + encodingOptions.H264Crf.ToString(CultureInfo.InvariantCulture);
}
else
{
param += " -crf 23";
}
param += " -tune zerolatency";
}
else if (string.Equals(videoEncoder, "libx265", StringComparison.OrdinalIgnoreCase))
{
param += "-preset fast";
param += " -crf 28";
}
// h264 (h264_qsv)
else if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
param += "-preset 7 -look_ahead 0";
}
// h264 (h264_nvenc)
else if (string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase))
{
param += "-preset default";
}
// webm
else if (string.Equals(videoEncoder, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// Values 0-3, 0 being highest quality but slower
var profileScore = 0;
string crf;
var qmin = "0";
var qmax = "50";
crf = "10";
if (isVc1)
{
profileScore++;
}
// Max of 2
profileScore = Math.Min(profileScore, 2);
// http://www.webmproject.org/docs/encoder-parameters/
param += string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
profileScore.ToString(UsCulture),
crf,
qmin,
qmax);
}
else if (string.Equals(videoEncoder, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
param += "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
}
// asf/wmv
else if (string.Equals(videoEncoder, "wmv2", StringComparison.OrdinalIgnoreCase))
{
param += "-qmin 2";
}
else if (string.Equals(videoEncoder, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
param += "-mbd 2";
}
param += GetVideoBitrateParam(state, videoEncoder);
var framerate = GetFramerateParam(state);
if (framerate.HasValue)
{
param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
}
if (!string.IsNullOrEmpty(state.OutputVideoSync))
{
param += " -vsync " + state.OutputVideoSync;
}
if (!string.IsNullOrEmpty(state.VideoRequest.Profile))
{
if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
// -profile:v is not supported by h264_omx or h264_vaapi
param += " -profile:v " + state.VideoRequest.Profile;
}
}
if (!string.IsNullOrEmpty(state.VideoRequest.Level))
{
var level = NormalizeTranscodingLevel(state.OutputVideoCodec, state.VideoRequest.Level);
// h264_qsv and h264_nvenc expect levels to be expressed as a decimal. libx264 supports both decimal and non-decimal formats.
// The decimal form is also needed for libx264 due to https://trac.ffmpeg.org/ticket/3307
if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) ||
string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase) ||
string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
{
switch (level)
{
case "30":
param += " -level 3.0";
break;
case "31":
param += " -level 3.1";
break;
case "32":
param += " -level 3.2";
break;
case "40":
param += " -level 4.0";
break;
case "41":
param += " -level 4.1";
break;
case "42":
param += " -level 4.2";
break;
case "50":
param += " -level 5.0";
break;
case "51":
param += " -level 5.1";
break;
case "52":
param += " -level 5.2";
break;
default:
param += " -level " + level;
break;
}
}
else if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase))
{
param += " -level " + level;
}
}
if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
param = "-pix_fmt yuv420p " + param;
}
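// Illustrative result for libx264 (H264Preset unset, H264Crf outside 0-51, hypothetical output bitrate of
// 3,000,000 bps, no framerate/profile/level constraints):
// "-pix_fmt yuv420p -preset superfast -crf 23 -tune zerolatency -b:v 3000000 -maxrate 3000000 -bufsize 6000000"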
return param;
}
private string NormalizeTranscodingLevel(string videoCodec, string level)
{
double requestLevel;
// Clients may direct play higher than level 41, but there's no reason to transcode higher
if (double.TryParse(level, NumberStyles.Any, UsCulture, out requestLevel))
{
if (string.Equals(videoCodec, "h264", StringComparison.OrdinalIgnoreCase))
{
if (requestLevel > 41)
{
return "41";
}
}
}
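// e.g. a requested level of "51" is normalized to "41" for h264; anything else is returned unchanged.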
return level;
}
protected string GetAudioFilterParam(StreamState state, bool isHls)
{
var volParam = string.Empty;
var audioSampleRate = string.Empty;
var channels = state.OutputAudioChannels;
// Boost volume to 200% when downsampling from 6ch to 2ch
if (channels.HasValue && channels.Value <= 2)
{
if (state.AudioStream != null && state.AudioStream.Channels.HasValue && state.AudioStream.Channels.Value > 5 && !ApiEntryPoint.Instance.GetEncodingOptions().DownMixAudioBoost.Equals(1))
{
volParam = ",volume=" + ApiEntryPoint.Instance.GetEncodingOptions().DownMixAudioBoost.ToString(UsCulture);
}
}
if (state.OutputAudioSampleRate.HasValue)
{
audioSampleRate = state.OutputAudioSampleRate.Value + ":";
}
var adelay = isHls ? "adelay=1," : string.Empty;
var pts = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode && !state.VideoRequest.CopyTimestamps)
{
var seconds = TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds;
pts = string.Format(",asetpts=PTS-{0}/TB", Math.Round(seconds).ToString(UsCulture));
}
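// Illustrative result (non-HLS, hypothetical 44100 Hz output, 2-channel downmix with a boost of 2,
// OutputAudioSync assumed to be "1"): -af "aresample=44100:async=1,volume=2"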
return string.Format("-af \"{0}aresample={1}async={4}{2}{3}\"",
adelay,
audioSampleRate,
volParam,
pts,
state.OutputAudioSync);
}
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
/// <returns>System.String.</returns>
protected string GetOutputSizeParam(StreamState state,
string outputVideoCodec,
bool allowTimeStampCopy = true)
{
// http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/
var request = state.VideoRequest;
var filters = new List<string>();
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
filters.Add("format=nv12|vaapi");
filters.Add("hwupload");
}
else if (state.DeInterlace && !string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
filters.Add("yadif=0:-1:0");
}
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
// Work around vaapi's reduced scaling features
var scaler = "scale_vaapi";
// Given the input dimensions (inputWidth, inputHeight), determine the output dimensions
// (outputWidth, outputHeight). The user may request precise output dimensions or maximum
// output dimensions. Output dimensions are guaranteed to be even.
decimal inputWidth = Convert.ToDecimal(state.VideoStream.Width);
decimal inputHeight = Convert.ToDecimal(state.VideoStream.Height);
decimal outputWidth = request.Width.HasValue ? Convert.ToDecimal(request.Width.Value) : inputWidth;
decimal outputHeight = request.Height.HasValue ? Convert.ToDecimal(request.Height.Value) : inputHeight;
decimal maximumWidth = request.MaxWidth.HasValue ? Convert.ToDecimal(request.MaxWidth.Value) : outputWidth;
decimal maximumHeight = request.MaxHeight.HasValue ? Convert.ToDecimal(request.MaxHeight.Value) : outputHeight;
if (outputWidth > maximumWidth || outputHeight > maximumHeight)
{
var scale = Math.Min(maximumWidth / outputWidth, maximumHeight / outputHeight);
outputWidth = Math.Min(maximumWidth, Math.Truncate(outputWidth * scale));
outputHeight = Math.Min(maximumHeight, Math.Truncate(outputHeight * scale));
}
outputWidth = 2 * Math.Truncate(outputWidth / 2);
outputHeight = 2 * Math.Truncate(outputHeight / 2);
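// Worked example: a 1920x1080 input with MaxWidth=1280 and MaxHeight=720 gives
// scale = min(1280/1920, 720/1080) = 2/3, so the output becomes 1280x720 (already even).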
if (outputWidth != inputWidth || outputHeight != inputHeight)
{
filters.Add(string.Format("{0}=w={1}:h={2}", scaler, outputWidth.ToString(UsCulture), outputHeight.ToString(UsCulture)));
}
}
else
{
// If fixed dimensions were supplied
if (request.Width.HasValue && request.Height.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
}
// If max dimensions were supplied, scale down to fit within them while preserving the display aspect ratio, rounding each dimension down to an even number
else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
}
// If a fixed width was requested
else if (request.Width.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
}
// If a fixed height was requested
else if (request.Height.HasValue)
{
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
}
// If a max width was requested
else if (request.MaxWidth.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,{0})/2)*2:trunc(ow/dar/2)*2", maxWidthParam));
}
// If a max height was requested
else if (request.MaxHeight.HasValue)
{
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(max(iw/dar\\,ih)\\,{0})", maxHeightParam));
}
}
var output = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
var subParam = GetTextSubtitleParam(state);
filters.Add(subParam);
if (allowTimeStampCopy)
{
output += " -copyts";
}
}
if (filters.Count > 0)
{
output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
}
return output;
}
/// <summary>
/// Gets the text subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetTextSubtitleParam(StreamState state)
{
var seconds = Math.Round(TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds);
var setPtsParam = state.VideoRequest.CopyTimestamps
? string.Empty
: string.Format(",setpts=PTS -{0}/TB", seconds.ToString(UsCulture));
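// Illustrative results (hypothetical path and charset): an external file becomes something like
// "subtitles=filename='/media/movie.en.srt':charenc=UTF-8,setpts=PTS -120/TB" (two-minute start offset),
// while an embedded stream uses the "si=" form further below.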
if (state.SubtitleStream.IsExternal)
{
var subtitlePath = state.SubtitleStream.Path;
var charsetParam = string.Empty;
if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
{
var charenc = SubtitleEncoder.GetSubtitleFileCharacterSet(subtitlePath, state.SubtitleStream.Language, state.MediaSource.Protocol, CancellationToken.None).Result;
if (!string.IsNullOrEmpty(charenc))
{
charsetParam = ":charenc=" + charenc;
}
}
// TODO: Perhaps also use original_size=1920x800 ??
return string.Format("subtitles=filename='{0}'{1}{2}",
MediaEncoder.EscapeSubtitleFilterPath(subtitlePath),
charsetParam,
setPtsParam);
}
var mediaPath = state.MediaPath ?? string.Empty;
return string.Format("subtitles='{0}:si={1}'{2}",
MediaEncoder.EscapeSubtitleFilterPath(mediaPath),
state.InternalSubtitleStreamOffset.ToString(UsCulture),
setPtsParam);
}
/// <summary>
/// Gets the internal graphical subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>System.String.</returns>
protected string GetGraphicalSubtitleParam(StreamState state, string outputVideoCodec)
{
var outputSizeParam = string.Empty;
var request = state.VideoRequest;
// Add resolution params, if specified
if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
{
outputSizeParam = GetOutputSizeParam(state, outputVideoCodec).TrimEnd('"');
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("format", StringComparison.OrdinalIgnoreCase));
}
else
{
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
}
}
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase) && outputSizeParam.Length == 0)
{
outputSizeParam = ",format=nv12|vaapi,hwupload";
}
var videoSizeParam = string.Empty;
if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
{
videoSizeParam = string.Format(",scale={0}:{1}", state.VideoStream.Width.Value.ToString(UsCulture), state.VideoStream.Height.Value.ToString(UsCulture));
}
var mapPrefix = state.SubtitleStream.IsExternal ?
1 :
0;
var subtitleStreamIndex = state.SubtitleStream.IsExternal
? 0
: state.SubtitleStream.Index;
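// Illustrative result (external graphical subtitle, video stream index 0, 1920x1080 source, no resize):
//  -filter_complex "[1:0]format=yuva444p,scale=1920:1080,lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:0] [sub] overlay"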
return string.Format(" -filter_complex \"[{0}:{1}]format=yuva444p{4},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{2}] [sub] overlay{3}\"",
mapPrefix.ToString(UsCulture),
subtitleStreamIndex.ToString(UsCulture),
state.VideoStream.Index.ToString(UsCulture),
outputSizeParam,
videoSizeParam);
}
/// <summary>
/// Gets the probe size argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetProbeSizeArgument(StreamState state)
{
if (state.PlayableStreamFileNames.Count > 0)
{
return MediaEncoder.GetProbeSizeAndAnalyzeDurationArgument(state.PlayableStreamFileNames.ToArray(), state.InputProtocol);
}
return MediaEncoder.GetProbeSizeAndAnalyzeDurationArgument(new[] { state.MediaPath }, state.InputProtocol);
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(StreamRequest request, MediaStream audioStream, string outputAudioCodec)
{
var inputChannels = audioStream == null
? null
: audioStream.Channels;
if (inputChannels <= 0)
{
inputChannels = null;
}
int? resultChannels = null;
var codec = outputAudioCodec ?? string.Empty;
if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two channel output
resultChannels = Math.Min(2, inputChannels ?? 2);
}
else if (request.MaxAudioChannels.HasValue)
{
var channelLimit = codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1
? 2
: 6;
if (inputChannels.HasValue)
{
channelLimit = Math.Min(channelLimit, inputChannels.Value);
}
// If we don't have any media info, fall back to the codec's channel limit to prevent encoding errors due to asking for too many channels
resultChannels = Math.Min(request.MaxAudioChannels.Value, channelLimit);
}
if (resultChannels.HasValue && !string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
if (request.TranscodingMaxAudioChannels.HasValue)
{
resultChannels = Math.Min(request.TranscodingMaxAudioChannels.Value, resultChannels.Value);
}
}
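// Illustrative example: a 6-channel source with MaxAudioChannels=6 yields 2 channels for mp3
// (per the mp3 limit above) and 6 channels for aac.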
return resultChannels ?? request.AudioChannels;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
/// <param name="stream">The stream.</param>
/// <returns><c>true</c> if the specified stream is H264; otherwise, <c>false</c>.</returns>
protected bool IsH264(MediaStream stream)
{
var codec = stream.Codec ?? string.Empty;
return codec.IndexOf("264", StringComparison.OrdinalIgnoreCase) != -1 ||
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// Gets the audio encoder.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetAudioEncoder(StreamState state)
{
var codec = state.OutputAudioCodec;
if (string.Equals(codec, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac -strict experimental";
}
if (string.Equals(codec, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "libmp3lame";
}
if (string.Equals(codec, "vorbis", StringComparison.OrdinalIgnoreCase))
{
return "libvorbis";
}
if (string.Equals(codec, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wmav2";
}
return codec.ToLower();
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetVideoEncoder(StreamState state)
{
var codec = state.OutputVideoCodec;
if (!string.IsNullOrEmpty(codec))
{
if (string.Equals(codec, "h264", StringComparison.OrdinalIgnoreCase))
{
return GetH264Encoder(state);
}
if (string.Equals(codec, "vpx", StringComparison.OrdinalIgnoreCase))
{
return "libvpx";
}
if (string.Equals(codec, "wmv", StringComparison.OrdinalIgnoreCase))
{
return "wmv2";
}
if (string.Equals(codec, "theora", StringComparison.OrdinalIgnoreCase))
{
return "libtheora";
}
return codec.ToLower();
}
return "copy";
}
/// <summary>
/// Gets the video decoder to specify on the command line
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetVideoDecoder(StreamState state)
{
if (string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
return null;
}
// Only use hardware decoders for video files.
// When using concat with folder rips, if the mfx session fails to initialize, ffmpeg will be stuck retrying and will not exit gracefully
// Since transcoding of folder rips is experimental anyway, it's not worth adding additional variables such as this.
if (state.VideoType != VideoType.VideoFile)
{
return null;
}
if (state.VideoStream != null && !string.IsNullOrWhiteSpace(state.VideoStream.Codec))
{
if (string.Equals(ApiEntryPoint.Instance.GetEncodingOptions().HardwareAccelerationType, "qsv", StringComparison.OrdinalIgnoreCase))
{
switch (state.MediaSource.VideoStream.Codec.ToLower())
{
case "avc":
case "h264":
if (MediaEncoder.SupportsDecoder("h264_qsv"))
{
// Seeing stalls and failures with decoding. Not worth it compared to encoding.
return "-c:v h264_qsv ";
}
break;
case "mpeg2video":
if (MediaEncoder.SupportsDecoder("mpeg2_qsv"))
{
return "-c:v mpeg2_qsv ";
}
break;
case "vc1":
if (MediaEncoder.SupportsDecoder("vc1_qsv"))
{
return "-c:v vc1_qsv ";
}
break;
}
}
}
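// Illustrative example: an h264/avc source with a qsv-capable ffmpeg build returns "-c:v h264_qsv ";
// unsupported codecs fall through to the default below.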
// leave blank so ffmpeg will decide
return null;
}
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetInputArgument(StreamState state)
{
var arg = string.Format("-i {0}", GetInputPathArgument(state));
if (state.SubtitleStream != null && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
if (state.VideoStream != null && state.VideoStream.Width.HasValue)
{
// This is hacky but not sure how to get the exact subtitle resolution
double height = state.VideoStream.Width.Value;
height /= 16;
height *= 9;
arg += string.Format(" -canvas_size {0}:{1}", state.VideoStream.Width.Value.ToString(CultureInfo.InvariantCulture), Convert.ToInt32(height).ToString(CultureInfo.InvariantCulture));
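// e.g. a 1920-pixel-wide video produces "-canvas_size 1920:1080" (a 16:9 assumption, since the
// real subtitle canvas size is unknown).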
}
arg += " -i \"" + state.SubtitleStream.Path + "\"";
}
}
if (state.VideoRequest != null)
{
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
if (GetVideoEncoder(state).IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1)
{
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode;
var hwOutputFormat = "vaapi";
if (hasGraphicalSubs)
{
hwOutputFormat = "yuv420p";
}
arg = "-hwaccel vaapi -hwaccel_output_format " + hwOutputFormat + " -vaapi_device " + encodingOptions.VaapiDevice + " " + arg;
}
}
return arg.Trim();
}
private string GetInputPathArgument(StreamState state)
{
var protocol = state.InputProtocol;
var mediaPath = state.MediaPath ?? string.Empty;
var inputPath = new[] { mediaPath };
if (state.IsInputVideo)
{
if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
{
inputPath = MediaEncoderHelpers.GetInputArgument(FileSystem, mediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
}
}
return MediaEncoder.GetInputArgument(inputPath, protocol);
}
private async Task AcquireResources(StreamState state, CancellationTokenSource cancellationTokenSource)
{
if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
{
state.IsoMount = await IsoManager.Mount(state.MediaPath, cancellationTokenSource.Token).ConfigureAwait(false);
}
if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.Request.LiveStreamId))
{
var liveStreamResponse = await MediaSourceManager.OpenLiveStream(new LiveStreamRequest
{
OpenToken = state.MediaSource.OpenToken
}, false, cancellationTokenSource.Token).ConfigureAwait(false);
AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.VideoRequest, state.RequestedUrl);
if (state.VideoRequest != null)
{
TryStreamCopy(state, state.VideoRequest);
}
}
if (state.MediaSource.BufferMs.HasValue)
{
await Task.Delay(state.MediaSource.BufferMs.Value, cancellationTokenSource.Token).ConfigureAwait(false);
}
}
/// <summary>
/// Starts the FFMPEG.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputPath">The output path.</param>
/// <param name="cancellationTokenSource">The cancellation token source.</param>
/// <param name="workingDirectory">The working directory.</param>
/// <returns>Task.</returns>
protected async Task<TranscodingJob> StartFfMpeg(StreamState state,
string outputPath,
CancellationTokenSource cancellationTokenSource,
string workingDirectory = null)
{
FileSystem.CreateDirectory(Path.GetDirectoryName(outputPath));
await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false);
if (state.VideoRequest != null && !string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
var auth = AuthorizationContext.GetAuthorizationInfo(Request);
if (!string.IsNullOrWhiteSpace(auth.UserId))
{
var user = UserManager.GetUserById(auth.UserId);
if (!user.Policy.EnableVideoPlaybackTranscoding)
{
ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);
throw new ArgumentException("User does not have access to video transcoding");
}
}
}
var transcodingId = Guid.NewGuid().ToString("N");
var commandLineArgs = GetCommandLineArguments(outputPath, state, true);
var process = new Process
{
StartInfo = new ProcessStartInfo
{
CreateNoWindow = true,
UseShellExecute = false,
// Must consume both stdout and stderr or deadlocks may occur
//RedirectStandardOutput = true,
RedirectStandardError = true,
RedirectStandardInput = true,
FileName = MediaEncoder.EncoderPath,
Arguments = commandLineArgs,
WindowStyle = ProcessWindowStyle.Hidden,
ErrorDialog = false
},
EnableRaisingEvents = true
};
if (!string.IsNullOrWhiteSpace(workingDirectory))
{
process.StartInfo.WorkingDirectory = workingDirectory;
}
var transcodingJob = ApiEntryPoint.Instance.OnTranscodeBeginning(outputPath,
state.Request.PlaySessionId,
state.MediaSource.LiveStreamId,
transcodingId,
TranscodingJobType,
process,
state.Request.DeviceId,
state,
cancellationTokenSource);
var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments;
Logger.Info(commandLineLogMessage);
var logFilePrefix = "ffmpeg-transcode";
if (state.VideoRequest != null && string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase) && string.Equals(state.OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
logFilePrefix = "ffmpeg-directstream";
}
else if (state.VideoRequest != null && string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
logFilePrefix = "ffmpeg-remux";
}
var logFilePath = Path.Combine(ServerConfigurationManager.ApplicationPaths.LogDirectoryPath, logFilePrefix + "-" + Guid.NewGuid() + ".txt");
FileSystem.CreateDirectory(Path.GetDirectoryName(logFilePath));
// FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory.
state.LogFileStream = FileSystem.GetFileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, true);
var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(Request.AbsoluteUri + Environment.NewLine + Environment.NewLine + JsonSerializer.SerializeToString(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine);
await state.LogFileStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false);
process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state);
try
{
process.Start();
}
catch (Exception ex)
{
Logger.ErrorException("Error starting ffmpeg", ex);
ApiEntryPoint.Instance.OnTranscodeFailedToStart(outputPath, TranscodingJobType, state);
throw;
}
// MUST read both stdout and stderr asynchronously or a deadlock may occur
//process.BeginOutputReadLine();
// Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback
var task = Task.Run(() => StartStreamingLog(transcodingJob, state, process.StandardError.BaseStream, state.LogFileStream));
// Wait for the file to exist before proceeding
while (!FileSystem.FileExists(state.WaitForPath ?? outputPath) && !transcodingJob.HasExited)
{
await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false);
}
if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive && !transcodingJob.HasExited)
{
await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false);
if (state.ReadInputAtNativeFramerate && !transcodingJob.HasExited)
{
await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false);
}
}
if (!transcodingJob.HasExited)
{
StartThrottler(state, transcodingJob);
}
ReportUsage(state);
return transcodingJob;
}
private void StartThrottler(StreamState state, TranscodingJob transcodingJob)
{
if (EnableThrottling(state))
{
transcodingJob.TranscodingThrottler = state.TranscodingThrottler = new TranscodingThrottler(transcodingJob, Logger, ServerConfigurationManager);
state.TranscodingThrottler.Start();
}
}
private bool EnableThrottling(StreamState state)
{
// do not use throttling with hardware encoders
return state.InputProtocol == MediaProtocol.File &&
state.RunTimeTicks.HasValue &&
state.RunTimeTicks.Value >= TimeSpan.FromMinutes(5).Ticks &&
state.IsInputVideo &&
state.VideoType == VideoType.VideoFile &&
!string.Equals(state.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase) &&
string.Equals(GetVideoEncoder(state), "libx264", StringComparison.OrdinalIgnoreCase);
}
private async Task StartStreamingLog(TranscodingJob transcodingJob, StreamState state, Stream source, Stream target)
{
try
{
using (var reader = new StreamReader(source))
{
while (!reader.EndOfStream)
{
var line = await reader.ReadLineAsync().ConfigureAwait(false);
ParseLogLine(line, transcodingJob, state);
var bytes = Encoding.UTF8.GetBytes(Environment.NewLine + line);
await target.WriteAsync(bytes, 0, bytes.Length).ConfigureAwait(false);
await target.FlushAsync().ConfigureAwait(false);
}
}
}
catch (ObjectDisposedException)
{
// Don't spam the log. This doesn't seem to throw on Windows, but it sometimes does under Linux
}
catch (Exception ex)
{
Logger.ErrorException("Error reading ffmpeg log", ex);
}
}
private void ParseLogLine(string line, TranscodingJob transcodingJob, StreamState state)
{
float? framerate = null;
double? percent = null;
TimeSpan? transcodingPosition = null;
long? bytesTranscoded = null;
int? bitRate = null;
var parts = line.Split(' ');
var totalMs = state.RunTimeTicks.HasValue
? TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalMilliseconds
: 0;
var startMs = state.Request.StartTimeTicks.HasValue
? TimeSpan.FromTicks(state.Request.StartTimeTicks.Value).TotalMilliseconds
: 0;
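// ffmpeg progress lines look roughly like the following (spacing varies between builds; shown only as an example):
// "frame= 1234 fps= 48 q=28.0 size=    5120kB time=00:00:51.40 bitrate= 816.0kbits/s"
// Note that a space-padded value such as "fps= 48" arrives as two parts after the Split below,
// which is why "fps=" is matched exactly and the value is read from the following part.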
for (var i = 0; i < parts.Length; i++)
{
var part = parts[i];
if (string.Equals(part, "fps=", StringComparison.OrdinalIgnoreCase) &&
(i + 1 < parts.Length))
{
var rate = parts[i + 1];
float val;
if (float.TryParse(rate, NumberStyles.Any, UsCulture, out val))
{
framerate = val;
}
}
else if (state.RunTimeTicks.HasValue &&
part.StartsWith("time=", StringComparison.OrdinalIgnoreCase))
{
var time = part.Split(new[] { '=' }, 2).Last();
TimeSpan val;
if (TimeSpan.TryParse(time, UsCulture, out val))
{
var currentMs = startMs + val.TotalMilliseconds;
var percentVal = currentMs / totalMs;
percent = 100 * percentVal;
transcodingPosition = val;
}
}
else if (part.StartsWith("size=", StringComparison.OrdinalIgnoreCase))
{
var size = part.Split(new[] { '=' }, 2).Last();
int? scale = null;
if (size.IndexOf("kb", StringComparison.OrdinalIgnoreCase) != -1)
{
scale = 1024;
size = size.Replace("kb", string.Empty, StringComparison.OrdinalIgnoreCase);
}
if (scale.HasValue)
{
long val;
if (long.TryParse(size, NumberStyles.Any, UsCulture, out val))
{
bytesTranscoded = val * scale.Value;
}
}
}
else if (part.StartsWith("bitrate=", StringComparison.OrdinalIgnoreCase))
{
var rate = part.Split(new[] { '=' }, 2).Last();
int? scale = null;
if (rate.IndexOf("kbits/s", StringComparison.OrdinalIgnoreCase) != -1)
{
scale = 1024;
rate = rate.Replace("kbits/s", string.Empty, StringComparison.OrdinalIgnoreCase);
}
if (scale.HasValue)
{
float val;
if (float.TryParse(rate, NumberStyles.Any, UsCulture, out val))
{
bitRate = (int)Math.Ceiling(val * scale.Value);
}
}
}
}
if (framerate.HasValue || percent.HasValue)
{
ApiEntryPoint.Instance.ReportTranscodingProgress(transcodingJob, state, transcodingPosition, framerate, percent, bytesTranscoded, bitRate);
}
}
private int? GetVideoBitrateParamValue(VideoStreamRequest request, MediaStream videoStream, string outputVideoCodec)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
if (bitrate.HasValue)
{
var inputVideoCodec = videoStream == null ? null : videoStream.Codec;
bitrate = ResolutionNormalizer.ScaleBitrate(bitrate.Value, inputVideoCodec, outputVideoCodec);
// If a max bitrate was requested, don't let the scaled bitrate exceed it
if (request.VideoBitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, request.VideoBitRate.Value);
}
}
return bitrate;
}
protected string GetVideoBitrateParam(StreamState state, string videoCodec)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// With vpx when crf is used, b:v becomes a max rate
// https://trac.ffmpeg.org/wiki/vpxEncodingGuide. With higher bitrate source files -b:v causes judder, so limit the bitrate but don't allow it to "saturate" the bitrate. So don't constrain it down, just up.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// h264
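// e.g. a 3,000,000 bps target yields " -b:v 3000000 -maxrate 3000000 -bufsize 6000000"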
return string.Format(" -b:v {0} -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
private int? GetAudioBitrateParam(StreamRequest request, MediaStream audioStream)
{
if (request.AudioBitRate.HasValue)
{
// Make sure we don't request a bitrate higher than the source
var currentBitrate = audioStream == null ? request.AudioBitRate.Value : audioStream.BitRate ?? request.AudioBitRate.Value;
// Don't encode any higher than this
return Math.Min(384000, request.AudioBitRate.Value);
//return Math.Min(currentBitrate, request.AudioBitRate.Value);
}
return null;
}
/// <summary>
/// Gets the user agent param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetUserAgentParam(StreamState state)
{
string useragent = null;
state.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);
if (!string.IsNullOrWhiteSpace(useragent))
{
return "-user-agent \"" + useragent + "\"";
}
return string.Empty;
}
/// <summary>
        /// Called when the ffmpeg process exits.
/// </summary>
/// <param name="process">The process.</param>
/// <param name="job">The job.</param>
/// <param name="state">The state.</param>
private void OnFfMpegProcessExited(Process process, TranscodingJob job, StreamState state)
{
if (job != null)
{
job.HasExited = true;
}
Logger.Debug("Disposing stream resources");
state.Dispose();
try
{
Logger.Info("FFMpeg exited with code {0}", process.ExitCode);
}
catch
{
Logger.Error("FFMpeg exited with an error.");
}
            // Disposing the process here causes OnExited to be called twice:
//try
//{
// // Dispose the process
// process.Dispose();
//}
//catch (Exception ex)
//{
// Logger.ErrorException("Error disposing ffmpeg.", ex);
//}
}
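        /// <summary>
        /// Gets the output framerate: the explicitly requested framerate when present, otherwise the
        /// requested maximum when the source framerate exceeds it.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns>System.Nullable{System.Double}.</returns>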
protected double? GetFramerateParam(StreamState state)
{
if (state.VideoRequest != null)
{
if (state.VideoRequest.Framerate.HasValue)
{
return state.VideoRequest.Framerate.Value;
}
var maxrate = state.VideoRequest.MaxFramerate;
if (maxrate.HasValue && state.VideoStream != null)
{
var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;
if (contentRate.HasValue && contentRate.Value > maxrate.Value)
{
return maxrate;
}
}
}
return null;
}
/// <summary>
/// Parses the parameters.
/// </summary>
/// <param name="request">The request.</param>
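        /// <remarks>
        /// Params is a semicolon-delimited list of positional values; empty positions are skipped.
        /// In order: DeviceProfileId; DeviceId; MediaSourceId; Static; VideoCodec; AudioCodec; AudioStreamIndex;
        /// SubtitleStreamIndex; VideoBitRate; AudioBitRate; MaxAudioChannels; MaxFramerate; MaxWidth; MaxHeight;
        /// StartTimeTicks; Level; MaxRefFrames; MaxVideoBitDepth; Profile; (cabac, no longer used); PlaySessionId;
        /// (api_key); LiveStreamId; (ItemId repeated); CopyTimestamps; SubtitleMethod; TranscodingMaxAudioChannels;
        /// EnableSubtitlesInManifest; Tag; EnableSplittingOnNonKeyFrames.
        /// </remarks>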
private void ParseParams(StreamRequest request)
{
var vals = request.Params.Split(';');
var videoRequest = request as VideoStreamRequest;
for (var i = 0; i < vals.Length; i++)
{
var val = vals[i];
if (string.IsNullOrWhiteSpace(val))
{
continue;
}
if (i == 0)
{
request.DeviceProfileId = val;
}
else if (i == 1)
{
request.DeviceId = val;
}
else if (i == 2)
{
request.MediaSourceId = val;
}
else if (i == 3)
{
request.Static = string.Equals("true", val, StringComparison.OrdinalIgnoreCase);
}
else if (i == 4)
{
if (videoRequest != null)
{
videoRequest.VideoCodec = val;
}
}
else if (i == 5)
{
request.AudioCodec = val;
}
else if (i == 6)
{
if (videoRequest != null)
{
videoRequest.AudioStreamIndex = int.Parse(val, UsCulture);
}
}
else if (i == 7)
{
if (videoRequest != null)
{
videoRequest.SubtitleStreamIndex = int.Parse(val, UsCulture);
}
}
else if (i == 8)
{
if (videoRequest != null)
{
videoRequest.VideoBitRate = int.Parse(val, UsCulture);
}
}
else if (i == 9)
{
request.AudioBitRate = int.Parse(val, UsCulture);
}
else if (i == 10)
{
request.MaxAudioChannels = int.Parse(val, UsCulture);
}
else if (i == 11)
{
if (videoRequest != null)
{
videoRequest.MaxFramerate = float.Parse(val, UsCulture);
}
}
else if (i == 12)
{
if (videoRequest != null)
{
videoRequest.MaxWidth = int.Parse(val, UsCulture);
}
}
else if (i == 13)
{
if (videoRequest != null)
{
videoRequest.MaxHeight = int.Parse(val, UsCulture);
}
}
else if (i == 14)
{
request.StartTimeTicks = long.Parse(val, UsCulture);
}
else if (i == 15)
{
if (videoRequest != null)
{
videoRequest.Level = val;
}
}
else if (i == 16)
{
if (videoRequest != null)
{
videoRequest.MaxRefFrames = int.Parse(val, UsCulture);
}
}
else if (i == 17)
{
if (videoRequest != null)
{
videoRequest.MaxVideoBitDepth = int.Parse(val, UsCulture);
}
}
else if (i == 18)
{
if (videoRequest != null)
{
videoRequest.Profile = val;
}
}
else if (i == 19)
{
// cabac no longer used
}
else if (i == 20)
{
request.PlaySessionId = val;
}
else if (i == 21)
{
// api_key
}
else if (i == 22)
{
request.LiveStreamId = val;
}
else if (i == 23)
{
// Duplicating ItemId because of MediaMonkey
}
else if (i == 24)
{
if (videoRequest != null)
{
videoRequest.CopyTimestamps = string.Equals("true", val, StringComparison.OrdinalIgnoreCase);
}
}
else if (i == 25)
{
if (!string.IsNullOrWhiteSpace(val) && videoRequest != null)
{
SubtitleDeliveryMethod method;
if (Enum.TryParse(val, out method))
{
videoRequest.SubtitleMethod = method;
}
}
}
else if (i == 26)
{
request.TranscodingMaxAudioChannels = int.Parse(val, UsCulture);
}
else if (i == 27)
{
if (videoRequest != null)
{
videoRequest.EnableSubtitlesInManifest = string.Equals("true", val, StringComparison.OrdinalIgnoreCase);
}
}
else if (i == 28)
{
request.Tag = val;
}
else if (i == 29)
{
if (videoRequest != null)
{
videoRequest.EnableSplittingOnNonKeyFrames = string.Equals("true", val, StringComparison.OrdinalIgnoreCase);
}
}
}
}
/// <summary>
/// Parses the dlna headers.
/// </summary>
/// <param name="request">The request.</param>
private void ParseDlnaHeaders(StreamRequest request)
{
if (!request.StartTimeTicks.HasValue)
{
var timeSeek = GetHeader("TimeSeekRange.dlna.org");
request.StartTimeTicks = ParseTimeSeekHeader(timeSeek);
}
}
/// <summary>
/// Parses the time seek header.
/// </summary>
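        /// <param name="value">The TimeSeekRange.dlna.org header value, e.g. "npt=417.33-" or "npt=10:19:25.7-".</param>
        /// <returns>The start position in ticks, or null when the header is missing or empty.</returns>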
private long? ParseTimeSeekHeader(string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
if (value.IndexOf("npt=", StringComparison.OrdinalIgnoreCase) != 0)
{
throw new ArgumentException("Invalid timeseek header");
}
value = value.Substring(4).Split(new[] { '-' }, 2)[0];
if (value.IndexOf(':') == -1)
{
// Parses npt times in the format of '417.33'
double seconds;
if (double.TryParse(value, NumberStyles.Any, UsCulture, out seconds))
{
return TimeSpan.FromSeconds(seconds).Ticks;
}
throw new ArgumentException("Invalid timeseek header");
}
// Parses npt times in the format of '10:19:25.7'
var tokens = value.Split(new[] { ':' }, 3);
double secondsSum = 0;
var timeFactor = 3600;
foreach (var time in tokens)
{
double digit;
if (double.TryParse(time, NumberStyles.Any, UsCulture, out digit))
{
secondsSum += digit * timeFactor;
}
else
{
throw new ArgumentException("Invalid timeseek header");
}
timeFactor /= 60;
}
return TimeSpan.FromSeconds(secondsSum).Ticks;
}
/// <summary>
/// Gets the state.
/// </summary>
/// <param name="request">The request.</param>
/// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>Task{StreamState}.</returns>
protected async Task<StreamState> GetState(StreamRequest request, CancellationToken cancellationToken)
{
ParseDlnaHeaders(request);
if (!string.IsNullOrWhiteSpace(request.Params))
{
ParseParams(request);
}
var url = Request.PathInfo;
if (string.IsNullOrEmpty(request.AudioCodec))
{
request.AudioCodec = InferAudioCodec(url);
}
var state = new StreamState(MediaSourceManager, Logger)
{
Request = request,
RequestedUrl = url,
UserAgent = Request.UserAgent
};
//if ((Request.UserAgent ?? string.Empty).IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1 ||
// (Request.UserAgent ?? string.Empty).IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1 ||
// (Request.UserAgent ?? string.Empty).IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1)
//{
// state.SegmentLength = 6;
//}
if (state.VideoRequest != null)
{
if (!string.IsNullOrWhiteSpace(state.VideoRequest.VideoCodec))
{
state.SupportedVideoCodecs = state.VideoRequest.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
state.VideoRequest.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault();
}
}
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault(i => MediaEncoder.CanEncodeToAudioCodec(i))
?? state.SupportedAudioCodecs.FirstOrDefault();
}
var item = LibraryManager.GetItemById(request.Id);
state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
MediaSourceInfo mediaSource = null;
if (string.IsNullOrWhiteSpace(request.LiveStreamId))
{
TranscodingJob currentJob = !string.IsNullOrWhiteSpace(request.PlaySessionId) ?
ApiEntryPoint.Instance.GetTranscodingJob(request.PlaySessionId)
: null;
if (currentJob != null)
{
mediaSource = currentJob.MediaSource;
}
if (mediaSource == null)
{
var mediaSources = (await MediaSourceManager.GetPlayackMediaSources(request.Id, null, false, new[] { MediaType.Audio, MediaType.Video }, cancellationToken).ConfigureAwait(false)).ToList();
mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
? mediaSources.First()
: mediaSources.FirstOrDefault(i => string.Equals(i.Id, request.MediaSourceId));
if (mediaSource == null && string.Equals(request.Id, request.MediaSourceId, StringComparison.OrdinalIgnoreCase))
{
mediaSource = mediaSources.First();
}
}
}
else
{
var liveStreamInfo = await MediaSourceManager.GetLiveStreamWithDirectStreamProvider(request.LiveStreamId, cancellationToken).ConfigureAwait(false);
mediaSource = liveStreamInfo.Item1;
state.DirectStreamProvider = liveStreamInfo.Item2;
}
var videoRequest = request as VideoStreamRequest;
AttachMediaSourceInfo(state, mediaSource, videoRequest, url);
var container = Path.GetExtension(state.RequestedUrl);
if (string.IsNullOrEmpty(container))
{
container = request.Static ?
state.InputContainer :
(Path.GetExtension(GetOutputFilePath(state)) ?? string.Empty).TrimStart('.');
}
state.OutputContainer = (container ?? string.Empty).TrimStart('.');
state.OutputAudioBitrate = GetAudioBitrateParam(state.Request, state.AudioStream);
state.OutputAudioSampleRate = request.AudioSampleRate;
state.OutputAudioCodec = state.Request.AudioCodec;
state.OutputAudioChannels = GetNumAudioChannelsParam(state.Request, state.AudioStream, state.OutputAudioCodec);
if (videoRequest != null)
{
state.OutputVideoCodec = state.VideoRequest.VideoCodec;
state.OutputVideoBitrate = GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream, state.OutputVideoCodec);
if (state.OutputVideoBitrate.HasValue)
{
var resolution = ResolutionNormalizer.Normalize(
state.VideoStream == null ? (int?)null : state.VideoStream.BitRate,
state.OutputVideoBitrate.Value,
state.VideoStream == null ? null : state.VideoStream.Codec,
state.OutputVideoCodec,
videoRequest.MaxWidth,
videoRequest.MaxHeight);
videoRequest.MaxWidth = resolution.MaxWidth;
videoRequest.MaxHeight = resolution.MaxHeight;
}
}
ApplyDeviceProfileSettings(state);
if (videoRequest != null)
{
TryStreamCopy(state, videoRequest);
}
state.OutputFilePath = GetOutputFilePath(state);
return state;
}
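        /// <summary>
        /// Switches the output video and/or audio codec to "copy" when the source streams are compatible
        /// with the request, or when the user is not permitted to transcode that stream type.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="videoRequest">The video request.</param>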
private void TryStreamCopy(StreamState state, VideoStreamRequest videoRequest)
{
if (state.VideoStream != null && CanStreamCopyVideo(state))
{
state.OutputVideoCodec = "copy";
}
else
{
// If the user doesn't have access to transcoding, then force stream copy, regardless of whether it will be compatible or not
var auth = AuthorizationContext.GetAuthorizationInfo(Request);
if (!string.IsNullOrWhiteSpace(auth.UserId))
{
var user = UserManager.GetUserById(auth.UserId);
if (!user.Policy.EnableVideoPlaybackTranscoding)
{
state.OutputVideoCodec = "copy";
}
}
}
if (state.AudioStream != null && CanStreamCopyAudio(state, state.SupportedAudioCodecs))
2015-03-28 21:22:27 +01:00
{
state.OutputAudioCodec = "copy";
}
else
{
// If the user doesn't have access to transcoding, then force stream copy, regardless of whether it will be compatible or not
var auth = AuthorizationContext.GetAuthorizationInfo(Request);
if (!string.IsNullOrWhiteSpace(auth.UserId))
{
var user = UserManager.GetUserById(auth.UserId);
if (!user.Policy.EnableAudioPlaybackTranscoding)
{
state.OutputAudioCodec = "copy";
}
}
}
}
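        /// <summary>
        /// Copies the media source properties (path, protocol, container, bitrate, sync options, etc.)
        /// onto the stream state and selects the video, audio and subtitle streams for the request.
        /// </summary>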
private void AttachMediaSourceInfo(StreamState state,
MediaSourceInfo mediaSource,
VideoStreamRequest videoRequest,
string requestedUrl)
{
state.MediaPath = mediaSource.Path;
state.InputProtocol = mediaSource.Protocol;
state.InputContainer = mediaSource.Container;
state.InputFileSize = mediaSource.Size;
state.InputBitrate = mediaSource.Bitrate;
state.RunTimeTicks = mediaSource.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
if (mediaSource.VideoType.HasValue)
{
state.VideoType = mediaSource.VideoType.Value;
}
state.IsoType = mediaSource.IsoType;
state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();
if (mediaSource.Timestamp.HasValue)
{
state.InputTimestamp = mediaSource.Timestamp.Value;
}
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
if (state.ReadInputAtNativeFramerate ||
mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
{
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
}
if (string.Equals(mediaSource.Container, "wma", StringComparison.OrdinalIgnoreCase))
{
// Seeing some stuttering when transcoding wma to audio-only HLS
state.InputAudioSync = "1";
}
var mediaStreams = mediaSource.MediaStreams;
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(requestedUrl);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.MediaSource = mediaSource;
}
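        /// <summary>
        /// Determines whether the source video stream can be stream copied for this request: the codec must
        /// match, subtitles must not be burned in, and width, height, framerate, bitrate, bit depth,
        /// ref frames, profile and level must all fall within the requested limits.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <returns><c>true</c> if the video stream can be copied; otherwise, <c>false</c>.</returns>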
protected virtual bool CanStreamCopyVideo(StreamState state)
{
var request = state.VideoRequest;
var videoStream = state.VideoStream;
if (videoStream.IsInterlaced)
{
return false;
}
if (videoStream.IsAnamorphic ?? false)
{
return false;
}
// Can't stream copy if we're burning in subtitles
if (request.SubtitleStreamIndex.HasValue)
{
if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
return false;
}
}
if (string.Equals("h264", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
if (videoStream.IsAVC.HasValue && !videoStream.IsAVC.Value)
{
Logger.Debug("Cannot stream copy video. Stream is marked as not AVC");
return false;
}
}
// Source and target codecs must match
if (string.IsNullOrEmpty(videoStream.Codec) || !state.SupportedVideoCodecs.Contains(videoStream.Codec, StringComparer.OrdinalIgnoreCase))
{
return false;
}
// If client is requesting a specific video profile, it must match the source
if (!string.IsNullOrEmpty(request.Profile))
{
if (string.IsNullOrEmpty(videoStream.Profile))
{
//return false;
}
if (!string.IsNullOrEmpty(videoStream.Profile) && !string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
{
var currentScore = GetVideoProfileScore(videoStream.Profile);
var requestedScore = GetVideoProfileScore(request.Profile);
if (currentScore == -1 || currentScore > requestedScore)
{
return false;
}
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue)
{
if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
{
return false;
}
}
// Video height must fall within requested value
if (request.MaxHeight.HasValue)
{
if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
{
return false;
}
}
// Video framerate must fall within requested value
var requestedFramerate = request.MaxFramerate ?? request.Framerate;
if (requestedFramerate.HasValue)
{
var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
{
return false;
}
}
// Video bitrate must fall within requested value
if (request.VideoBitRate.HasValue)
{
if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
{
return false;
}
}
if (request.MaxVideoBitDepth.HasValue)
{
if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
{
return false;
}
}
if (request.MaxRefFrames.HasValue)
{
if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
{
return false;
}
}
// If a specific level was requested, the source must match or be less than
if (!string.IsNullOrEmpty(request.Level))
{
double requestLevel;
if (double.TryParse(request.Level, NumberStyles.Any, UsCulture, out requestLevel))
{
if (!videoStream.Level.HasValue)
{
//return false;
}
if (videoStream.Level.HasValue && videoStream.Level.Value > requestLevel)
{
return false;
}
}
}
return request.EnableAutoStreamCopy;
}
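        /// <summary>
        /// Ranks an h264 profile from least to most demanding; returns -1 for unknown profiles.
        /// </summary>
        /// <param name="profile">The profile name.</param>
        /// <returns>The index of the profile in the ordered list, or -1.</returns>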
private int GetVideoProfileScore(string profile)
{
var list = new List<string>
{
"Constrained Baseline",
"Baseline",
"Extended",
"Main",
"High",
"Progressive High",
"Constrained High"
};
return Array.FindIndex(list.ToArray(), t => string.Equals(t, profile, StringComparison.OrdinalIgnoreCase));
}
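        /// <summary>
        /// Determines whether the source audio stream can be stream copied: the codec must be in the supported
        /// list and the bitrate, channel count and sample rate must fall within the requested limits.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="supportedAudioCodecs">The audio codecs the client supports.</param>
        /// <returns><c>true</c> if the audio stream can be copied; otherwise, <c>false</c>.</returns>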
protected virtual bool CanStreamCopyAudio(StreamState state, List<string> supportedAudioCodecs)
{
var request = state.VideoRequest;
var audioStream = state.AudioStream;
// Source and target codecs must match
if (string.IsNullOrEmpty(audioStream.Codec) || !supportedAudioCodecs.Contains(audioStream.Codec, StringComparer.OrdinalIgnoreCase))
{
return false;
}
            // Audio bitrate must fall within requested value
if (request.AudioBitRate.HasValue)
{
if (!audioStream.BitRate.HasValue || audioStream.BitRate.Value <= 0)
{
return false;
}
if (audioStream.BitRate.Value > request.AudioBitRate.Value)
{
return false;
}
}
// Channels must fall within requested value
var channels = request.AudioChannels ?? request.MaxAudioChannels;
if (channels.HasValue)
{
if (!audioStream.Channels.HasValue || audioStream.Channels.Value <= 0)
{
return false;
}
if (audioStream.Channels.Value > channels.Value)
{
return false;
}
}
// Sample rate must fall within requested value
if (request.AudioSampleRate.HasValue)
{
if (!audioStream.SampleRate.HasValue || audioStream.SampleRate.Value <= 0)
{
return false;
}
if (audioStream.SampleRate.Value > request.AudioSampleRate.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
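        /// <summary>
        /// Applies the device profile (resolved from the profile id, the device id's capabilities, or the
        /// request headers) to the stream state: mime type, content length estimation, mpegts/m2ts mode,
        /// seek info, and the timestamp/subtitle/keyframe-splitting transcoding options.
        /// </summary>
        /// <param name="state">The state.</param>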
private void ApplyDeviceProfileSettings(StreamState state)
{
var headers = new Dictionary<string, string>();
foreach (var key in Request.Headers.AllKeys)
{
headers[key] = Request.Headers[key];
}
if (!string.IsNullOrWhiteSpace(state.Request.DeviceProfileId))
{
state.DeviceProfile = DlnaManager.GetProfile(state.Request.DeviceProfileId);
}
else
{
if (!string.IsNullOrWhiteSpace(state.Request.DeviceId))
{
var caps = DeviceManager.GetCapabilities(state.Request.DeviceId);
if (caps != null)
{
state.DeviceProfile = caps.DeviceProfile;
}
else
{
state.DeviceProfile = DlnaManager.GetProfile(headers);
}
}
}
var profile = state.DeviceProfile;
if (profile == null)
{
// Don't use settings from the default profile.
// Only use a specific profile if it was requested.
return;
}
var audioCodec = state.ActualOutputAudioCodec;
var videoCodec = state.ActualOutputVideoCodec;
var mediaProfile = state.VideoRequest == null ?
profile.GetAudioMediaProfile(state.OutputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate) :
profile.GetVideoMediaProfile(state.OutputContainer,
audioCodec,
videoCodec,
state.OutputWidth,
state.OutputHeight,
state.TargetVideoBitDepth,
state.OutputVideoBitrate,
state.TargetVideoProfile,
state.TargetVideoLevel,
state.TargetFramerate,
state.TargetPacketLength,
state.TargetTimestamp,
state.IsTargetAnamorphic,
state.TargetRefFrames,
state.TargetVideoStreamCount,
state.TargetAudioStreamCount,
state.TargetVideoCodecTag,
state.IsTargetAVC);
if (mediaProfile != null)
{
state.MimeType = mediaProfile.MimeType;
}
if (!state.Request.Static)
{
var transcodingProfile = state.VideoRequest == null ?
profile.GetAudioTranscodingProfile(state.OutputContainer, audioCodec) :
profile.GetVideoTranscodingProfile(state.OutputContainer, audioCodec, videoCodec);
if (transcodingProfile != null)
{
state.EstimateContentLength = transcodingProfile.EstimateContentLength;
state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;
if (state.VideoRequest != null)
{
state.VideoRequest.CopyTimestamps = transcodingProfile.CopyTimestamps;
state.VideoRequest.EnableSubtitlesInManifest = transcodingProfile.EnableSubtitlesInManifest;
state.VideoRequest.EnableSplittingOnNonKeyFrames = transcodingProfile.EnableSplittingOnNonKeyFrames;
}
}
}
}
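        /// <summary>
        /// Fire-and-forget wrapper around <see cref="ReportUsageInternal" />; errors are intentionally swallowed.
        /// </summary>
        /// <param name="state">The state.</param>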
private async void ReportUsage(StreamState state)
{
try
{
await ReportUsageInternal(state).ConfigureAwait(false);
}
catch
{
}
}
private Task ReportUsageInternal(StreamState state)
{
if (!ServerConfigurationManager.Configuration.EnableAnonymousUsageReporting)
{
return Task.FromResult(true);
}
if (!MediaEncoder.IsDefaultEncoderPath)
{
return Task.FromResult(true);
}
var dict = new Dictionary<string, string>();
var outputAudio = GetAudioEncoder(state);
if (!string.IsNullOrWhiteSpace(outputAudio))
{
dict["outputAudio"] = outputAudio;
}
var outputVideo = GetVideoEncoder(state);
if (!string.IsNullOrWhiteSpace(outputVideo))
{
dict["outputVideo"] = outputVideo;
}
if (ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputAudio ?? string.Empty, StringComparer.OrdinalIgnoreCase) &&
ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputVideo ?? string.Empty, StringComparer.OrdinalIgnoreCase))
{
return Task.FromResult(true);
}
dict["id"] = AppHost.SystemId;
dict["type"] = state.VideoRequest == null ? "Audio" : "Video";
var audioStream = state.AudioStream;
if (audioStream != null && !string.IsNullOrWhiteSpace(audioStream.Codec))
{
dict["inputAudio"] = audioStream.Codec;
}
var videoStream = state.VideoStream;
if (videoStream != null && !string.IsNullOrWhiteSpace(videoStream.Codec))
{
dict["inputVideo"] = videoStream.Codec;
}
var cert = GetType().Assembly.GetModules().First().GetSignerCertificate();
if (cert != null)
{
dict["assemblySig"] = cert.GetCertHashString();
dict["certSubject"] = cert.Subject ?? string.Empty;
dict["certIssuer"] = cert.Issuer ?? string.Empty;
}
else
{
return Task.FromResult(true);
}
if (state.SupportedAudioCodecs.Count > 0)
{
dict["supportedAudioCodecs"] = string.Join(",", state.SupportedAudioCodecs.ToArray());
}
var auth = AuthorizationContext.GetAuthorizationInfo(Request);
dict["appName"] = auth.Client ?? string.Empty;
dict["appVersion"] = auth.Version ?? string.Empty;
dict["device"] = auth.Device ?? string.Empty;
dict["deviceId"] = auth.DeviceId ?? string.Empty;
dict["context"] = "streaming";
//Logger.Info(JsonSerializer.SerializeToString(dict));
if (!ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputAudio ?? string.Empty, StringComparer.OrdinalIgnoreCase))
{
var list = ServerConfigurationManager.Configuration.CodecsUsed.ToList();
list.Add(outputAudio);
ServerConfigurationManager.Configuration.CodecsUsed = list.ToArray();
}
if (!ServerConfigurationManager.Configuration.CodecsUsed.Contains(outputVideo ?? string.Empty, StringComparer.OrdinalIgnoreCase))
{
var list = ServerConfigurationManager.Configuration.CodecsUsed.ToList();
list.Add(outputVideo);
ServerConfigurationManager.Configuration.CodecsUsed = list.ToArray();
}
ServerConfigurationManager.SaveConfiguration();
//Logger.Info(JsonSerializer.SerializeToString(dict));
var options = new HttpRequestOptions()
{
Url = "https://mb3admin.com/admin/service/transcoding/report",
CancellationToken = CancellationToken.None,
LogRequest = false,
LogErrors = false,
BufferContent = false
};
options.RequestContent = JsonSerializer.SerializeToString(dict);
options.RequestContentType = "application/json";
return HttpClient.Post(options);
}
/// <summary>
/// Adds the dlna headers.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="responseHeaders">The response headers.</param>
/// <param name="isStaticallyStreamed">if set to <c>true</c> [is statically streamed].</param>
protected void AddDlnaHeaders(StreamState state, IDictionary<string, string> responseHeaders, bool isStaticallyStreamed)
{
var profile = state.DeviceProfile;
var transferMode = GetHeader("transferMode.dlna.org");
responseHeaders["transferMode.dlna.org"] = string.IsNullOrEmpty(transferMode) ? "Streaming" : transferMode;
responseHeaders["realTimeInfo.dlna.org"] = "DLNA.ORG_TLAG=*";
if (string.Equals(GetHeader("getMediaInfo.sec"), "1", StringComparison.OrdinalIgnoreCase))
{
if (state.RunTimeTicks.HasValue)
{
var ms = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalMilliseconds;
responseHeaders["MediaInfo.sec"] = string.Format("SEC_Duration={0};", Convert.ToInt32(ms).ToString(CultureInfo.InvariantCulture));
}
}
if (state.RunTimeTicks.HasValue && !isStaticallyStreamed && profile != null)
{
AddTimeSeekResponseHeaders(state, responseHeaders);
}
if (profile == null)
{
profile = DlnaManager.GetDefaultProfile();
}
var audioCodec = state.ActualOutputAudioCodec;
if (state.VideoRequest == null)
{
responseHeaders["contentFeatures.dlna.org"] = new ContentFeatureBuilder(profile)
.BuildAudioHeader(
state.OutputContainer,
audioCodec,
state.OutputAudioBitrate,
state.OutputAudioSampleRate,
state.OutputAudioChannels,
isStaticallyStreamed,
state.RunTimeTicks,
state.TranscodeSeekInfo
);
}
else
{
var videoCodec = state.ActualOutputVideoCodec;
responseHeaders["contentFeatures.dlna.org"] = new ContentFeatureBuilder(profile)
.BuildVideoHeader(
state.OutputContainer,
videoCodec,
audioCodec,
state.OutputWidth,
state.OutputHeight,
state.TargetVideoBitDepth,
state.OutputVideoBitrate,
state.TargetTimestamp,
isStaticallyStreamed,
state.RunTimeTicks,
state.TargetVideoProfile,
state.TargetVideoLevel,
state.TargetFramerate,
state.TargetPacketLength,
state.TranscodeSeekInfo,
state.IsTargetAnamorphic,
state.TargetRefFrames,
state.TargetVideoStreamCount,
state.TargetAudioStreamCount,
state.TargetVideoCodecTag,
state.IsTargetAVC
).FirstOrDefault() ?? string.Empty;
}
foreach (var item in responseHeaders)
{
Request.Response.AddHeader(item.Key, item.Value);
}
}
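        /// <summary>
        /// Adds the TimeSeekRange.dlna.org and X-AvailableSeekRange response headers,
        /// e.g. "npt=0-5400/5400" for a 90 minute item streamed from the start.
        /// </summary>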
private void AddTimeSeekResponseHeaders(StreamState state, IDictionary<string, string> responseHeaders)
{
var runtimeSeconds = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalSeconds.ToString(UsCulture);
var startSeconds = TimeSpan.FromTicks(state.Request.StartTimeTicks ?? 0).TotalSeconds.ToString(UsCulture);
responseHeaders["TimeSeekRange.dlna.org"] = string.Format("npt={0}-{1}/{1}", startSeconds, runtimeSeconds);
responseHeaders["X-AvailableSeekRange"] = string.Format("1 npt={0}-{1}", startSeconds, runtimeSeconds);
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private void EnforceResolutionLimit(StreamState state, VideoStreamRequest videoRequest)
{
// Switch the incoming params to be ceilings rather than fixed values
videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;
videoRequest.Width = null;
videoRequest.Height = null;
}
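        /// <summary>
        /// Builds the ffmpeg input-side arguments: probe size, user agent, fast seek, -fflags +genpts for
        /// video requests, audio/video sync options, -re when reading at native framerate, and the video
        /// decoder arguments if any.
        /// </summary>
        /// <param name="state">The state.</param>
        /// <param name="genPts">Whether to generate presentation timestamps for video requests.</param>
        /// <returns>System.String.</returns>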
protected string GetInputModifier(StreamState state, bool genPts = true)
{
var inputModifier = string.Empty;
var probeSize = GetProbeSizeArgument(state);
inputModifier += " " + probeSize;
inputModifier = inputModifier.Trim();
var userAgentParam = GetUserAgentParam(state);
if (!string.IsNullOrWhiteSpace(userAgentParam))
{
inputModifier += " " + userAgentParam;
}
inputModifier = inputModifier.Trim();
inputModifier += " " + GetFastSeekCommandLineParameter(state.Request);
inputModifier = inputModifier.Trim();
//inputModifier += " -fflags +genpts+ignidx+igndts";
if (state.VideoRequest != null && genPts)
{
inputModifier += " -fflags +genpts";
}
if (!string.IsNullOrEmpty(state.InputAudioSync))
{
inputModifier += " -async " + state.InputAudioSync;
}
if (!string.IsNullOrEmpty(state.InputVideoSync))
{
inputModifier += " -vsync " + state.InputVideoSync;
}
if (state.ReadInputAtNativeFramerate)
{
inputModifier += " -re";
}
var videoDecoder = GetVideoDecoder(state);
if (!string.IsNullOrWhiteSpace(videoDecoder))
{
inputModifier += " " + videoDecoder;
}
if (state.VideoRequest != null)
{
// Important: If this is ever re-enabled, make sure not to use it with wtv because it breaks seeking
if (string.Equals(state.OutputContainer, "mkv", StringComparison.OrdinalIgnoreCase) && state.VideoRequest.CopyTimestamps)
{
//inputModifier += " -noaccurate_seek";
}
}
return inputModifier;
}
/// <summary>
/// Infers the audio codec based on the url
/// </summary>
/// <param name="url">The URL.</param>
        /// <returns>System.String.</returns>
private string InferAudioCodec(string url)
{
var ext = Path.GetExtension(url);
if (string.Equals(ext, ".mp3", StringComparison.OrdinalIgnoreCase))
{
return "mp3";
}
if (string.Equals(ext, ".aac", StringComparison.OrdinalIgnoreCase))
{
return "aac";
}
if (string.Equals(ext, ".wma", StringComparison.OrdinalIgnoreCase))
{
return "wma";
}
if (string.Equals(ext, ".ogg", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".oga", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".ogv", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".webm", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(ext, ".webma", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
return "copy";
}
/// <summary>
/// Infers the video codec.
/// </summary>
/// <param name="url">The URL.</param>
        /// <returns>System.String.</returns>
private string InferVideoCodec(string url)
{
var ext = Path.GetExtension(url);
if (string.Equals(ext, ".asf", StringComparison.OrdinalIgnoreCase))
{
return "wmv";
}
if (string.Equals(ext, ".webm", StringComparison.OrdinalIgnoreCase))
{
return "vpx";
}
if (string.Equals(ext, ".ogg", StringComparison.OrdinalIgnoreCase) || string.Equals(ext, ".ogv", StringComparison.OrdinalIgnoreCase))
{
return "theora";
}
if (string.Equals(ext, ".m3u8", StringComparison.OrdinalIgnoreCase) || string.Equals(ext, ".ts", StringComparison.OrdinalIgnoreCase))
{
return "h264";
}
return "copy";
}
}
}