move encoding methods to shared classes

pull/1154/head
Luke Pulverenti 7 years ago
parent 39e8e3cbe7
commit 5edaf12d40
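The pattern repeated in the hunks below: helpers that each streaming service previously called on itself (GetNumberOfThreads, GetInputModifier, GetInputArgument, GetMapArgs, and so on) are now called on a shared EncodingHelper, with the server's encoding options passed in explicitly. A minimal sketch of the before/after call shape, using only signatures that appear in this diff; the surrounding service code is assumed:

    var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();

    // before: protected helpers on each streaming service
    // var threads = GetNumberOfThreads(state, false);
    // var inputModifier = GetInputModifier(state, true);

    // after: shared helpers that receive the options explicitly
    var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, false);
    var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);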

@ -172,6 +172,10 @@
<Project>{17E1F4E6-8ABD-4FE5-9ECF-43D4B6087BA2}</Project>
<Name>MediaBrowser.Controller</Name>
</ProjectReference>
<ProjectReference Include="..\MediaBrowser.MediaEncoding\MediaBrowser.MediaEncoding.csproj">
<Project>{0bd82fa6-eb8a-4452-8af5-74f9c3849451}</Project>
<Name>MediaBrowser.MediaEncoding</Name>
</ProjectReference>
<ProjectReference Include="..\MediaBrowser.Model\MediaBrowser.Model.csproj">
<Project>{7EEEB4BB-F3E8-48FC-B4C5-70F0FFF8329B}</Project>
<Name>MediaBrowser.Model</Name>

File diff suppressed because it is too large

@ -237,13 +237,15 @@ namespace MediaBrowser.Api.Playback.Hls
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
var itsOffsetMs = 0;
var itsOffset = itsOffsetMs == 0 ? string.Empty : string.Format("-itsoffset {0} ", TimeSpan.FromMilliseconds(itsOffsetMs).TotalSeconds.ToString(UsCulture));
var threads = GetNumberOfThreads(state, false);
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, false);
var inputModifier = GetInputModifier(state, true);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
// If isEncoding is true we're actually starting ffmpeg
var startNumberParam = isEncoding ? GetStartNumber(state).ToString(UsCulture) : "0";
@ -265,9 +267,9 @@ namespace MediaBrowser.Api.Playback.Hls
return string.Format("{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -f segment -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero -segment_time {6} {10} -individual_header_trailer 0 -segment_format mpegts -segment_list_type m3u8 -segment_start_number {7} -segment_list \"{8}\" -y \"{9}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
threads,
GetMapArgs(state),
EncodingHelper.GetMapArgs(state),
GetVideoArguments(state),
GetAudioArguments(state),
state.SegmentLength.ToString(UsCulture),
@ -284,9 +286,9 @@ namespace MediaBrowser.Api.Playback.Hls
var args = string.Format("{0} {1} {2} -map_metadata -1 -map_chapters -1 -threads {3} {4} {5} -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero {6} -hls_time {7} -individual_header_trailer 0 -start_number {8} -hls_list_size {9}{10} -y \"{11}\"",
itsOffset,
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
threads,
GetMapArgs(state),
EncodingHelper.GetMapArgs(state),
GetVideoArguments(state),
GetAudioArguments(state),
state.SegmentLength.ToString(UsCulture),

@ -758,7 +758,7 @@ namespace MediaBrowser.Api.Playback.Hls
protected override string GetAudioArguments(StreamState state)
{
var codec = GetAudioEncoder(state);
var codec = EncodingHelper.GetAudioEncoder(state);
if (!state.IsOutputVideo)
{
@ -811,7 +811,7 @@ namespace MediaBrowser.Api.Playback.Hls
args += " -ab " + bitrate.Value.ToString(UsCulture);
}
args += " " + GetAudioFilterParam(state, true);
args += " " + EncodingHelper.GetAudioFilterParam(state, ApiEntryPoint.Instance.GetEncodingOptions(), true);
return args;
}
@ -823,7 +823,7 @@ namespace MediaBrowser.Api.Playback.Hls
return string.Empty;
}
var codec = GetVideoEncoder(state);
var codec = EncodingHelper.GetVideoEncoder(state, ApiEntryPoint.Instance.GetEncodingOptions());
var args = "-codec:v:0 " + codec;
@ -835,7 +835,7 @@ namespace MediaBrowser.Api.Playback.Hls
// See if we can save some cpu cycles by avoiding encoding
if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
if (state.VideoStream != null && IsH264(state.VideoStream) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
if (state.VideoStream != null && EncodingHelper.IsH264(state.VideoStream) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
args += " -bsf:v h264_mp4toannexb";
}
@ -849,20 +849,22 @@ namespace MediaBrowser.Api.Playback.Hls
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode;
args += " " + GetVideoQualityParam(state, GetH264Encoder(state)) + keyFrameArg;
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
args += " " + EncodingHelper.GetVideoQualityParam(state, EncodingHelper.GetH264Encoder(state, encodingOptions), encodingOptions, GetDefaultH264Preset()) + keyFrameArg;
//args += " -mixed-refs 0 -refs 3 -x264opts b_pyramid=0:weightb=0:weightp=0";
// Add resolution params, if specified
if (!hasGraphicalSubs)
{
args += GetOutputSizeParam(state, codec, EnableCopyTs(state));
args += EncodingHelper.GetOutputSizeParam(state, codec, EnableCopyTs(state));
}
// This is for internal graphical subs
if (hasGraphicalSubs)
{
args += GetGraphicalSubtitleParam(state, codec);
args += EncodingHelper.GetGraphicalSubtitleParam(state, codec);
}
//args += " -flags -global_header";
@ -884,15 +886,16 @@ namespace MediaBrowser.Api.Playback.Hls
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
var threads = GetNumberOfThreads(state, false);
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, false);
var inputModifier = GetInputModifier(state, false);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
// If isEncoding is true we're actually starting ffmpeg
var startNumber = GetStartNumber(state);
var startNumberParam = isEncoding ? startNumber.ToString(UsCulture) : "0";
var mapArgs = state.IsOutputVideo ? GetMapArgs(state) : string.Empty;
var mapArgs = state.IsOutputVideo ? EncodingHelper.GetMapArgs(state) : string.Empty;
var useGenericSegmenter = true;
if (useGenericSegmenter)
@ -909,7 +912,7 @@ namespace MediaBrowser.Api.Playback.Hls
return string.Format("{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -f segment -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero -segment_time {6} {10} -individual_header_trailer 0 -segment_format mpegts -segment_list_type m3u8 -segment_start_number {7} -segment_list \"{8}\" -y \"{9}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
threads,
mapArgs,
GetVideoArguments(state),
@ -924,7 +927,7 @@ namespace MediaBrowser.Api.Playback.Hls
return string.Format("{0} {1} -map_metadata -1 -map_chapters -1 -threads {2} {3} {4} {5} -max_delay 5000000 -avoid_negative_ts disabled -start_at_zero -hls_time {6} -individual_header_trailer 0 -start_number {7} -hls_list_size {8} -y \"{9}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
threads,
mapArgs,
GetVideoArguments(state),

@ -37,7 +37,7 @@ namespace MediaBrowser.Api.Playback.Hls
/// <returns>System.String.</returns>
protected override string GetAudioArguments(StreamState state)
{
var codec = GetAudioEncoder(state);
var codec = EncodingHelper.GetAudioEncoder(state);
if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
@ -60,7 +60,7 @@ namespace MediaBrowser.Api.Playback.Hls
args += " -ab " + bitrate.Value.ToString(UsCulture);
}
args += " " + GetAudioFilterParam(state, true);
args += " " + EncodingHelper.GetAudioFilterParam(state, ApiEntryPoint.Instance.GetEncodingOptions(), true);
return args;
}
@ -72,7 +72,7 @@ namespace MediaBrowser.Api.Playback.Hls
/// <returns>System.String.</returns>
protected override string GetVideoArguments(StreamState state)
{
var codec = GetVideoEncoder(state);
var codec = EncodingHelper.GetVideoEncoder(state, ApiEntryPoint.Instance.GetEncodingOptions());
var args = "-codec:v:0 " + codec;
@ -85,7 +85,7 @@ namespace MediaBrowser.Api.Playback.Hls
if (codec.Equals("copy", StringComparison.OrdinalIgnoreCase))
{
// if h264_mp4toannexb is ever added, do not use it for live tv
if (state.VideoStream != null && IsH264(state.VideoStream) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
if (state.VideoStream != null && EncodingHelper.IsH264(state.VideoStream) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
args += " -bsf:v h264_mp4toannexb";
}
@ -98,18 +98,19 @@ namespace MediaBrowser.Api.Playback.Hls
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.VideoRequest.SubtitleMethod == SubtitleDeliveryMethod.Encode;
args += " " + GetVideoQualityParam(state, GetH264Encoder(state)) + keyFrameArg;
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
args += " " + EncodingHelper.GetVideoQualityParam(state, EncodingHelper.GetH264Encoder(state, encodingOptions), encodingOptions, GetDefaultH264Preset()) + keyFrameArg;
// Add resolution params, if specified
if (!hasGraphicalSubs)
{
args += GetOutputSizeParam(state, codec);
args += EncodingHelper.GetOutputSizeParam(state, codec);
}
// This is for internal graphical subs
if (hasGraphicalSubs)
{
args += GetGraphicalSubtitleParam(state, codec);
args += EncodingHelper.GetGraphicalSubtitleParam(state, codec);
}
args += " -flags -global_header";

@ -57,6 +57,8 @@ namespace MediaBrowser.Api.Playback.Progressive
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
var audioTranscodeParams = new List<string>();
var bitrate = state.OutputAudioBitrate;
@ -82,13 +84,13 @@ namespace MediaBrowser.Api.Playback.Progressive
const string vn = " -vn";
var threads = GetNumberOfThreads(state, false);
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, false);
var inputModifier = GetInputModifier(state);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
return string.Format("{0} {1} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1 -y \"{5}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
threads,
vn,
string.Join(" ", audioTranscodeParams.ToArray()),

@ -96,8 +96,10 @@ namespace MediaBrowser.Api.Playback.Progressive
protected override string GetCommandLineArguments(string outputPath, StreamState state, bool isEncoding)
{
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
// Get the output codec name
var videoCodec = GetVideoEncoder(state);
var videoCodec = EncodingHelper.GetVideoEncoder(state, encodingOptions);
var format = string.Empty;
var keyFrame = string.Empty;
@ -108,9 +110,9 @@ namespace MediaBrowser.Api.Playback.Progressive
format = " -f mp4 -movflags frag_keyframe+empty_moov";
}
var threads = GetNumberOfThreads(state, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));
var inputModifier = GetInputModifier(state);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
var subtitleArguments = state.SubtitleStream != null &&
state.SubtitleDeliveryMethod == SubtitleDeliveryMethod.Embed
@ -119,9 +121,9 @@ namespace MediaBrowser.Api.Playback.Progressive
return string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -map_chapters -1 -threads {5} {6}{7}{8} -y \"{9}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
keyFrame,
GetMapArgs(state),
EncodingHelper.GetMapArgs(state),
GetVideoArguments(state, videoCodec),
threads,
GetAudioArguments(state),
@ -165,7 +167,7 @@ namespace MediaBrowser.Api.Playback.Progressive
if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
if (state.VideoStream != null && IsH264(state.VideoStream) && string.Equals(state.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
if (state.VideoStream != null && EncodingHelper.IsH264(state.VideoStream) && string.Equals(state.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
args += " -bsf:v h264_mp4toannexb";
}
@ -194,7 +196,7 @@ namespace MediaBrowser.Api.Playback.Progressive
// Add resolution params, if specified
if (!hasGraphicalSubs)
{
var outputSizeParam = GetOutputSizeParam(state, videoCodec);
var outputSizeParam = EncodingHelper.GetOutputSizeParam(state, videoCodec);
args += outputSizeParam;
hasCopyTs = outputSizeParam.IndexOf("copyts", StringComparison.OrdinalIgnoreCase) != -1;
}
@ -208,7 +210,8 @@ namespace MediaBrowser.Api.Playback.Progressive
args += " -avoid_negative_ts disabled -start_at_zero";
}
var qualityParam = GetVideoQualityParam(state, videoCodec);
var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();
var qualityParam = EncodingHelper.GetVideoQualityParam(state, videoCodec, encodingOptions, GetDefaultH264Preset());
if (!string.IsNullOrEmpty(qualityParam))
{
@ -218,7 +221,7 @@ namespace MediaBrowser.Api.Playback.Progressive
// This is for internal graphical subs
if (hasGraphicalSubs)
{
args += GetGraphicalSubtitleParam(state, videoCodec);
args += EncodingHelper.GetGraphicalSubtitleParam(state, videoCodec);
}
if (!state.RunTimeTicks.HasValue)
@ -243,7 +246,7 @@ namespace MediaBrowser.Api.Playback.Progressive
}
// Get the output codec name
var codec = GetAudioEncoder(state);
var codec = EncodingHelper.GetAudioEncoder(state);
var args = "-codec:a:0 " + codec;
@ -267,7 +270,7 @@ namespace MediaBrowser.Api.Playback.Progressive
args += " -ab " + bitrate.Value.ToString(UsCulture);
}
args += " " + GetAudioFilterParam(state, false);
args += " " + EncodingHelper.GetAudioFilterParam(state, ApiEntryPoint.Instance.GetEncodingOptions(), false);
return args;
}
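The same substitution applies to the quality and filter helpers in the progressive video service above: GetVideoQualityParam and GetAudioFilterParam move to EncodingHelper and now take the server's EncodingOptions (and, for video, the service's default H.264 preset). A sketch of the changed calls, based on the hunks above; GetDefaultH264Preset() is a service method whose body is not shown in this diff:

    var encodingOptions = ApiEntryPoint.Instance.GetEncodingOptions();

    // previously: GetVideoQualityParam(state, videoCodec)
    var qualityParam = EncodingHelper.GetVideoQualityParam(state, videoCodec, encodingOptions, GetDefaultH264Preset());

    // previously: GetAudioFilterParam(state, false)
    args += " " + EncodingHelper.GetAudioFilterParam(state, encodingOptions, false);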

@ -1,4 +1,5 @@
using MediaBrowser.Model.Dlna;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Services;
namespace MediaBrowser.Api.Playback
@ -6,7 +7,7 @@ namespace MediaBrowser.Api.Playback
/// <summary>
/// Class StreamRequest
/// </summary>
public class StreamRequest
public class StreamRequest : BaseEncodingJobOptions
{
/// <summary>
/// Gets or sets the id.
@ -30,46 +31,6 @@ namespace MediaBrowser.Api.Playback
public string SubtitleCodec { get; set; }
/// <summary>
/// Gets or sets the start time ticks.
/// </summary>
/// <value>The start time ticks.</value>
[ApiMember(Name = "StartTimeTicks", Description = "Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public long? StartTimeTicks { get; set; }
/// <summary>
/// Gets or sets the audio bit rate.
/// </summary>
/// <value>The audio bit rate.</value>
[ApiMember(Name = "AudioBitRate", Description = "Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioBitRate { get; set; }
/// <summary>
/// Gets or sets the audio channels.
/// </summary>
/// <value>The audio channels.</value>
[ApiMember(Name = "AudioChannels", Description = "Optional. Specify a specific number of audio channels to encode to, e.g. 2", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioChannels { get; set; }
[ApiMember(Name = "MaxAudioChannels", Description = "Optional. Specify a maximum number of audio channels to encode to, e.g. 2", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxAudioChannels { get; set; }
public int? TranscodingMaxAudioChannels { get; set; }
/// <summary>
/// Gets or sets the audio sample rate.
/// </summary>
/// <value>The audio sample rate.</value>
[ApiMember(Name = "AudioSampleRate", Description = "Optional. Specify a specific audio sample rate, e.g. 44100", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioSampleRate { get; set; }
/// <summary>
/// Gets or sets a value indicating whether this <see cref="StreamRequest" /> is static.
/// </summary>
/// <value><c>true</c> if static; otherwise, <c>false</c>.</value>
[ApiMember(Name = "Static", Description = "Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool Static { get; set; }
[ApiMember(Name = "DeviceProfileId", Description = "Optional. The dlna device profile id to utilize.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string DeviceProfileId { get; set; }
@ -81,102 +42,6 @@ namespace MediaBrowser.Api.Playback
public class VideoStreamRequest : StreamRequest
{
/// <summary>
/// Gets or sets the video codec.
/// </summary>
/// <value>The video codec.</value>
[ApiMember(Name = "VideoCodec", Description = "Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h264, mpeg4, theora, vpx, wmv.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string VideoCodec { get; set; }
/// <summary>
/// Gets or sets the video bit rate.
/// </summary>
/// <value>The video bit rate.</value>
[ApiMember(Name = "VideoBitRate", Description = "Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? VideoBitRate { get; set; }
/// <summary>
/// Gets or sets the index of the audio stream.
/// </summary>
/// <value>The index of the audio stream.</value>
[ApiMember(Name = "AudioStreamIndex", Description = "Optional. The index of the audio stream to use. If omitted the first audio stream will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioStreamIndex { get; set; }
/// <summary>
/// Gets or sets the index of the video stream.
/// </summary>
/// <value>The index of the video stream.</value>
[ApiMember(Name = "VideoStreamIndex", Description = "Optional. The index of the video stream to use. If omitted the first video stream will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? VideoStreamIndex { get; set; }
/// <summary>
/// Gets or sets the index of the subtitle stream.
/// </summary>
/// <value>The index of the subtitle stream.</value>
[ApiMember(Name = "SubtitleStreamIndex", Description = "Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? SubtitleStreamIndex { get; set; }
/// <summary>
/// Gets or sets the width.
/// </summary>
/// <value>The width.</value>
[ApiMember(Name = "Width", Description = "Optional. The fixed horizontal resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? Width { get; set; }
/// <summary>
/// Gets or sets the height.
/// </summary>
/// <value>The height.</value>
[ApiMember(Name = "Height", Description = "Optional. The fixed vertical resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? Height { get; set; }
/// <summary>
/// Gets or sets the width of the max.
/// </summary>
/// <value>The width of the max.</value>
[ApiMember(Name = "MaxWidth", Description = "Optional. The maximum horizontal resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxWidth { get; set; }
/// <summary>
/// Gets or sets the height of the max.
/// </summary>
/// <value>The height of the max.</value>
[ApiMember(Name = "MaxHeight", Description = "Optional. The maximum vertical resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxHeight { get; set; }
[ApiMember(Name = "MaxRefFrames", Description = "Optional.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxRefFrames { get; set; }
[ApiMember(Name = "MaxVideoBitDepth", Description = "Optional.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxVideoBitDepth { get; set; }
/// <summary>
/// Gets or sets the framerate.
/// </summary>
/// <value>The framerate.</value>
[ApiMember(Name = "Framerate", Description = "Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.", IsRequired = false, DataType = "double", ParameterType = "query", Verb = "GET")]
public float? Framerate { get; set; }
[ApiMember(Name = "MaxFramerate", Description = "Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.", IsRequired = false, DataType = "double", ParameterType = "query", Verb = "GET")]
public float? MaxFramerate { get; set; }
/// <summary>
/// Gets or sets the profile.
/// </summary>
/// <value>The profile.</value>
[ApiMember(Name = "Profile", Description = "Optional. Specify a specific h264 profile, e.g. main, baseline, high.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string Profile { get; set; }
/// <summary>
/// Gets or sets the level.
/// </summary>
/// <value>The level.</value>
[ApiMember(Name = "Level", Description = "Optional. Specify a level for the h264 profile, e.g. 3, 3.1.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string Level { get; set; }
[ApiMember(Name = "SubtitleDeliveryMethod", Description = "Optional. Specify the subtitle delivery method.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public SubtitleDeliveryMethod SubtitleMethod { get; set; }
/// <summary>
/// Gets a value indicating whether this instance has fixed resolution.
/// </summary>
@ -189,18 +54,6 @@ namespace MediaBrowser.Api.Playback
}
}
[ApiMember(Name = "EnableAutoStreamCopy", Description = "Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool EnableAutoStreamCopy { get; set; }
[ApiMember(Name = "CopyTimestamps", Description = "Whether or not to copy timestamps when transcoding with an offset. Defaults to false.", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool CopyTimestamps { get; set; }
public bool EnableSubtitlesInManifest { get; set; }
public bool RequireAvc { get; set; }
public VideoStreamRequest()
{
EnableAutoStreamCopy = true;
}
}
}
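Net effect in this file: StreamRequest now derives from BaseEncodingJobOptions, so the query-parameter properties duplicated here (StartTimeTicks, AudioBitRate, Width/Height, VideoCodec, EnableAutoStreamCopy, CopyTimestamps, and so on) are deleted and inherited from the base class instead; the EnableAutoStreamCopy = true default moves from the VideoStreamRequest constructor to the BaseEncodingJobOptions constructor shown later in this commit. A rough sketch of the resulting shape; members not visible in these hunks are assumptions:

    public class StreamRequest : BaseEncodingJobOptions
    {
        // request members not moved to the base class (Id, SubtitleCodec, DeviceProfileId, ...) remain here
        public string SubtitleCodec { get; set; }
    }

    public class VideoStreamRequest : StreamRequest
    {
        // the video-specific [ApiMember] properties removed above now live on the base class
    }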

@ -13,17 +13,28 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using MediaBrowser.MediaEncoding.Encoder;
namespace MediaBrowser.Api.Playback
{
public class StreamState : IDisposable
public class StreamState : EncodingJobInfo, IDisposable
{
private readonly ILogger _logger;
private readonly IMediaSourceManager _mediaSourceManager;
public string RequestedUrl { get; set; }
public StreamRequest Request { get; set; }
public StreamRequest Request
{
get { return (StreamRequest)BaseRequest; }
set
{
BaseRequest = value;
IsVideoRequest = VideoRequest != null;
}
}
public TranscodingThrottler TranscodingThrottler { get; set; }
public VideoStreamRequest VideoRequest
@ -31,8 +42,6 @@ namespace MediaBrowser.Api.Playback
get { return Request as VideoStreamRequest; }
}
public Dictionary<string, string> RemoteHttpHeaders { get; set; }
/// <summary>
/// Gets or sets the log file stream.
/// </summary>
@ -40,36 +49,12 @@ namespace MediaBrowser.Api.Playback
public Stream LogFileStream { get; set; }
public IDirectStreamProvider DirectStreamProvider { get; set; }
public string InputContainer { get; set; }
public MediaSourceInfo MediaSource { get; set; }
public MediaStream AudioStream { get; set; }
public MediaStream VideoStream { get; set; }
public MediaStream SubtitleStream { get; set; }
public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
/// <summary>
/// Gets or sets the iso mount.
/// </summary>
/// <value>The iso mount.</value>
public IIsoMount IsoMount { get; set; }
public string MediaPath { get; set; }
public string WaitForPath { get; set; }
public MediaProtocol InputProtocol { get; set; }
public bool IsOutputVideo
{
get { return Request is VideoStreamRequest; }
}
public bool IsInputVideo { get; set; }
public VideoType VideoType { get; set; }
public IsoType? IsoType { get; set; }
public List<string> PlayableStreamFileNames { get; set; }
public int SegmentLength
{
@ -117,41 +102,19 @@ namespace MediaBrowser.Api.Playback
}
}
public long? RunTimeTicks;
public long? InputBitrate { get; set; }
public long? InputFileSize { get; set; }
public string OutputAudioSync = "1";
public string OutputVideoSync = "-1";
public List<string> SupportedSubtitleCodecs { get; set; }
public List<string> SupportedAudioCodecs { get; set; }
public List<string> SupportedVideoCodecs { get; set; }
public string UserAgent { get; set; }
public TranscodingJobType TranscodingType { get; set; }
public StreamState(IMediaSourceManager mediaSourceManager, ILogger logger, TranscodingJobType transcodingType)
public StreamState(IMediaSourceManager mediaSourceManager, ILogger logger, TranscodingJobType transcodingType)
: base(logger)
{
_mediaSourceManager = mediaSourceManager;
_logger = logger;
SupportedSubtitleCodecs = new List<string>();
SupportedAudioCodecs = new List<string>();
SupportedVideoCodecs = new List<string>();
PlayableStreamFileNames = new List<string>();
RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
TranscodingType = transcodingType;
}
public string InputAudioSync { get; set; }
public string InputVideoSync { get; set; }
public bool DeInterlace { get; set; }
public bool ReadInputAtNativeFramerate { get; set; }
public TransportStreamTimestamp InputTimestamp { get; set; }
public string MimeType { get; set; }
public bool EstimateContentLength { get; set; }
@ -212,23 +175,6 @@ namespace MediaBrowser.Api.Playback
}
}
private void DisposeIsoMount()
{
if (IsoMount != null)
{
try
{
IsoMount.Dispose();
}
catch (Exception ex)
{
_logger.ErrorException("Error disposing iso mount", ex);
}
IsoMount = null;
}
}
private async void DisposeLiveStream()
{
if (MediaSource.RequiresClosing && string.IsNullOrWhiteSpace(Request.LiveStreamId) && !string.IsNullOrWhiteSpace(MediaSource.LiveStreamId))
@ -244,15 +190,8 @@ namespace MediaBrowser.Api.Playback
}
}
public int InternalSubtitleStreamOffset { get; set; }
public string OutputFilePath { get; set; }
public string OutputVideoCodec { get; set; }
public string OutputAudioCodec { get; set; }
public int? OutputAudioChannels;
public int? OutputAudioSampleRate;
public int? OutputAudioBitrate;
public int? OutputVideoBitrate;
public string ActualOutputVideoCodec
{
@ -298,8 +237,6 @@ namespace MediaBrowser.Api.Playback
}
}
public string OutputContainer { get; set; }
public DeviceProfile DeviceProfile { get; set; }
public int? TotalOutputBitrate
@ -447,20 +384,6 @@ namespace MediaBrowser.Api.Playback
}
}
/// <summary>
/// Predicts the audio sample rate that will be in the output stream
/// </summary>
public double? TargetVideoLevel
{
get
{
var stream = VideoStream;
return !string.IsNullOrEmpty(VideoRequest.Level) && !Request.Static
? double.Parse(VideoRequest.Level, CultureInfo.InvariantCulture)
: stream == null ? null : stream.Level;
}
}
public TransportStreamTimestamp TargetTimestamp
{
get
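To summarize the StreamState changes above: the class now derives from EncodingJobInfo, forwards its Request property to the inherited BaseRequest, and drops the fields that presumably move to the base class (MediaSource, VideoStream, SubtitleStream, IsoMount, the Output* fields, TargetVideoLevel, and so on); EncodingJobInfo itself is not shown on this page, so that is an assumption. The forwarding property, as it appears in the hunk:

    public class StreamState : EncodingJobInfo, IDisposable
    {
        public StreamRequest Request
        {
            get { return (StreamRequest)BaseRequest; }
            set
            {
                BaseRequest = value;
                IsVideoRequest = VideoRequest != null;
            }
        }
        // ...
    }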

@ -1,51 +1,22 @@
using MediaBrowser.Model.Dlna;
using System.Globalization;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Services;
namespace MediaBrowser.Controller.MediaEncoding
{
public class EncodingJobOptions
public class EncodingJobOptions : BaseEncodingJobOptions
{
public string OutputContainer { get; set; }
public string OutputDirectory { get; set; }
public long? StartTimeTicks { get; set; }
public int? Width { get; set; }
public int? Height { get; set; }
public int? MaxWidth { get; set; }
public int? MaxHeight { get; set; }
public bool Static = false;
public float? Framerate { get; set; }
public float? MaxFramerate { get; set; }
public string Profile { get; set; }
public int? Level { get; set; }
public string DeviceId { get; set; }
public string ItemId { get; set; }
public string MediaSourceId { get; set; }
public string AudioCodec { get; set; }
public bool EnableAutoStreamCopy { get; set; }
public int? MaxAudioChannels { get; set; }
public int? AudioChannels { get; set; }
public int? AudioBitRate { get; set; }
public int? AudioSampleRate { get; set; }
public DeviceProfile DeviceProfile { get; set; }
public EncodingContext Context { get; set; }
public string VideoCodec { get; set; }
public int? TranscodingMaxAudioChannels { get; set; }
public int? VideoBitRate { get; set; }
public int? AudioStreamIndex { get; set; }
public int? VideoStreamIndex { get; set; }
public int? SubtitleStreamIndex { get; set; }
public int? MaxRefFrames { get; set; }
public int? MaxVideoBitDepth { get; set; }
public int? CpuCoreLimit { get; set; }
public bool ReadInputAtNativeFramerate { get; set; }
public SubtitleDeliveryMethod SubtitleMethod { get; set; }
public bool CopyTimestamps { get; set; }
/// <summary>
/// Gets a value indicating whether this instance has fixed resolution.
@ -59,11 +30,7 @@ namespace MediaBrowser.Controller.MediaEncoding
}
}
public EncodingJobOptions()
{
}
private readonly CultureInfo _usCulture = new CultureInfo("en-US");
public EncodingJobOptions(StreamInfo info, DeviceProfile deviceProfile)
{
OutputContainer = info.Container;
@ -72,7 +39,6 @@ namespace MediaBrowser.Controller.MediaEncoding
MaxHeight = info.MaxHeight;
MaxFramerate = info.MaxFramerate;
Profile = info.VideoProfile;
Level = info.VideoLevel;
ItemId = info.ItemId;
MediaSourceId = info.MediaSourceId;
AudioCodec = info.TargetAudioCodec;
@ -93,6 +59,160 @@ namespace MediaBrowser.Controller.MediaEncoding
{
SubtitleStreamIndex = info.SubtitleStreamIndex;
}
if (info.VideoLevel.HasValue)
{
Level = info.VideoLevel.Value.ToString(_usCulture);
}
}
}
// For now until api and media encoding layers are unified
public class BaseEncodingJobOptions
{
[ApiMember(Name = "EnableAutoStreamCopy", Description = "Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool EnableAutoStreamCopy { get; set; }
/// <summary>
/// Gets or sets the audio sample rate.
/// </summary>
/// <value>The audio sample rate.</value>
[ApiMember(Name = "AudioSampleRate", Description = "Optional. Specify a specific audio sample rate, e.g. 44100", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioSampleRate { get; set; }
/// <summary>
/// Gets or sets the audio bit rate.
/// </summary>
/// <value>The audio bit rate.</value>
[ApiMember(Name = "AudioBitRate", Description = "Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioBitRate { get; set; }
/// <summary>
/// Gets or sets the audio channels.
/// </summary>
/// <value>The audio channels.</value>
[ApiMember(Name = "AudioChannels", Description = "Optional. Specify a specific number of audio channels to encode to, e.g. 2", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioChannels { get; set; }
[ApiMember(Name = "MaxAudioChannels", Description = "Optional. Specify a maximum number of audio channels to encode to, e.g. 2", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxAudioChannels { get; set; }
[ApiMember(Name = "Static", Description = "Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool Static { get; set; }
/// <summary>
/// Gets or sets the profile.
/// </summary>
/// <value>The profile.</value>
[ApiMember(Name = "Profile", Description = "Optional. Specify a specific h264 profile, e.g. main, baseline, high.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string Profile { get; set; }
/// <summary>
/// Gets or sets the level.
/// </summary>
/// <value>The level.</value>
[ApiMember(Name = "Level", Description = "Optional. Specify a level for the h264 profile, e.g. 3, 3.1.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string Level { get; set; }
/// <summary>
/// Gets or sets the framerate.
/// </summary>
/// <value>The framerate.</value>
[ApiMember(Name = "Framerate", Description = "Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.", IsRequired = false, DataType = "double", ParameterType = "query", Verb = "GET")]
public float? Framerate { get; set; }
[ApiMember(Name = "MaxFramerate", Description = "Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.", IsRequired = false, DataType = "double", ParameterType = "query", Verb = "GET")]
public float? MaxFramerate { get; set; }
[ApiMember(Name = "CopyTimestamps", Description = "Whether or not to copy timestamps when transcoding with an offset. Defaults to false.", IsRequired = false, DataType = "bool", ParameterType = "query", Verb = "GET")]
public bool CopyTimestamps { get; set; }
/// <summary>
/// Gets or sets the start time ticks.
/// </summary>
/// <value>The start time ticks.</value>
[ApiMember(Name = "StartTimeTicks", Description = "Optional. Specify a starting offset, in ticks. 1 tick = 10000 ms", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public long? StartTimeTicks { get; set; }
/// <summary>
/// Gets or sets the width.
/// </summary>
/// <value>The width.</value>
[ApiMember(Name = "Width", Description = "Optional. The fixed horizontal resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? Width { get; set; }
/// <summary>
/// Gets or sets the height.
/// </summary>
/// <value>The height.</value>
[ApiMember(Name = "Height", Description = "Optional. The fixed vertical resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? Height { get; set; }
/// <summary>
/// Gets or sets the width of the max.
/// </summary>
/// <value>The width of the max.</value>
[ApiMember(Name = "MaxWidth", Description = "Optional. The maximum horizontal resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxWidth { get; set; }
/// <summary>
/// Gets or sets the height of the max.
/// </summary>
/// <value>The height of the max.</value>
[ApiMember(Name = "MaxHeight", Description = "Optional. The maximum vertical resolution of the encoded video.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxHeight { get; set; }
/// <summary>
/// Gets or sets the video bit rate.
/// </summary>
/// <value>The video bit rate.</value>
[ApiMember(Name = "VideoBitRate", Description = "Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? VideoBitRate { get; set; }
/// <summary>
/// Gets or sets the index of the subtitle stream.
/// </summary>
/// <value>The index of the subtitle stream.</value>
[ApiMember(Name = "SubtitleStreamIndex", Description = "Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? SubtitleStreamIndex { get; set; }
[ApiMember(Name = "SubtitleMethod", Description = "Optional. Specify the subtitle delivery method.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public SubtitleDeliveryMethod SubtitleMethod { get; set; }
[ApiMember(Name = "MaxRefFrames", Description = "Optional.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxRefFrames { get; set; }
[ApiMember(Name = "MaxVideoBitDepth", Description = "Optional.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? MaxVideoBitDepth { get; set; }
public bool RequireAvc { get; set; }
public int? TranscodingMaxAudioChannels { get; set; }
public int? CpuCoreLimit { get; set; }
public string OutputContainer { get; set; }
/// <summary>
/// Gets or sets the video codec.
/// </summary>
/// <value>The video codec.</value>
[ApiMember(Name = "VideoCodec", Description = "Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h264, mpeg4, theora, vpx, wmv.", IsRequired = false, DataType = "string", ParameterType = "query", Verb = "GET")]
public string VideoCodec { get; set; }
/// <summary>
/// Gets or sets the index of the audio stream.
/// </summary>
/// <value>The index of the audio stream.</value>
[ApiMember(Name = "AudioStreamIndex", Description = "Optional. The index of the audio stream to use. If omitted the first audio stream will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? AudioStreamIndex { get; set; }
/// <summary>
/// Gets or sets the index of the video stream.
/// </summary>
/// <value>The index of the video stream.</value>
[ApiMember(Name = "VideoStreamIndex", Description = "Optional. The index of the video stream to use. If omitted the first video stream will be used.", IsRequired = false, DataType = "int", ParameterType = "query", Verb = "GET")]
public int? VideoStreamIndex { get; set; }
public BaseEncodingJobOptions()
{
EnableAutoStreamCopy = true;
}
}
}
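BaseEncodingJobOptions is the bridge between the two layers: the API's StreamRequest (earlier in this diff) and the controller's EncodingJobOptions both derive from it, per the "For now until api and media encoding layers are unified" comment. One behavioral detail visible in the hunks above: Level changes from int? on EncodingJobOptions to string on the base class, so the StreamInfo constructor now converts info.VideoLevel with ToString(_usCulture). A sketch of the hierarchy; only relationships visible in this commit are assumed:

    // MediaBrowser.Controller.MediaEncoding
    public class BaseEncodingJobOptions
    {
        // shared [ApiMember] transcoding parameters; EnableAutoStreamCopy defaults to true
    }

    public class EncodingJobOptions : BaseEncodingJobOptions
    {
        // members not moved to the base remain here, e.g. the StreamInfo/DeviceProfile constructor
    }

    // MediaBrowser.Api.Playback
    public class StreamRequest : BaseEncodingJobOptions { }
    public class VideoStreamRequest : StreamRequest { }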

@ -42,9 +42,11 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
var threads = GetNumberOfThreads(state, false);
var encodingOptions = GetEncodingOptions();
var inputModifier = GetInputModifier(state);
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, false);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
var albumCoverInput = string.Empty;
var mapArgs = string.Empty;
@ -67,7 +69,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
var result = string.Format("{0} {1}{6}{7} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1{8} -y \"{5}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, GetEncodingOptions()),
threads,
vn,
string.Join(" ", audioTranscodeParams.ToArray()),

@ -36,6 +36,8 @@ namespace MediaBrowser.MediaEncoding.Encoder
protected readonly CultureInfo UsCulture = new CultureInfo("en-US");
protected EncodingHelper EncodingHelper;
protected BaseEncoder(MediaEncoder mediaEncoder,
ILogger logger,
IServerConfigurationManager configurationManager,
@ -56,6 +58,8 @@ namespace MediaBrowser.MediaEncoding.Encoder
SubtitleEncoder = subtitleEncoder;
MediaSourceManager = mediaSourceManager;
ProcessFactory = processFactory;
EncodingHelper = new EncodingHelper(MediaEncoder, ConfigurationManager, FileSystem, SubtitleEncoder);
}
public async Task<EncodingJob> Start(EncodingJobOptions options,
@ -63,7 +67,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
CancellationToken cancellationToken)
{
var encodingJob = await new EncodingJobFactory(Logger, LibraryManager, MediaSourceManager, ConfigurationManager)
.CreateJob(options, IsVideoEncoder, progress, cancellationToken).ConfigureAwait(false);
.CreateJob(options, EncodingHelper, IsVideoEncoder, progress, cancellationToken).ConfigureAwait(false);
encodingJob.OutputFilePath = GetOutputFilePath(encodingJob);
FileSystem.CreateDirectory(Path.GetDirectoryName(encodingJob.OutputFilePath));
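BaseEncoder now owns an EncodingHelper instance and passes it to the job factory, which is why the helper methods deleted from this class further down (GetInputModifier, GetInputArgument, GetVideoQualityParam, GetMapArgs, GetOutputSizeParam, ...) no longer need to live here. A sketch of the wiring, using only the members visible in these hunks:

    protected EncodingHelper EncodingHelper;

    // in the BaseEncoder constructor
    EncodingHelper = new EncodingHelper(MediaEncoder, ConfigurationManager, FileSystem, SubtitleEncoder);

    // Start() now hands the helper to the factory
    var encodingJob = await new EncodingJobFactory(Logger, LibraryManager, MediaSourceManager, ConfigurationManager)
        .CreateJob(options, EncodingHelper, IsVideoEncoder, progress, cancellationToken).ConfigureAwait(false);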
@ -285,72 +289,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return null;
}
/// <summary>
/// Gets the number of threads.
/// </summary>
/// <returns>System.Int32.</returns>
protected int GetNumberOfThreads(EncodingJob job, bool isWebm)
{
return job.Options.CpuCoreLimit ?? 0;
}
protected string GetInputModifier(EncodingJob state, bool genPts = true)
{
var inputModifier = string.Empty;
var probeSize = GetProbeSizeArgument(state);
inputModifier += " " + probeSize;
inputModifier = inputModifier.Trim();
var userAgentParam = GetUserAgentParam(state);
if (!string.IsNullOrWhiteSpace(userAgentParam))
{
inputModifier += " " + userAgentParam;
}
inputModifier = inputModifier.Trim();
inputModifier += " " + GetFastSeekCommandLineParameter(state.Options);
inputModifier = inputModifier.Trim();
if (state.IsVideoRequest && genPts)
{
inputModifier += " -fflags +genpts";
}
if (!string.IsNullOrEmpty(state.InputAudioSync))
{
inputModifier += " -async " + state.InputAudioSync;
}
if (!string.IsNullOrEmpty(state.InputVideoSync))
{
inputModifier += " -vsync " + state.InputVideoSync;
}
if (state.ReadInputAtNativeFramerate)
{
inputModifier += " -re";
}
var videoDecoder = GetVideoDecoder(state);
if (!string.IsNullOrWhiteSpace(videoDecoder))
{
inputModifier += " " + videoDecoder;
}
//if (state.IsVideoRequest)
//{
// if (string.Equals(state.OutputContainer, "mkv", StringComparison.OrdinalIgnoreCase))
// {
// //inputModifier += " -noaccurate_seek";
// }
//}
return inputModifier;
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
@ -405,130 +343,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return null;
}
private string GetUserAgentParam(EncodingJob state)
{
string useragent = null;
state.RemoteHttpHeaders.TryGetValue("User-Agent", out useragent);
if (!string.IsNullOrWhiteSpace(useragent))
{
return "-user-agent \"" + useragent + "\"";
}
return string.Empty;
}
/// <summary>
/// Gets the probe size argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetProbeSizeArgument(EncodingJob state)
{
if (state.PlayableStreamFileNames.Count > 0)
{
return MediaEncoder.GetProbeSizeAndAnalyzeDurationArgument(state.PlayableStreamFileNames.ToArray(), state.InputProtocol);
}
return MediaEncoder.GetProbeSizeAndAnalyzeDurationArgument(new[] { state.MediaPath }, state.InputProtocol);
}
/// <summary>
/// Gets the fast seek command line parameter.
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
/// <value>The fast seek command line parameter.</value>
protected string GetFastSeekCommandLineParameter(EncodingJobOptions request)
{
var time = request.StartTimeTicks ?? 0;
if (time > 0)
{
return string.Format("-ss {0}", MediaEncoder.GetTimeParameter(time));
}
return string.Empty;
}
/// <summary>
/// Gets the input argument.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected string GetInputArgument(EncodingJob state)
{
var arg = string.Format("-i {0}", GetInputPathArgument(state));
if (state.SubtitleStream != null && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
if (state.VideoStream != null && state.VideoStream.Width.HasValue)
{
// This is hacky but not sure how to get the exact subtitle resolution
double height = state.VideoStream.Width.Value;
height /= 16;
height *= 9;
arg += string.Format(" -canvas_size {0}:{1}", state.VideoStream.Width.Value.ToString(CultureInfo.InvariantCulture), Convert.ToInt32(height).ToString(CultureInfo.InvariantCulture));
}
var subtitlePath = state.SubtitleStream.Path;
if (string.Equals(Path.GetExtension(subtitlePath), ".sub", StringComparison.OrdinalIgnoreCase))
{
var idxFile = Path.ChangeExtension(subtitlePath, ".idx");
if (FileSystem.FileExists(idxFile))
{
subtitlePath = idxFile;
}
}
arg += " -i \"" + subtitlePath + "\"";
}
}
if (state.IsVideoRequest)
{
var encodingOptions = GetEncodingOptions();
var videoEncoder = EncodingJobFactory.GetVideoEncoder(MediaEncoder, state, encodingOptions);
if (videoEncoder.IndexOf("vaapi", StringComparison.OrdinalIgnoreCase) != -1)
{
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode;
var hwOutputFormat = "vaapi";
if (hasGraphicalSubs)
{
hwOutputFormat = "yuv420p";
}
arg = "-hwaccel vaapi -hwaccel_output_format " + hwOutputFormat + " -vaapi_device " + encodingOptions.VaapiDevice + " " + arg;
}
}
return arg.Trim();
}
private string GetInputPathArgument(EncodingJob state)
{
var protocol = state.InputProtocol;
var mediaPath = state.MediaPath ?? string.Empty;
var inputPath = new[] { mediaPath };
if (state.IsInputVideo)
{
if (!(state.VideoType == VideoType.Iso && state.IsoMount == null))
{
inputPath = MediaEncoderHelpers.GetInputArgument(FileSystem, mediaPath, state.InputProtocol, state.IsoMount, state.PlayableStreamFileNames);
}
}
return MediaEncoder.GetInputArgument(inputPath, protocol);
}
private async Task AcquireResources(EncodingJob state, CancellationToken cancellationToken)
{
if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && IsoManager.CanMount(state.MediaPath))
@ -544,11 +358,11 @@ namespace MediaBrowser.MediaEncoding.Encoder
}, false, cancellationToken).ConfigureAwait(false);
AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.Options);
EncodingHelper.AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, null);
if (state.IsVideoRequest)
{
EncodingJobFactory.TryStreamCopy(state, state.Options);
EncodingHelper.TryStreamCopy(state);
}
}
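In AcquireResources, attaching media source info and attempting stream copy also route through the shared helper now; note that AttachMediaSourceInfo's third argument changes from the per-job options object to null here. The changed calls, as they appear in the hunk above:

    // previously: AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.Options);
    EncodingHelper.AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, null);

    if (state.IsVideoRequest)
    {
        // previously: EncodingJobFactory.TryStreamCopy(state, state.Options);
        EncodingHelper.TryStreamCopy(state);
    }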
@ -557,603 +371,5 @@ namespace MediaBrowser.MediaEncoding.Encoder
await Task.Delay(state.MediaSource.BufferMs.Value, cancellationToken).ConfigureAwait(false);
}
}
private void AttachMediaSourceInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
EncodingJobFactory.AttachMediaSourceInfo(state, mediaSource, videoRequest);
}
/// <summary>
/// Gets the internal graphical subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <returns>System.String.</returns>
protected async Task<string> GetGraphicalSubtitleParam(EncodingJob state, string outputVideoCodec)
{
var outputSizeParam = string.Empty;
var request = state.Options;
// Add resolution params, if specified
if (request.Width.HasValue || request.Height.HasValue || request.MaxHeight.HasValue || request.MaxWidth.HasValue)
{
outputSizeParam = await GetOutputSizeParam(state, outputVideoCodec).ConfigureAwait(false);
outputSizeParam = outputSizeParam.TrimEnd('"');
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("format", StringComparison.OrdinalIgnoreCase));
}
else
{
outputSizeParam = "," + outputSizeParam.Substring(outputSizeParam.IndexOf("scale", StringComparison.OrdinalIgnoreCase));
}
}
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase) && outputSizeParam.Length == 0)
{
outputSizeParam = ",format=nv12|vaapi,hwupload";
}
var videoSizeParam = string.Empty;
if (state.VideoStream != null && state.VideoStream.Width.HasValue && state.VideoStream.Height.HasValue)
{
videoSizeParam = string.Format(",scale={0}:{1}", state.VideoStream.Width.Value.ToString(UsCulture), state.VideoStream.Height.Value.ToString(UsCulture));
}
var mapPrefix = state.SubtitleStream.IsExternal ?
1 :
0;
var subtitleStreamIndex = state.SubtitleStream.IsExternal
? 0
: state.SubtitleStream.Index;
return string.Format(" -filter_complex \"[{0}:{1}]format=yuva444p{4},lut=u=128:v=128:y=gammaval(.3)[sub] ; [0:{2}] [sub] overlay{3}\"",
mapPrefix.ToString(UsCulture),
subtitleStreamIndex.ToString(UsCulture),
state.VideoStream.Index.ToString(UsCulture),
outputSizeParam,
videoSizeParam);
}
/// <summary>
/// Gets the video bitrate to specify on the command line
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoEncoder">The video codec.</param>
/// <returns>System.String.</returns>
protected string GetVideoQualityParam(EncodingJob state, string videoEncoder)
{
var param = string.Empty;
var isVc1 = state.VideoStream != null &&
string.Equals(state.VideoStream.Codec, "vc1", StringComparison.OrdinalIgnoreCase);
if (string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
{
param = "-preset superfast";
param += " -crf 23";
}
else if (string.Equals(videoEncoder, "libx265", StringComparison.OrdinalIgnoreCase))
{
param = "-preset fast";
param += " -crf 28";
}
// h264 (h264_qsv)
else if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
param = "-preset 7 -look_ahead 0";
}
// h264 (h264_nvenc)
else if (string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase))
{
param = "-preset llhq";
}
// webm
else if (string.Equals(videoEncoder, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// Values 0-3, 0 being highest quality but slower
var profileScore = 0;
string crf;
var qmin = "0";
var qmax = "50";
crf = "10";
if (isVc1)
{
profileScore++;
}
// Max of 2
profileScore = Math.Min(profileScore, 2);
// http://www.webmproject.org/docs/encoder-parameters/
param = string.Format("-speed 16 -quality good -profile:v {0} -slices 8 -crf {1} -qmin {2} -qmax {3}",
profileScore.ToString(UsCulture),
crf,
qmin,
qmax);
}
else if (string.Equals(videoEncoder, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd rd -flags +mv4+aic -trellis 2 -cmp 2 -subcmp 2 -bf 2";
}
// asf/wmv
else if (string.Equals(videoEncoder, "wmv2", StringComparison.OrdinalIgnoreCase))
{
param = "-qmin 2";
}
else if (string.Equals(videoEncoder, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
param = "-mbd 2";
}
param += GetVideoBitrateParam(state, videoEncoder);
var framerate = GetFramerateParam(state);
if (framerate.HasValue)
{
param += string.Format(" -r {0}", framerate.Value.ToString(UsCulture));
}
if (!string.IsNullOrEmpty(state.OutputVideoSync))
{
param += " -vsync " + state.OutputVideoSync;
}
if (!string.IsNullOrEmpty(state.Options.Profile))
{
if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
// not supported by h264_omx
param += " -profile:v " + state.Options.Profile;
}
}
var levelString = state.Options.Level.HasValue ? state.Options.Level.Value.ToString(CultureInfo.InvariantCulture) : null;
if (!string.IsNullOrEmpty(levelString))
{
levelString = NormalizeTranscodingLevel(state.OutputVideoCodec, levelString);
// h264_qsv and h264_nvenc expect levels to be expressed as a decimal. libx264 supports decimal and non-decimal format
// also needed for libx264 due to https://trac.ffmpeg.org/ticket/3307
if (string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) ||
string.Equals(videoEncoder, "h264_nvenc", StringComparison.OrdinalIgnoreCase) ||
string.Equals(videoEncoder, "libx264", StringComparison.OrdinalIgnoreCase))
{
switch (levelString)
{
case "30":
param += " -level 3.0";
break;
case "31":
param += " -level 3.1";
break;
case "32":
param += " -level 3.2";
break;
case "40":
param += " -level 4.0";
break;
case "41":
param += " -level 4.1";
break;
case "42":
param += " -level 4.2";
break;
case "50":
param += " -level 5.0";
break;
case "51":
param += " -level 5.1";
break;
case "52":
param += " -level 5.2";
break;
default:
param += " -level " + levelString;
break;
}
}
else if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase))
{
param += " -level " + levelString;
}
}
if (!string.Equals(videoEncoder, "h264_omx", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_qsv", StringComparison.OrdinalIgnoreCase) &&
!string.Equals(videoEncoder, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
param = "-pix_fmt yuv420p " + param;
}
return param;
}
private string NormalizeTranscodingLevel(string videoCodec, string level)
{
double requestLevel;
// Clients may direct play higher than level 41, but there's no reason to transcode higher
if (double.TryParse(level, NumberStyles.Any, UsCulture, out requestLevel))
{
if (string.Equals(videoCodec, "h264", StringComparison.OrdinalIgnoreCase))
{
if (requestLevel > 41)
{
return "41";
}
}
}
return level;
}
protected string GetVideoBitrateParam(EncodingJob state, string videoCodec)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// With vpx when crf is used, b:v becomes a max rate
// https://trac.ffmpeg.org/wiki/vpxEncodingGuide. But with higher bitrate source files -b:v causes judder, so limit the bitrate but don't allow it to "saturate" the bitrate. So don't constrain it down, just up.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "libx264", StringComparison.OrdinalIgnoreCase))
{
// h264
return string.Format(" -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
// h264
return string.Format(" -b:v {0} -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
protected double? GetFramerateParam(EncodingJob state)
{
if (state.Options != null)
{
if (state.Options.Framerate.HasValue)
{
return state.Options.Framerate.Value;
}
var maxrate = state.Options.MaxFramerate;
if (maxrate.HasValue && state.VideoStream != null)
{
var contentRate = state.VideoStream.AverageFrameRate ?? state.VideoStream.RealFrameRate;
if (contentRate.HasValue && contentRate.Value > maxrate.Value)
{
return maxrate;
}
}
}
return null;
}
/// <summary>
/// Gets the map args.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected virtual string GetMapArgs(EncodingJob state)
{
// If we don't have known media info
// If input is video, use -sn to drop subtitles
// Otherwise just return empty
if (state.VideoStream == null && state.AudioStream == null)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
// We have media info, but we don't know the stream indexes
if (state.VideoStream != null && state.VideoStream.Index == -1)
{
return "-sn";
}
// We have media info, but we don't know the stream indexes
if (state.AudioStream != null && state.AudioStream.Index == -1)
{
return state.IsInputVideo ? "-sn" : string.Empty;
}
var args = string.Empty;
if (state.VideoStream != null)
{
args += string.Format("-map 0:{0}", state.VideoStream.Index);
}
else
{
// No known video stream
args += "-vn";
}
if (state.AudioStream != null)
{
args += string.Format(" -map 0:{0}", state.AudioStream.Index);
}
else
{
args += " -map -0:a";
}
if (state.SubtitleStream == null || state.Options.SubtitleMethod == SubtitleDeliveryMethod.Hls)
{
args += " -map -0:s";
}
else if (state.SubtitleStream.IsExternal && !state.SubtitleStream.IsTextSubtitleStream)
{
args += " -map 1:0 -sn";
}
return args;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
/// <param name="stream">The stream.</param>
/// <returns><c>true</c> if the specified stream is H264; otherwise, <c>false</c>.</returns>
protected bool IsH264(MediaStream stream)
{
var codec = stream.Codec ?? string.Empty;
return codec.IndexOf("264", StringComparison.OrdinalIgnoreCase) != -1 ||
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// If we're going to put a fixed size on the command line, this will calculate it
/// </summary>
/// <param name="state">The state.</param>
/// <param name="outputVideoCodec">The output video codec.</param>
/// <param name="allowTimeStampCopy">if set to <c>true</c> [allow time stamp copy].</param>
/// <returns>System.String.</returns>
protected async Task<string> GetOutputSizeParam(EncodingJob state,
string outputVideoCodec,
bool allowTimeStampCopy = true)
{
// http://sonnati.wordpress.com/2012/10/19/ffmpeg-the-swiss-army-knife-of-internet-streaming-part-vi/
var request = state.Options;
var filters = new List<string>();
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
filters.Add("format=nv12|vaapi");
filters.Add("hwupload");
}
else if (state.DeInterlace && !string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
filters.Add("yadif=0:-1:0");
}
if (string.Equals(outputVideoCodec, "h264_vaapi", StringComparison.OrdinalIgnoreCase))
{
// Work around vaapi's reduced scaling features
var scaler = "scale_vaapi";
// Given the input dimensions (inputWidth, inputHeight), determine the output dimensions
// (outputWidth, outputHeight). The user may request precise output dimensions or maximum
// output dimensions. Output dimensions are guaranteed to be even.
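// Worked example (illustrative only): a 1920x1080 source with MaxWidth=1280 and MaxHeight=720 scales by min(1280/1920, 720/1080) = 2/3 to 1280x720, producing "scale_vaapi=w=1280:h=720"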
decimal inputWidth = Convert.ToDecimal(state.VideoStream.Width);
decimal inputHeight = Convert.ToDecimal(state.VideoStream.Height);
decimal outputWidth = request.Width.HasValue ? Convert.ToDecimal(request.Width.Value) : inputWidth;
decimal outputHeight = request.Height.HasValue ? Convert.ToDecimal(request.Height.Value) : inputHeight;
decimal maximumWidth = request.MaxWidth.HasValue ? Convert.ToDecimal(request.MaxWidth.Value) : outputWidth;
decimal maximumHeight = request.MaxHeight.HasValue ? Convert.ToDecimal(request.MaxHeight.Value) : outputHeight;
if (outputWidth > maximumWidth || outputHeight > maximumHeight)
{
var scale = Math.Min(maximumWidth / outputWidth, maximumHeight / outputHeight);
outputWidth = Math.Min(maximumWidth, Math.Truncate(outputWidth * scale));
outputHeight = Math.Min(maximumHeight, Math.Truncate(outputHeight * scale));
}
outputWidth = 2 * Math.Truncate(outputWidth / 2);
outputHeight = 2 * Math.Truncate(outputHeight / 2);
if (outputWidth != inputWidth || outputHeight != inputHeight)
{
filters.Add(string.Format("{0}=w={1}:h={2}", scaler, outputWidth.ToString(UsCulture), outputHeight.ToString(UsCulture)));
}
}
else
{
// If fixed dimensions were supplied
if (request.Width.HasValue && request.Height.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc({0}/2)*2:trunc({1}/2)*2", widthParam, heightParam));
}
// If max dimensions were supplied: width is the lowest even number between max(input width, input height * display aspect) and min(max width, max height * display aspect); height follows the same pattern
else if (request.MaxWidth.HasValue && request.MaxHeight.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
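// Illustrative only: with MaxWidth=1280 and MaxHeight=720 the filter becomes "scale=trunc(min(max(iw\,ih*dar)\,min(1280\,720*dar))/2)*2:trunc(min(max(iw/dar\,ih)\,min(1280/dar\,720))/2)*2"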
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,min({0}\\,{1}*dar))/2)*2:trunc(min(max(iw/dar\\,ih)\\,min({0}/dar\\,{1}))/2)*2", maxWidthParam, maxHeightParam));
}
// If a fixed width was requested
else if (request.Width.HasValue)
{
var widthParam = request.Width.Value.ToString(UsCulture);
filters.Add(string.Format("scale={0}:trunc(ow/a/2)*2", widthParam));
}
// If a fixed height was requested
else if (request.Height.HasValue)
{
var heightParam = request.Height.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:{0}", heightParam));
}
// If a max width was requested
else if (request.MaxWidth.HasValue)
{
var maxWidthParam = request.MaxWidth.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(min(max(iw\\,ih*dar)\\,{0})/2)*2:trunc(ow/dar/2)*2", maxWidthParam));
}
// If a max height was requested
else if (request.MaxHeight.HasValue)
{
var maxHeightParam = request.MaxHeight.Value.ToString(UsCulture);
filters.Add(string.Format("scale=trunc(oh*a/2)*2:min(max(iw/dar\\,ih)\\,{0})", maxHeightParam));
}
}
var output = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
var subParam = await GetTextSubtitleParam(state).ConfigureAwait(false);
filters.Add(subParam);
if (allowTimeStampCopy)
{
output += " -copyts";
}
}
if (filters.Count > 0)
{
output += string.Format(" -vf \"{0}\"", string.Join(",", filters.ToArray()));
}
return output;
}
/// <summary>
/// Gets the text subtitle param.
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
protected async Task<string> GetTextSubtitleParam(EncodingJob state)
{
var seconds = Math.Round(TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds);
if (state.SubtitleStream.IsExternal)
{
var subtitlePath = state.SubtitleStream.Path;
var charsetParam = string.Empty;
if (!string.IsNullOrEmpty(state.SubtitleStream.Language))
{
var charenc = await SubtitleEncoder.GetSubtitleFileCharacterSet(subtitlePath, state.SubtitleStream.Language, state.MediaSource.Protocol, CancellationToken.None).ConfigureAwait(false);
if (!string.IsNullOrEmpty(charenc))
{
charsetParam = ":charenc=" + charenc;
}
}
// TODO: Perhaps also use original_size=1920x800 ??
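// Illustrative only (hypothetical path and values): an external subtitle at /media/movie.srt with a detected UTF-8 character set and a 90 second seek expands to roughly "subtitles=filename='/media/movie.srt':charenc=UTF-8,setpts=PTS -90/TB" (the exact path depends on EscapeSubtitleFilterPath)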
return string.Format("subtitles=filename='{0}'{1},setpts=PTS -{2}/TB",
MediaEncoder.EscapeSubtitleFilterPath(subtitlePath),
charsetParam,
seconds.ToString(UsCulture));
}
var mediaPath = state.MediaPath ?? string.Empty;
return string.Format("subtitles='{0}:si={1}',setpts=PTS -{2}/TB",
MediaEncoder.EscapeSubtitleFilterPath(mediaPath),
state.InternalSubtitleStreamOffset.ToString(UsCulture),
seconds.ToString(UsCulture));
}
protected string GetAudioFilterParam(EncodingJob state, bool isHls)
{
var volParam = string.Empty;
var audioSampleRate = string.Empty;
var channels = state.OutputAudioChannels;
// Boost volume when downmixing from 5.1 (or higher) to stereo, using the configured DownMixAudioBoost
if (channels.HasValue && channels.Value <= 2)
{
if (state.AudioStream != null && state.AudioStream.Channels.HasValue && state.AudioStream.Channels.Value > 5 && !GetEncodingOptions().DownMixAudioBoost.Equals(1))
{
volParam = ",volume=" + GetEncodingOptions().DownMixAudioBoost.ToString(UsCulture);
}
}
if (state.OutputAudioSampleRate.HasValue)
{
audioSampleRate = state.OutputAudioSampleRate.Value + ":";
}
var adelay = isHls ? "adelay=1," : string.Empty;
var pts = string.Empty;
if (state.SubtitleStream != null && state.SubtitleStream.IsTextSubtitleStream && state.Options.SubtitleMethod == SubtitleDeliveryMethod.Encode && !state.Options.CopyTimestamps)
{
var seconds = TimeSpan.FromTicks(state.Options.StartTimeTicks ?? 0).TotalSeconds;
pts = string.Format(",asetpts=PTS-{0}/TB", Math.Round(seconds).ToString(UsCulture));
}
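// Illustrative only: an HLS job resampling to 44100 Hz, with no volume boost and no PTS shift, produces -af "adelay=1,aresample=44100:async=1"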
return string.Format("-af \"{0}aresample={1}async={4}{2}{3}\"",
adelay,
audioSampleRate,
volParam,
pts,
state.OutputAudioSync);
}
}
}

File diff suppressed because it is too large

@ -17,7 +17,7 @@ using System.Threading.Tasks;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class EncodingJob : IDisposable
public class EncodingJob : EncodingJobInfo, IDisposable
{
public bool HasExited { get; internal set; }
public bool IsCancelled { get; internal set; }
@ -25,46 +25,24 @@ namespace MediaBrowser.MediaEncoding.Encoder
public Stream LogFileStream { get; set; }
public IProgress<double> Progress { get; set; }
public TaskCompletionSource<bool> TaskCompletionSource;
public EncodingJobOptions Options { get; set; }
public string InputContainer { get; set; }
public MediaSourceInfo MediaSource { get; set; }
public MediaStream AudioStream { get; set; }
public MediaStream VideoStream { get; set; }
public MediaStream SubtitleStream { get; set; }
public IIsoMount IsoMount { get; set; }
public bool ReadInputAtNativeFramerate { get; set; }
public bool IsVideoRequest { get; set; }
public string InputAudioSync { get; set; }
public string InputVideoSync { get; set; }
public string Id { get; set; }
public string MediaPath { get; set; }
public MediaProtocol InputProtocol { get; set; }
public bool IsInputVideo { get; set; }
public VideoType VideoType { get; set; }
public IsoType? IsoType { get; set; }
public List<string> PlayableStreamFileNames { get; set; }
public EncodingJobOptions Options
{
get { return (EncodingJobOptions) BaseRequest; }
set { BaseRequest = value; }
}
public List<string> SupportedAudioCodecs { get; set; }
public Dictionary<string, string> RemoteHttpHeaders { get; set; }
public TransportStreamTimestamp InputTimestamp { get; set; }
public string Id { get; set; }
public bool DeInterlace { get; set; }
public string MimeType { get; set; }
public bool EstimateContentLength { get; set; }
public bool EnableMpegtsM2TsMode { get; set; }
public TranscodeSeekInfo TranscodeSeekInfo { get; set; }
public long? EncodingDurationTicks { get; set; }
public string LiveStreamId { get; set; }
public long? RunTimeTicks;
public string ItemType { get; set; }
public long? InputBitrate { get; set; }
public long? InputFileSize { get; set; }
public string OutputAudioSync = "1";
public string OutputVideoSync = "vfr";
public string AlbumCoverPath { get; set; }
public string GetMimeType(string outputPath)
@ -80,17 +58,14 @@ namespace MediaBrowser.MediaEncoding.Encoder
private readonly ILogger _logger;
private readonly IMediaSourceManager _mediaSourceManager;
public EncodingJob(ILogger logger, IMediaSourceManager mediaSourceManager)
public EncodingJob(ILogger logger, IMediaSourceManager mediaSourceManager) :
base(logger)
{
_logger = logger;
_mediaSourceManager = mediaSourceManager;
Id = Guid.NewGuid().ToString("N");
RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
_logger = logger;
SupportedAudioCodecs = new List<string>();
PlayableStreamFileNames = new List<string>();
RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
TaskCompletionSource = new TaskCompletionSource<bool>();
}
@ -118,23 +93,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
private void DisposeIsoMount()
{
if (IsoMount != null)
{
try
{
IsoMount.Dispose();
}
catch (Exception ex)
{
_logger.ErrorException("Error disposing iso mount", ex);
}
IsoMount = null;
}
}
private async void DisposeLiveStream()
{
if (MediaSource.RequiresClosing)
@ -150,15 +108,8 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
public int InternalSubtitleStreamOffset { get; set; }
public string OutputFilePath { get; set; }
public string OutputVideoCodec { get; set; }
public string OutputAudioCodec { get; set; }
public int? OutputAudioChannels;
public int? OutputAudioSampleRate;
public int? OutputAudioBitrate;
public int? OutputVideoBitrate;
public string ActualOutputVideoCodec
{
@ -313,25 +264,11 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
}
/// <summary>
/// Predicts the video level that will be in the output stream
/// </summary>
public double? TargetVideoLevel
{
get
{
var stream = VideoStream;
return Options.Level.HasValue && !Options.Static
? Options.Level.Value
: stream == null ? null : stream.Level;
}
}
public TransportStreamTimestamp TargetTimestamp
{
get
{
var defaultValue = string.Equals(Options.OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase) ?
var defaultValue = string.Equals(OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase) ?
TransportStreamTimestamp.Valid :
TransportStreamTimestamp.None;

@ -33,7 +33,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
_config = config;
}
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, EncodingHelper encodingHelper, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
{
var request = options;
@ -49,6 +49,12 @@ namespace MediaBrowser.MediaEncoding.Encoder
Progress = progress
};
if (!string.IsNullOrWhiteSpace(request.VideoCodec))
{
state.SupportedVideoCodecs = request.VideoCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
request.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault();
}
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
@ -76,7 +82,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
var videoRequest = state.Options;
AttachMediaSourceInfo(state, mediaSource, videoRequest);
encodingHelper.AttachMediaSourceInfo(state, mediaSource, null);
//var container = Path.GetExtension(state.RequestedUrl);
@ -89,17 +95,17 @@ namespace MediaBrowser.MediaEncoding.Encoder
//state.OutputContainer = (container ?? string.Empty).TrimStart('.');
state.OutputAudioBitrate = GetAudioBitrateParam(state.Options, state.AudioStream);
state.OutputAudioBitrate = encodingHelper.GetAudioBitrateParam(state.Options, state.AudioStream);
state.OutputAudioSampleRate = request.AudioSampleRate;
state.OutputAudioCodec = state.Options.AudioCodec;
state.OutputAudioChannels = GetNumAudioChannelsParam(state.Options, state.AudioStream, state.OutputAudioCodec);
state.OutputAudioChannels = encodingHelper.GetNumAudioChannelsParam(state.Options, state.AudioStream, state.OutputAudioCodec);
if (videoRequest != null)
{
state.OutputVideoCodec = state.Options.VideoCodec;
state.OutputVideoBitrate = GetVideoBitrateParamValue(state.Options, state.VideoStream, state.OutputVideoCodec);
state.OutputVideoBitrate = encodingHelper.GetVideoBitrateParamValue(state.Options, state.VideoStream, state.OutputVideoCodec);
if (state.OutputVideoBitrate.HasValue)
{
@ -120,7 +126,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
if (videoRequest != null)
{
TryStreamCopy(state, videoRequest);
encodingHelper.TryStreamCopy(state);
}
//state.OutputFilePath = GetOutputFilePath(state);
@ -128,104 +134,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return state;
}
internal static void TryStreamCopy(EncodingJob state,
EncodingJobOptions videoRequest)
{
if (state.IsVideoRequest)
{
if (state.VideoStream != null && CanStreamCopyVideo(videoRequest, state.VideoStream))
{
state.OutputVideoCodec = "copy";
}
if (state.AudioStream != null && CanStreamCopyAudio(videoRequest, state.AudioStream, state.SupportedAudioCodecs))
{
state.OutputAudioCodec = "copy";
}
}
}
internal static void AttachMediaSourceInfo(EncodingJob state,
MediaSourceInfo mediaSource,
EncodingJobOptions videoRequest)
{
state.MediaPath = mediaSource.Path;
state.InputProtocol = mediaSource.Protocol;
state.InputContainer = mediaSource.Container;
state.InputFileSize = mediaSource.Size;
state.InputBitrate = mediaSource.Bitrate;
state.RunTimeTicks = mediaSource.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
if (mediaSource.VideoType.HasValue)
{
state.VideoType = mediaSource.VideoType.Value;
}
state.IsoType = mediaSource.IsoType;
state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();
if (mediaSource.Timestamp.HasValue)
{
state.InputTimestamp = mediaSource.Timestamp.Value;
}
state.InputProtocol = mediaSource.Protocol;
state.MediaPath = mediaSource.Path;
state.RunTimeTicks = mediaSource.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
state.InputBitrate = mediaSource.Bitrate;
state.InputFileSize = mediaSource.Size;
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
if (state.ReadInputAtNativeFramerate ||
mediaSource.Protocol == MediaProtocol.File && string.Equals(mediaSource.Container, "wtv", StringComparison.OrdinalIgnoreCase))
{
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
}
if (string.Equals(mediaSource.Container, "wma", StringComparison.OrdinalIgnoreCase))
{
// Seeing some stuttering when transcoding wma to audio-only HLS
state.InputAudioSync = "1";
}
var mediaStreams = mediaSource.MediaStreams;
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.MediaSource = mediaSource;
}
protected EncodingOptions GetEncodingOptions()
{
return _config.GetConfiguration<EncodingOptions>("encoding");
@ -300,203 +208,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return "copy";
}
/// <summary>
/// Determines which stream will be used for playback
/// </summary>
/// <param name="allStream">All stream.</param>
/// <param name="desiredIndex">Index of the desired.</param>
/// <param name="type">The type.</param>
/// <param name="returnFirstIfNoIndex">if set to <c>true</c> [return first if no index].</param>
/// <returns>MediaStream.</returns>
private static MediaStream GetMediaStream(IEnumerable<MediaStream> allStream, int? desiredIndex, MediaStreamType type, bool returnFirstIfNoIndex = true)
{
var streams = allStream.Where(s => s.Type == type).OrderBy(i => i.Index).ToList();
if (desiredIndex.HasValue)
{
var stream = streams.FirstOrDefault(s => s.Index == desiredIndex.Value);
if (stream != null)
{
return stream;
}
}
if (type == MediaStreamType.Video)
{
streams = streams.Where(i => !string.Equals(i.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase)).ToList();
}
if (returnFirstIfNoIndex && type == MediaStreamType.Audio)
{
return streams.FirstOrDefault(i => i.Channels.HasValue && i.Channels.Value > 0) ??
streams.FirstOrDefault();
}
// Just return the first one
return returnFirstIfNoIndex ? streams.FirstOrDefault() : null;
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
{
// Switch the incoming params to be ceilings rather than fixed values
videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;
videoRequest.Width = null;
videoRequest.Height = null;
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
var inputChannels = audioStream == null
? null
: audioStream.Channels;
if (inputChannels <= 0)
{
inputChannels = null;
}
int? transcoderChannelLimit = null;
var codec = outputAudioCodec ?? string.Empty;
if (codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two channel output
transcoderChannelLimit = 2;
}
else if (codec.IndexOf("mp3", StringComparison.OrdinalIgnoreCase) != -1)
{
// libmp3lame currently only supports two channel output
transcoderChannelLimit = 2;
}
else
{
// If we don't have any media info then limit it to 6 to prevent encoding errors due to asking for too many channels
transcoderChannelLimit = 6;
}
var isTranscodingAudio = !string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase);
int? resultChannels = null;
if (isTranscodingAudio)
{
resultChannels = request.TranscodingMaxAudioChannels;
}
resultChannels = resultChannels ?? request.MaxAudioChannels ?? request.AudioChannels;
if (inputChannels.HasValue)
{
resultChannels = resultChannels.HasValue
? Math.Min(resultChannels.Value, inputChannels.Value)
: inputChannels.Value;
}
if (isTranscodingAudio && transcoderChannelLimit.HasValue)
{
resultChannels = resultChannels.HasValue
? Math.Min(resultChannels.Value, transcoderChannelLimit.Value)
: transcoderChannelLimit.Value;
}
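// Illustrative only: transcoding a 5.1 (6 channel) source to mp3 with no explicit channel request resolves to min(6, 2) = 2 output channels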
return resultChannels ?? request.AudioChannels;
}
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream, string outputVideoCodec)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
if (bitrate.HasValue)
{
var inputVideoCodec = videoStream == null ? null : videoStream.Codec;
bitrate = ResolutionNormalizer.ScaleBitrate(bitrate.Value, inputVideoCodec, outputVideoCodec);
// If a max bitrate was requested, don't let the scaled bitrate exceed it
if (request.VideoBitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, request.VideoBitRate.Value);
}
}
return bitrate;
}
protected string GetVideoBitrateParam(EncodingJob state, string videoCodec, bool isHls)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
// With vpx, when crf is used, b:v becomes a max rate
// https://trac.ffmpeg.org/wiki/vpxEncodingGuide. With higher-bitrate source files -b:v causes judder, so limit the bitrate but don't let it "saturate"; constrain it up, not down.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// h264
return string.Format(" -b:v {0} -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
private int? GetAudioBitrateParam(EncodingJobOptions request, MediaStream audioStream)
{
if (request.AudioBitRate.HasValue)
{
// Make sure we don't request a bitrate higher than the source
var currentBitrate = audioStream == null ? request.AudioBitRate.Value : audioStream.BitRate ?? request.AudioBitRate.Value;
return request.AudioBitRate.Value;
//return Math.Min(currentBitrate, request.AudioBitRate.Value);
}
return null;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
@ -510,260 +221,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// Gets the name of the output audio codec
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
internal static string GetAudioEncoder(EncodingJob state)
{
var codec = state.OutputAudioCodec;
if (string.Equals(codec, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac -strict experimental";
}
if (string.Equals(codec, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "libmp3lame";
}
if (string.Equals(codec, "vorbis", StringComparison.OrdinalIgnoreCase))
{
return "libvorbis";
}
if (string.Equals(codec, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wmav2";
}
return codec.ToLower();
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
/// <returns>System.String.</returns>
internal static string GetVideoEncoder(IMediaEncoder mediaEncoder, EncodingJob state, EncodingOptions options)
{
var codec = state.OutputVideoCodec;
if (!string.IsNullOrEmpty(codec))
{
if (string.Equals(codec, "h264", StringComparison.OrdinalIgnoreCase))
{
return GetH264Encoder(mediaEncoder, state, options);
}
if (string.Equals(codec, "vpx", StringComparison.OrdinalIgnoreCase))
{
return "libvpx";
}
if (string.Equals(codec, "wmv", StringComparison.OrdinalIgnoreCase))
{
return "wmv2";
}
if (string.Equals(codec, "theora", StringComparison.OrdinalIgnoreCase))
{
return "libtheora";
}
return codec.ToLower();
}
return "copy";
}
private static string GetAvailableEncoder(IMediaEncoder mediaEncoder, string preferredEncoder, string defaultEncoder)
{
if (mediaEncoder.SupportsEncoder(preferredEncoder))
{
return preferredEncoder;
}
return defaultEncoder;
}
internal static string GetH264Encoder(IMediaEncoder mediaEncoder, EncodingJob state, EncodingOptions options)
{
var defaultEncoder = "libx264";
// Only use alternative encoders for video files.
// When using concat with folder rips, if the mfx session fails to initialize, ffmpeg will be stuck retrying and will not exit gracefully
// Since transcoding of folder rips is experimental anyway, it's not worth adding additional variables such as this.
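// Illustrative only: with HardwareAccelerationType set to "qsv" this returns "h264_qsv" when ffmpeg reports that encoder, otherwise it falls back to libx264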
if (state.VideoType == VideoType.VideoFile)
{
var hwType = options.HardwareAccelerationType;
if (string.Equals(hwType, "qsv", StringComparison.OrdinalIgnoreCase) ||
string.Equals(hwType, "h264_qsv", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_qsv", defaultEncoder);
}
if (string.Equals(hwType, "nvenc", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_nvenc", defaultEncoder);
}
if (string.Equals(hwType, "h264_omx", StringComparison.OrdinalIgnoreCase))
{
return GetAvailableEncoder(mediaEncoder, "h264_omx", defaultEncoder);
}
if (string.Equals(hwType, "vaapi", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrWhiteSpace(options.VaapiDevice))
{
if (IsVaapiSupported(state))
{
return GetAvailableEncoder(mediaEncoder, "h264_vaapi", defaultEncoder);
}
}
}
return defaultEncoder;
}
private static bool IsVaapiSupported(EncodingJob state)
{
var videoStream = state.VideoStream;
if (videoStream != null)
{
// vaapi will throw an error with this input
// [vaapi @ 0x7faed8000960] No VAAPI support for codec mpeg4 profile -99.
if (string.Equals(videoStream.Codec, "mpeg4", StringComparison.OrdinalIgnoreCase))
{
if (videoStream.Level == -99 || videoStream.Level == 15)
{
return false;
}
}
}
return true;
}
internal static bool CanStreamCopyVideo(EncodingJobOptions request, MediaStream videoStream)
{
if (videoStream.IsInterlaced)
{
return false;
}
if (videoStream.IsAnamorphic ?? false)
{
return false;
}
// Can't stream copy if we're burning in subtitles
if (request.SubtitleStreamIndex.HasValue)
{
if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
return false;
}
}
// Source and target codecs must match
if (!string.Equals(request.VideoCodec, videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return false;
}
if (string.Equals("h264", videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
if (videoStream.IsAVC.HasValue && !videoStream.IsAVC.Value)
{
return false;
}
}
// If client is requesting a specific video profile, it must match the source
if (!string.IsNullOrEmpty(request.Profile))
{
if (string.IsNullOrEmpty(videoStream.Profile))
{
return false;
}
if (!string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
{
var currentScore = GetVideoProfileScore(videoStream.Profile);
var requestedScore = GetVideoProfileScore(request.Profile);
if (currentScore == -1 || currentScore > requestedScore)
{
return false;
}
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue)
{
if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
{
return false;
}
}
// Video height must fall within requested value
if (request.MaxHeight.HasValue)
{
if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
{
return false;
}
}
// Video framerate must fall within requested value
var requestedFramerate = request.MaxFramerate ?? request.Framerate;
if (requestedFramerate.HasValue)
{
var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
{
return false;
}
}
// Video bitrate must fall within requested value
if (request.VideoBitRate.HasValue)
{
if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
{
return false;
}
}
if (request.MaxVideoBitDepth.HasValue)
{
if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
{
return false;
}
}
if (request.MaxRefFrames.HasValue)
{
if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
{
return false;
}
}
// If a specific level was requested, the source must match or be less than
if (request.Level.HasValue)
{
if (!videoStream.Level.HasValue)
{
return false;
}
if (videoStream.Level.Value > request.Level.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
private static int GetVideoProfileScore(string profile)
{
var list = new List<string>
@ -780,57 +237,6 @@ namespace MediaBrowser.MediaEncoding.Encoder
return Array.FindIndex(list.ToArray(), t => string.Equals(t, profile, StringComparison.OrdinalIgnoreCase));
}
internal static bool CanStreamCopyAudio(EncodingJobOptions request, MediaStream audioStream, List<string> supportedAudioCodecs)
{
// Source and target codecs must match
if (string.IsNullOrEmpty(audioStream.Codec) || !supportedAudioCodecs.Contains(audioStream.Codec, StringComparer.OrdinalIgnoreCase))
{
return false;
}
// Audio bitrate must fall within requested value
if (request.AudioBitRate.HasValue)
{
if (!audioStream.BitRate.HasValue || audioStream.BitRate.Value <= 0)
{
return false;
}
if (audioStream.BitRate.Value > request.AudioBitRate.Value)
{
return false;
}
}
// Channels must fall within requested value
var channels = request.AudioChannels ?? request.MaxAudioChannels;
if (channels.HasValue)
{
if (!audioStream.Channels.HasValue || audioStream.Channels.Value <= 0)
{
return false;
}
if (audioStream.Channels.Value > channels.Value)
{
return false;
}
}
// Sample rate must fall within requested value
if (request.AudioSampleRate.HasValue)
{
if (!audioStream.SampleRate.HasValue || audioStream.SampleRate.Value <= 0)
{
return false;
}
if (audioStream.SampleRate.Value > request.AudioSampleRate.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
private void ApplyDeviceProfileSettings(EncodingJob state)
{
var profile = state.Options.DeviceProfile;

@ -0,0 +1,118 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
namespace MediaBrowser.MediaEncoding.Encoder
{
// For now, a common base class until the API and MediaEncoding classes are unified
public class EncodingJobInfo
{
private readonly ILogger _logger;
public MediaStream VideoStream { get; set; }
public VideoType VideoType { get; set; }
public Dictionary<string, string> RemoteHttpHeaders { get; set; }
public string OutputVideoCodec { get; set; }
public MediaProtocol InputProtocol { get; set; }
public string MediaPath { get; set; }
public bool IsInputVideo { get; set; }
public IIsoMount IsoMount { get; set; }
public List<string> PlayableStreamFileNames { get; set; }
public string OutputAudioCodec { get; set; }
public int? OutputVideoBitrate { get; set; }
public MediaStream SubtitleStream { get; set; }
public SubtitleDeliveryMethod SubtitleDeliveryMethod { get; set; }
public int InternalSubtitleStreamOffset { get; set; }
public MediaSourceInfo MediaSource { get; set; }
public User User { get; set; }
public long? RunTimeTicks { get; set; }
public bool ReadInputAtNativeFramerate { get; set; }
public string OutputContainer { get; set; }
public string OutputVideoSync = "-1";
public string OutputAudioSync = "1";
public string InputAudioSync { get; set; }
public string InputVideoSync { get; set; }
public TransportStreamTimestamp InputTimestamp { get; set; }
public MediaStream AudioStream { get; set; }
public List<string> SupportedAudioCodecs { get; set; }
public List<string> SupportedVideoCodecs { get; set; }
public string InputContainer { get; set; }
public IsoType? IsoType { get; set; }
public BaseEncodingJobOptions BaseRequest { get; set; }
public long? StartTimeTicks
{
get { return BaseRequest.StartTimeTicks; }
}
public bool CopyTimestamps
{
get { return BaseRequest.CopyTimestamps; }
}
public int? OutputAudioChannels;
public int? OutputAudioSampleRate;
public bool DeInterlace { get; set; }
public bool IsVideoRequest { get; set; }
public EncodingJobInfo(ILogger logger)
{
_logger = logger;
RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
PlayableStreamFileNames = new List<string>();
SupportedAudioCodecs = new List<string>();
SupportedVideoCodecs = new List<string>();
}
/// <summary>
/// Predicts the video level that will be in the output stream
/// </summary>
public double? TargetVideoLevel
{
get
{
var stream = VideoStream;
var request = BaseRequest;
return !string.IsNullOrEmpty(request.Level) && !request.Static
? double.Parse(request.Level, CultureInfo.InvariantCulture)
: stream == null ? null : stream.Level;
}
}
protected void DisposeIsoMount()
{
if (IsoMount != null)
{
try
{
IsoMount.Dispose();
}
catch (Exception ex)
{
_logger.ErrorException("Error disposing iso mount", ex);
}
IsoMount = null;
}
}
}
}

@ -21,7 +21,8 @@ namespace MediaBrowser.MediaEncoding.Encoder
protected override async Task<string> GetCommandLineArguments(EncodingJob state)
{
// Get the output codec name
var videoCodec = EncodingJobFactory.GetVideoEncoder(MediaEncoder, state, GetEncodingOptions());
var encodingOptions = GetEncodingOptions();
var videoCodec = EncodingHelper.GetVideoEncoder(state, encodingOptions);
var format = string.Empty;
var keyFrame = string.Empty;
@ -33,17 +34,17 @@ namespace MediaBrowser.MediaEncoding.Encoder
format = " -f mp4 -movflags frag_keyframe+empty_moov";
}
var threads = GetNumberOfThreads(state, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));
var threads = EncodingHelper.GetNumberOfThreads(state, encodingOptions, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));
var inputModifier = GetInputModifier(state);
var inputModifier = EncodingHelper.GetInputModifier(state, encodingOptions);
var videoArguments = await GetVideoArguments(state, videoCodec).ConfigureAwait(false);
return string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -threads {5} {6}{7} -y \"{8}\"",
inputModifier,
GetInputArgument(state),
EncodingHelper.GetInputArgument(state, encodingOptions),
keyFrame,
GetMapArgs(state),
EncodingHelper.GetMapArgs(state),
videoArguments,
threads,
GetAudioArguments(state),
@ -76,7 +77,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
if (string.Equals(videoCodec, "copy", StringComparison.OrdinalIgnoreCase))
{
if (state.VideoStream != null && IsH264(state.VideoStream) && string.Equals(state.Options.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
if (state.VideoStream != null && EncodingHelper.IsH264(state.VideoStream) && string.Equals(state.Options.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) && !string.Equals(state.VideoStream.NalLengthSize, "0", StringComparison.OrdinalIgnoreCase))
{
args += " -bsf:v h264_mp4toannexb";
}
@ -94,10 +95,10 @@ namespace MediaBrowser.MediaEncoding.Encoder
// Add resolution params, if specified
if (!hasGraphicalSubs)
{
args += await GetOutputSizeParam(state, videoCodec).ConfigureAwait(false);
args += EncodingHelper.GetOutputSizeParam(state, videoCodec);
}
var qualityParam = GetVideoQualityParam(state, videoCodec);
var qualityParam = EncodingHelper.GetVideoQualityParam(state, videoCodec, GetEncodingOptions(), "superfast");
if (!string.IsNullOrEmpty(qualityParam))
{
@ -107,7 +108,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
// This is for internal graphical subs
if (hasGraphicalSubs)
{
args += await GetGraphicalSubtitleParam(state, videoCodec).ConfigureAwait(false);
args += EncodingHelper.GetGraphicalSubtitleParam(state, videoCodec);
}
return args;
@ -127,7 +128,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
}
// Get the output codec name
var codec = EncodingJobFactory.GetAudioEncoder(state);
var codec = EncodingHelper.GetAudioEncoder(state);
var args = "-codec:a:0 " + codec;
@ -151,7 +152,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
args += " -ab " + bitrate.Value.ToString(UsCulture);
}
args += " " + GetAudioFilterParam(state, false);
args += " " + EncodingHelper.GetAudioFilterParam(state, GetEncodingOptions(), false);
return args;
}

@ -50,8 +50,10 @@
<Compile Include="Configuration\EncodingConfigurationFactory.cs" />
<Compile Include="Encoder\AudioEncoder.cs" />
<Compile Include="Encoder\BaseEncoder.cs" />
<Compile Include="Encoder\EncodingHelper.cs" />
<Compile Include="Encoder\EncodingJob.cs" />
<Compile Include="Encoder\EncodingJobFactory.cs" />
<Compile Include="Encoder\EncodingJobInfo.cs" />
<Compile Include="Encoder\EncodingUtils.cs" />
<Compile Include="Encoder\EncoderValidator.cs" />
<Compile Include="Encoder\FontConfigLoader.cs" />
