sync video transcoding

pull/702/head
Luke Pulverenti 10 years ago
parent c93740461e
commit c63c39ce57

@@ -0,0 +1,91 @@
using MediaBrowser.Model.Dlna;
namespace MediaBrowser.Controller.MediaEncoding
{
public class EncodingJobOptions
{
public string OutputContainer { get; set; }
public long? StartTimeTicks { get; set; }
public int? Width { get; set; }
public int? Height { get; set; }
public int? MaxWidth { get; set; }
public int? MaxHeight { get; set; }
public bool Static = false;
public float? Framerate { get; set; }
public float? MaxFramerate { get; set; }
public string Profile { get; set; }
public int? Level { get; set; }
public string DeviceId { get; set; }
public string ItemId { get; set; }
public string MediaSourceId { get; set; }
public string AudioCodec { get; set; }
public bool EnableAutoStreamCopy { get; set; }
public int? MaxAudioChannels { get; set; }
public int? AudioChannels { get; set; }
public int? AudioBitRate { get; set; }
public int? AudioSampleRate { get; set; }
public DeviceProfile DeviceProfile { get; set; }
public EncodingContext Context { get; set; }
public string VideoCodec { get; set; }
public int? VideoBitRate { get; set; }
public int? AudioStreamIndex { get; set; }
public int? VideoStreamIndex { get; set; }
public int? SubtitleStreamIndex { get; set; }
public int? MaxRefFrames { get; set; }
public int? MaxVideoBitDepth { get; set; }
public SubtitleDeliveryMethod SubtitleMethod { get; set; }
/// <summary>
/// Gets a value indicating whether this instance has fixed resolution.
/// </summary>
/// <value><c>true</c> if this instance has fixed resolution; otherwise, <c>false</c>.</value>
public bool HasFixedResolution
{
get
{
return Width.HasValue || Height.HasValue;
}
}
public bool? Cabac { get; set; }
public EncodingJobOptions()
{
}
public EncodingJobOptions(StreamInfo info, DeviceProfile deviceProfile)
{
OutputContainer = info.Container;
StartTimeTicks = info.StartPositionTicks;
MaxWidth = info.MaxWidth;
MaxHeight = info.MaxHeight;
MaxFramerate = info.MaxFramerate;
Profile = info.VideoProfile;
Level = info.VideoLevel;
ItemId = info.ItemId;
MediaSourceId = info.MediaSourceId;
AudioCodec = info.AudioCodec;
MaxAudioChannels = info.MaxAudioChannels;
AudioBitRate = info.AudioBitrate;
AudioSampleRate = info.TargetAudioSampleRate;
DeviceProfile = deviceProfile;
VideoCodec = info.VideoCodec;
VideoBitRate = info.VideoBitrate;
AudioStreamIndex = info.AudioStreamIndex;
SubtitleStreamIndex = info.SubtitleStreamIndex;
MaxRefFrames = info.MaxRefFrames;
MaxVideoBitDepth = info.MaxVideoBitDepth;
SubtitleMethod = info.SubtitleDeliveryMethod;
Cabac = info.Cabac;
Context = info.Context;
}
}
}
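For reference, a minimal sketch of how a caller might populate these options by hand for a 720p H.264/AAC target. All values are hypothetical; in the sync path further below the options come from the StreamInfo/DeviceProfile constructor overload instead.

// Hypothetical values throughout; only the property names come from EncodingJobOptions above.
var options = new EncodingJobOptions
{
    ItemId = "hypothetical-item-id",
    MediaSourceId = "hypothetical-source-id",
    OutputContainer = "mp4",
    VideoCodec = "h264",
    AudioCodec = "aac",
    MaxWidth = 1280,
    MaxHeight = 720,
    VideoBitRate = 3000000,
    AudioBitRate = 128000,
    MaxAudioChannels = 2
};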

@@ -107,5 +107,16 @@ namespace MediaBrowser.Controller.MediaEncoding
Task<string> EncodeAudio(EncodingJobOptions options,
IProgress<double> progress,
CancellationToken cancellationToken);
/// <summary>
/// Encodes the video.
/// </summary>
/// <param name="options">The options.</param>
/// <param name="progress">The progress.</param>
/// <param name="cancellationToken">The cancellation token.</param>
/// <returns>Task&lt;System.String&gt;.</returns>
Task<string> EncodeVideo(EncodingJobOptions options,
IProgress<double> progress,
CancellationToken cancellationToken);
}
}
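A sketch of consuming the new interface member, assuming an IMediaEncoder instance and a populated EncodingJobOptions are already in scope and the call sits inside an async method:

// Progress is reported as a 0-100 percentage (see EncodingJob.ReportTranscodingProgress below).
var progress = new Progress<double>(pct => Console.WriteLine("Transcoding: {0:0.#}%", pct));
using (var cts = new CancellationTokenSource())
{
    var outputPath = await mediaEncoder.EncodeVideo(options, progress, cts.Token);
    Console.WriteLine("Wrote " + outputPath);
}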

@@ -0,0 +1,86 @@
using MediaBrowser.Common.IO;
using MediaBrowser.Controller.Channels;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.LiveTv;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Session;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using System;
using System.Collections.Generic;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class AudioEncoder : BaseEncoder
{
public AudioEncoder(MediaEncoder mediaEncoder, ILogger logger, IServerConfigurationManager configurationManager, IFileSystem fileSystem, ILiveTvManager liveTvManager, IIsoManager isoManager, ILibraryManager libraryManager, IChannelManager channelManager, ISessionManager sessionManager, ISubtitleEncoder subtitleEncoder) : base(mediaEncoder, logger, configurationManager, fileSystem, liveTvManager, isoManager, libraryManager, channelManager, sessionManager, subtitleEncoder)
{
}
protected override string GetCommandLineArguments(EncodingJob job)
{
var audioTranscodeParams = new List<string>();
var bitrate = job.OutputAudioBitrate;
if (bitrate.HasValue)
{
audioTranscodeParams.Add("-ab " + bitrate.Value.ToString(UsCulture));
}
if (job.OutputAudioChannels.HasValue)
{
audioTranscodeParams.Add("-ac " + job.OutputAudioChannels.Value.ToString(UsCulture));
}
if (job.OutputAudioSampleRate.HasValue)
{
audioTranscodeParams.Add("-ar " + job.OutputAudioSampleRate.Value.ToString(UsCulture));
}
var threads = GetNumberOfThreads(job, false);
var inputModifier = GetInputModifier(job);
return string.Format("{0} {1} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1 -y \"{5}\"",
inputModifier,
GetInputArgument(job),
threads,
" -vn",
string.Join(" ", audioTranscodeParams.ToArray()),
job.OutputFilePath).Trim();
}
protected override string GetOutputFileExtension(EncodingJob state)
{
var ext = base.GetOutputFileExtension(state);
if (!string.IsNullOrEmpty(ext))
{
return ext;
}
var audioCodec = state.Options.AudioCodec;
if (string.Equals("aac", audioCodec, StringComparison.OrdinalIgnoreCase))
{
return ".aac";
}
if (string.Equals("mp3", audioCodec, StringComparison.OrdinalIgnoreCase))
{
return ".mp3";
}
if (string.Equals("vorbis", audioCodec, StringComparison.OrdinalIgnoreCase))
{
return ".ogg";
}
if (string.Equals("wma", audioCodec, StringComparison.OrdinalIgnoreCase))
{
return ".wma";
}
return null;
}
}
}
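A standalone illustration of how the format string above assembles the final ffmpeg invocation. The input modifier, input argument and thread count come from BaseEncoder, which is not part of this diff, so those values are hypothetical:

var audioTranscodeParams = new[] { "-ab 192000", "-ac 2", "-ar 44100" };
var commandLine = string.Format(
    "{0} {1} -threads {2}{3} {4} -id3v2_version 3 -write_id3v1 1 -y \"{5}\"",
    "",                                  // input modifier (none in this example)
    "-i \"/media/music/track.flac\"",    // input argument (hypothetical path)
    0,                                   // -threads 0 lets ffmpeg decide
    " -vn",                              // drop any embedded video/cover-art stream
    string.Join(" ", audioTranscodeParams),
    "/transcodes/abc123.mp3").Trim();
// => -i "/media/music/track.flac" -threads 0 -vn -ab 192000 -ac 2 -ar 44100 -id3v2_version 3 -write_id3v1 1 -y "/transcodes/abc123.mp3"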

File diff suppressed because it is too large

@@ -0,0 +1,434 @@
using MediaBrowser.Controller.LiveTv;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Drawing;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using MediaBrowser.Model.Net;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class EncodingJob : IDisposable
{
public bool HasExited { get; internal set; }
public Stream LogFileStream { get; set; }
public IProgress<double> Progress { get; set; }
public TaskCompletionSource<bool> TaskCompletionSource;
public EncodingJobOptions Options { get; set; }
public string InputContainer { get; set; }
public List<MediaStream> AllMediaStreams { get; set; }
public MediaStream AudioStream { get; set; }
public MediaStream VideoStream { get; set; }
public MediaStream SubtitleStream { get; set; }
public IIsoMount IsoMount { get; set; }
public bool ReadInputAtNativeFramerate { get; set; }
public bool IsVideoRequest { get; set; }
public string InputAudioSync { get; set; }
public string InputVideoSync { get; set; }
public string Id { get; set; }
public string MediaPath { get; set; }
public MediaProtocol InputProtocol { get; set; }
public bool IsInputVideo { get; set; }
public VideoType VideoType { get; set; }
public IsoType? IsoType { get; set; }
public List<string> PlayableStreamFileNames { get; set; }
public List<string> SupportedAudioCodecs { get; set; }
public Dictionary<string, string> RemoteHttpHeaders { get; set; }
public TransportStreamTimestamp InputTimestamp { get; set; }
public bool DeInterlace { get; set; }
public string MimeType { get; set; }
public bool EstimateContentLength { get; set; }
public bool EnableMpegtsM2TsMode { get; set; }
public TranscodeSeekInfo TranscodeSeekInfo { get; set; }
public long? EncodingDurationTicks { get; set; }
public string LiveTvStreamId { get; set; }
public long? RunTimeTicks;
public string ItemType { get; set; }
public long? InputBitrate { get; set; }
public long? InputFileSize { get; set; }
public string OutputAudioSync = "1";
public string OutputVideoSync = "vfr";
public string GetMimeType(string outputPath)
{
if (!string.IsNullOrEmpty(MimeType))
{
return MimeType;
}
return MimeTypes.GetMimeType(outputPath);
}
private readonly ILogger _logger;
private readonly ILiveTvManager _liveTvManager;
public EncodingJob(ILogger logger, ILiveTvManager liveTvManager)
{
_logger = logger;
_liveTvManager = liveTvManager;
Id = Guid.NewGuid().ToString("N");
RemoteHttpHeaders = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
SupportedAudioCodecs = new List<string>();
PlayableStreamFileNames = new List<string>();
AllMediaStreams = new List<MediaStream>();
TaskCompletionSource = new TaskCompletionSource<bool>();
}
public void Dispose()
{
DisposeLiveStream();
DisposeLogStream();
DisposeIsoMount();
}
private void DisposeLogStream()
{
if (LogFileStream != null)
{
try
{
LogFileStream.Dispose();
}
catch (Exception ex)
{
_logger.ErrorException("Error disposing log stream", ex);
}
LogFileStream = null;
}
}
private void DisposeIsoMount()
{
if (IsoMount != null)
{
try
{
IsoMount.Dispose();
}
catch (Exception ex)
{
_logger.ErrorException("Error disposing iso mount", ex);
}
IsoMount = null;
}
}
private async void DisposeLiveStream()
{
if (!string.IsNullOrEmpty(LiveTvStreamId))
{
try
{
await _liveTvManager.CloseLiveStream(LiveTvStreamId, CancellationToken.None).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.ErrorException("Error closing live tv stream", ex);
}
}
}
public int InternalSubtitleStreamOffset { get; set; }
public string OutputFilePath { get; set; }
public string OutputVideoCodec { get; set; }
public string OutputAudioCodec { get; set; }
public int? OutputAudioChannels;
public int? OutputAudioSampleRate;
public int? OutputAudioBitrate;
public int? OutputVideoBitrate;
public string ActualOutputVideoCodec
{
get
{
var codec = OutputVideoCodec;
if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
var stream = VideoStream;
if (stream != null)
{
return stream.Codec;
}
return null;
}
return codec;
}
}
public string ActualOutputAudioCodec
{
get
{
var codec = OutputAudioCodec;
if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
var stream = AudioStream;
if (stream != null)
{
return stream.Codec;
}
return null;
}
return codec;
}
}
public int? TotalOutputBitrate
{
get
{
return (OutputAudioBitrate ?? 0) + (OutputVideoBitrate ?? 0);
}
}
public int? OutputWidth
{
get
{
if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
{
var size = new ImageSize
{
Width = VideoStream.Width.Value,
Height = VideoStream.Height.Value
};
var newSize = DrawingUtils.Resize(size,
Options.Width,
Options.Height,
Options.MaxWidth,
Options.MaxHeight);
return Convert.ToInt32(newSize.Width);
}
if (!IsVideoRequest)
{
return null;
}
return Options.MaxWidth ?? Options.Width;
}
}
public int? OutputHeight
{
get
{
if (VideoStream != null && VideoStream.Width.HasValue && VideoStream.Height.HasValue)
{
var size = new ImageSize
{
Width = VideoStream.Width.Value,
Height = VideoStream.Height.Value
};
var newSize = DrawingUtils.Resize(size,
Options.Width,
Options.Height,
Options.MaxWidth,
Options.MaxHeight);
return Convert.ToInt32(newSize.Height);
}
if (!IsVideoRequest)
{
return null;
}
return Options.MaxHeight ?? Options.Height;
}
}
/// <summary>
/// Predicts the video bit depth that will be in the output stream
/// </summary>
public int? TargetVideoBitDepth
{
get
{
var stream = VideoStream;
return stream == null || !Options.Static ? null : stream.BitDepth;
}
}
/// <summary>
/// Gets the target reference frames.
/// </summary>
/// <value>The target reference frames.</value>
public int? TargetRefFrames
{
get
{
var stream = VideoStream;
return stream == null || !Options.Static ? null : stream.RefFrames;
}
}
/// <summary>
/// Predicts the framerate that will be in the output stream
/// </summary>
public float? TargetFramerate
{
get
{
var stream = VideoStream;
var requestedFramerate = Options.MaxFramerate ?? Options.Framerate;
return requestedFramerate.HasValue && !Options.Static
? requestedFramerate
: stream == null ? null : stream.AverageFrameRate ?? stream.RealFrameRate;
}
}
/// <summary>
/// Predicts the video level that will be in the output stream
/// </summary>
public double? TargetVideoLevel
{
get
{
var stream = VideoStream;
return Options.Level.HasValue && !Options.Static
? Options.Level.Value
: stream == null ? null : stream.Level;
}
}
public TransportStreamTimestamp TargetTimestamp
{
get
{
var defaultValue = string.Equals(Options.OutputContainer, "m2ts", StringComparison.OrdinalIgnoreCase) ?
TransportStreamTimestamp.Valid :
TransportStreamTimestamp.None;
return !Options.Static
? defaultValue
: InputTimestamp;
}
}
/// <summary>
/// Predicts the packet length that will be in the output stream
/// </summary>
public int? TargetPacketLength
{
get
{
var stream = VideoStream;
return !Options.Static
? null
: stream == null ? null : stream.PacketLength;
}
}
/// <summary>
/// Predicts the video profile that will be in the output stream
/// </summary>
public string TargetVideoProfile
{
get
{
var stream = VideoStream;
return !string.IsNullOrEmpty(Options.Profile) && !Options.Static
? Options.Profile
: stream == null ? null : stream.Profile;
}
}
public bool? IsTargetAnamorphic
{
get
{
if (Options.Static)
{
return VideoStream == null ? null : VideoStream.IsAnamorphic;
}
return false;
}
}
public bool? IsTargetCabac
{
get
{
if (Options.Static)
{
return VideoStream == null ? null : VideoStream.IsCabac;
}
return true;
}
}
public void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded)
{
var ticks = transcodingPosition.HasValue ? transcodingPosition.Value.Ticks : (long?)null;
// job.Framerate = framerate;
if (percentComplete.HasValue)
{
Progress.Report(percentComplete.Value);
}
// job.TranscodingPositionTicks = ticks;
// job.BytesTranscoded = bytesTranscoded;
var deviceId = Options.DeviceId;
if (!string.IsNullOrWhiteSpace(deviceId))
{
var audioCodec = ActualOutputAudioCodec;
var videoCodec = ActualOutputVideoCodec;
// SessionManager.ReportTranscodingInfo(deviceId, new TranscodingInfo
// {
// Bitrate = job.TotalOutputBitrate,
// AudioCodec = audioCodec,
// VideoCodec = videoCodec,
// Container = job.Options.OutputContainer,
// Framerate = framerate,
// CompletionPercentage = percentComplete,
// Width = job.OutputWidth,
// Height = job.OutputHeight,
// AudioChannels = job.OutputAudioChannels,
// IsAudioDirect = string.Equals(job.OutputAudioCodec, "copy", StringComparison.OrdinalIgnoreCase),
// IsVideoDirect = string.Equals(job.OutputVideoCodec, "copy", StringComparison.OrdinalIgnoreCase)
// });
}
}
}
}
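OutputWidth and OutputHeight delegate the actual scaling math to DrawingUtils.Resize; as a standalone sketch (not the library implementation), the MaxWidth/MaxHeight case roughly corresponds to an aspect-preserving fit that never upscales:

static void FitWithin(int sourceWidth, int sourceHeight, int maxWidth, int maxHeight)
{
    // Pick the factor that satisfies both ceilings while keeping the aspect ratio; never scale up.
    var scale = Math.Min(1.0, Math.Min((double)maxWidth / sourceWidth, (double)maxHeight / sourceHeight));
    var width = (int)Math.Round(sourceWidth * scale);
    var height = (int)Math.Round(sourceHeight * scale);
    Console.WriteLine("{0}x{1} -> {2}x{3}", sourceWidth, sourceHeight, width, height);
}
// FitWithin(1920, 1080, 1280, 720) prints "1920x1080 -> 1280x720"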

@@ -0,0 +1,830 @@
using MediaBrowser.Controller.Channels;
using MediaBrowser.Controller.Entities;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.LiveTv;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using MediaBrowser.Model.Dto;
using MediaBrowser.Model.Entities;
using MediaBrowser.Model.Logging;
using MediaBrowser.Model.MediaInfo;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class EncodingJobFactory
{
private readonly ILogger _logger;
private readonly ILiveTvManager _liveTvManager;
private readonly ILibraryManager _libraryManager;
private readonly IChannelManager _channelManager;
protected static readonly CultureInfo UsCulture = new CultureInfo("en-US");
public EncodingJobFactory(ILogger logger, ILiveTvManager liveTvManager, ILibraryManager libraryManager, IChannelManager channelManager)
{
_logger = logger;
_liveTvManager = liveTvManager;
_libraryManager = libraryManager;
_channelManager = channelManager;
}
public async Task<EncodingJob> CreateJob(EncodingJobOptions options, bool isVideoRequest, IProgress<double> progress, CancellationToken cancellationToken)
{
var request = options;
if (string.IsNullOrEmpty(request.AudioCodec))
{
request.AudioCodec = InferAudioCodec(request.OutputContainer);
}
var state = new EncodingJob(_logger, _liveTvManager)
{
Options = options,
IsVideoRequest = isVideoRequest,
Progress = progress
};
if (!string.IsNullOrWhiteSpace(request.AudioCodec))
{
state.SupportedAudioCodecs = request.AudioCodec.Split(',').Where(i => !string.IsNullOrWhiteSpace(i)).ToList();
request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault();
}
var item = _libraryManager.GetItemById(request.ItemId);
List<MediaStream> mediaStreams = null;
state.ItemType = item.GetType().Name;
if (item is ILiveTvRecording)
{
var recording = await _liveTvManager.GetInternalRecording(request.ItemId, cancellationToken).ConfigureAwait(false);
state.VideoType = VideoType.VideoFile;
state.IsInputVideo = string.Equals(recording.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
var path = recording.RecordingInfo.Path;
var mediaUrl = recording.RecordingInfo.Url;
var source = string.IsNullOrEmpty(request.MediaSourceId)
? recording.GetMediaSources(false).First()
: recording.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));
mediaStreams = source.MediaStreams;
// Just to prevent this from being null and causing other methods to fail
state.MediaPath = string.Empty;
if (!string.IsNullOrEmpty(path))
{
state.MediaPath = path;
state.InputProtocol = MediaProtocol.File;
}
else if (!string.IsNullOrEmpty(mediaUrl))
{
state.MediaPath = mediaUrl;
state.InputProtocol = MediaProtocol.Http;
}
state.RunTimeTicks = recording.RunTimeTicks;
state.DeInterlace = true;
state.OutputAudioSync = "1000";
state.InputVideoSync = "-1";
state.InputAudioSync = "1";
state.InputContainer = recording.Container;
state.ReadInputAtNativeFramerate = source.ReadAtNativeFramerate;
}
else if (item is LiveTvChannel)
{
var channel = _liveTvManager.GetInternalChannel(request.ItemId);
state.VideoType = VideoType.VideoFile;
state.IsInputVideo = string.Equals(channel.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
mediaStreams = new List<MediaStream>();
state.DeInterlace = true;
// Just to prevent this from being null and causing other methods to fail
state.MediaPath = string.Empty;
}
else if (item is IChannelMediaItem)
{
var mediaSource = await GetChannelMediaInfo(request.ItemId, request.MediaSourceId, cancellationToken).ConfigureAwait(false);
state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase);
state.InputProtocol = mediaSource.Protocol;
state.MediaPath = mediaSource.Path;
state.RunTimeTicks = item.RunTimeTicks;
state.RemoteHttpHeaders = mediaSource.RequiredHttpHeaders;
state.InputBitrate = mediaSource.Bitrate;
state.InputFileSize = mediaSource.Size;
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
mediaStreams = mediaSource.MediaStreams;
}
else
{
var hasMediaSources = (IHasMediaSources)item;
var mediaSource = string.IsNullOrEmpty(request.MediaSourceId)
? hasMediaSources.GetMediaSources(false).First()
: hasMediaSources.GetMediaSources(false).First(i => string.Equals(i.Id, request.MediaSourceId));
mediaStreams = mediaSource.MediaStreams;
state.MediaPath = mediaSource.Path;
state.InputProtocol = mediaSource.Protocol;
state.InputContainer = mediaSource.Container;
state.InputFileSize = mediaSource.Size;
state.InputBitrate = mediaSource.Bitrate;
state.ReadInputAtNativeFramerate = mediaSource.ReadAtNativeFramerate;
var video = item as Video;
if (video != null)
{
state.IsInputVideo = true;
if (mediaSource.VideoType.HasValue)
{
state.VideoType = mediaSource.VideoType.Value;
}
state.IsoType = mediaSource.IsoType;
state.PlayableStreamFileNames = mediaSource.PlayableStreamFileNames.ToList();
if (mediaSource.Timestamp.HasValue)
{
state.InputTimestamp = mediaSource.Timestamp.Value;
}
}
state.RunTimeTicks = mediaSource.RunTimeTicks;
}
AttachMediaStreamInfo(state, mediaStreams, request);
state.OutputAudioBitrate = GetAudioBitrateParam(request, state.AudioStream);
state.OutputAudioSampleRate = request.AudioSampleRate;
state.OutputAudioCodec = GetAudioCodec(request);
state.OutputAudioChannels = GetNumAudioChannelsParam(request, state.AudioStream, state.OutputAudioCodec);
if (isVideoRequest)
{
state.OutputVideoCodec = GetVideoCodec(request);
state.OutputVideoBitrate = GetVideoBitrateParamValue(request, state.VideoStream);
if (state.OutputVideoBitrate.HasValue)
{
var resolution = ResolutionNormalizer.Normalize(state.OutputVideoBitrate.Value,
state.OutputVideoCodec,
request.MaxWidth,
request.MaxHeight);
request.MaxWidth = resolution.MaxWidth;
request.MaxHeight = resolution.MaxHeight;
}
}
ApplyDeviceProfileSettings(state);
if (isVideoRequest)
{
if (state.VideoStream != null && CanStreamCopyVideo(request, state.VideoStream))
{
state.OutputVideoCodec = "copy";
}
if (state.AudioStream != null && CanStreamCopyAudio(request, state.AudioStream, state.SupportedAudioCodecs))
{
state.OutputAudioCodec = "copy";
}
}
return state;
}
internal static void AttachMediaStreamInfo(EncodingJob state,
List<MediaStream> mediaStreams,
EncodingJobOptions videoRequest)
{
if (videoRequest != null)
{
if (string.IsNullOrEmpty(videoRequest.VideoCodec))
{
videoRequest.VideoCodec = InferVideoCodec(videoRequest.OutputContainer);
}
state.VideoStream = GetMediaStream(mediaStreams, videoRequest.VideoStreamIndex, MediaStreamType.Video);
state.SubtitleStream = GetMediaStream(mediaStreams, videoRequest.SubtitleStreamIndex, MediaStreamType.Subtitle, false);
state.AudioStream = GetMediaStream(mediaStreams, videoRequest.AudioStreamIndex, MediaStreamType.Audio);
if (state.SubtitleStream != null && !state.SubtitleStream.IsExternal)
{
state.InternalSubtitleStreamOffset = mediaStreams.Where(i => i.Type == MediaStreamType.Subtitle && !i.IsExternal).ToList().IndexOf(state.SubtitleStream);
}
if (state.VideoStream != null && state.VideoStream.IsInterlaced)
{
state.DeInterlace = true;
}
EnforceResolutionLimit(state, videoRequest);
}
else
{
state.AudioStream = GetMediaStream(mediaStreams, null, MediaStreamType.Audio, true);
}
state.AllMediaStreams = mediaStreams;
}
/// <summary>
/// Infers the video codec.
/// </summary>
/// <param name="container">The container.</param>
/// <returns>System.String.</returns>
private static string InferVideoCodec(string container)
{
if (string.Equals(container, "asf", StringComparison.OrdinalIgnoreCase))
{
return "wmv";
}
if (string.Equals(container, "webm", StringComparison.OrdinalIgnoreCase))
{
return "vpx";
}
if (string.Equals(container, "ogg", StringComparison.OrdinalIgnoreCase) || string.Equals(container, "ogv", StringComparison.OrdinalIgnoreCase))
{
return "theora";
}
if (string.Equals(container, "m3u8", StringComparison.OrdinalIgnoreCase) || string.Equals(container, "ts", StringComparison.OrdinalIgnoreCase))
{
return "h264";
}
return "copy";
}
private string InferAudioCodec(string container)
{
if (string.Equals(container, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "mp3";
}
if (string.Equals(container, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac";
}
if (string.Equals(container, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wma";
}
if (string.Equals(container, "ogg", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(container, "oga", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(container, "ogv", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(container, "webm", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
if (string.Equals(container, "webma", StringComparison.OrdinalIgnoreCase))
{
return "vorbis";
}
return "copy";
}
/// <summary>
/// Determines which stream will be used for playback
/// </summary>
/// <param name="allStream">All stream.</param>
/// <param name="desiredIndex">Index of the desired.</param>
/// <param name="type">The type.</param>
/// <param name="returnFirstIfNoIndex">if set to <c>true</c> [return first if no index].</param>
/// <returns>MediaStream.</returns>
private static MediaStream GetMediaStream(IEnumerable<MediaStream> allStream, int? desiredIndex, MediaStreamType type, bool returnFirstIfNoIndex = true)
{
var streams = allStream.Where(s => s.Type == type).OrderBy(i => i.Index).ToList();
if (desiredIndex.HasValue)
{
var stream = streams.FirstOrDefault(s => s.Index == desiredIndex.Value);
if (stream != null)
{
return stream;
}
}
if (type == MediaStreamType.Video)
{
streams = streams.Where(i => !string.Equals(i.Codec, "mjpeg", StringComparison.OrdinalIgnoreCase)).ToList();
}
if (returnFirstIfNoIndex && type == MediaStreamType.Audio)
{
return streams.FirstOrDefault(i => i.Channels.HasValue && i.Channels.Value > 0) ??
streams.FirstOrDefault();
}
// Just return the first one
return returnFirstIfNoIndex ? streams.FirstOrDefault() : null;
}
/// <summary>
/// Enforces the resolution limit.
/// </summary>
/// <param name="state">The state.</param>
/// <param name="videoRequest">The video request.</param>
private static void EnforceResolutionLimit(EncodingJob state, EncodingJobOptions videoRequest)
{
// Switch the incoming params to be ceilings rather than fixed values
videoRequest.MaxWidth = videoRequest.MaxWidth ?? videoRequest.Width;
videoRequest.MaxHeight = videoRequest.MaxHeight ?? videoRequest.Height;
videoRequest.Width = null;
videoRequest.Height = null;
}
/// <summary>
/// Gets the number of audio channels to specify on the command line
/// </summary>
/// <param name="request">The request.</param>
/// <param name="audioStream">The audio stream.</param>
/// <param name="outputAudioCodec">The output audio codec.</param>
/// <returns>System.Nullable{System.Int32}.</returns>
private int? GetNumAudioChannelsParam(EncodingJobOptions request, MediaStream audioStream, string outputAudioCodec)
{
if (audioStream != null)
{
var codec = outputAudioCodec ?? string.Empty;
if (audioStream.Channels > 2 && codec.IndexOf("wma", StringComparison.OrdinalIgnoreCase) != -1)
{
// wmav2 currently only supports two channel output
return 2;
}
}
if (request.MaxAudioChannels.HasValue)
{
if (audioStream != null && audioStream.Channels.HasValue)
{
return Math.Min(request.MaxAudioChannels.Value, audioStream.Channels.Value);
}
// If we don't have any media info then limit it to 5 to prevent encoding errors due to asking for too many channels
return Math.Min(request.MaxAudioChannels.Value, 5);
}
return request.AudioChannels;
}
private int? GetVideoBitrateParamValue(EncodingJobOptions request, MediaStream videoStream)
{
var bitrate = request.VideoBitRate;
if (videoStream != null)
{
var isUpscaling = request.Height.HasValue && videoStream.Height.HasValue &&
request.Height.Value > videoStream.Height.Value;
if (request.Width.HasValue && videoStream.Width.HasValue &&
request.Width.Value > videoStream.Width.Value)
{
isUpscaling = true;
}
// Don't allow bitrate increases unless upscaling
if (!isUpscaling)
{
if (bitrate.HasValue && videoStream.BitRate.HasValue)
{
bitrate = Math.Min(bitrate.Value, videoStream.BitRate.Value);
}
}
}
return bitrate;
}
private async Task<MediaSourceInfo> GetChannelMediaInfo(string id,
string mediaSourceId,
CancellationToken cancellationToken)
{
var channelMediaSources = await _channelManager.GetChannelItemMediaSources(id, true, cancellationToken)
.ConfigureAwait(false);
var list = channelMediaSources.ToList();
if (!string.IsNullOrWhiteSpace(mediaSourceId))
{
var source = list
.FirstOrDefault(i => string.Equals(mediaSourceId, i.Id));
if (source != null)
{
return source;
}
}
return list.First();
}
protected string GetVideoBitrateParam(EncodingJob state, string videoCodec, bool isHls)
{
var bitrate = state.OutputVideoBitrate;
if (bitrate.HasValue)
{
var hasFixedResolution = state.Options.HasFixedResolution;
if (string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase))
{
if (hasFixedResolution)
{
return string.Format(" -minrate:v ({0}*.90) -maxrate:v ({0}*1.10) -bufsize:v {0} -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// With vpx when crf is used, b:v becomes a max rate
// https://trac.ffmpeg.org/wiki/vpxEncodingGuide. With higher-bitrate source files a fixed -b:v causes judder, so cap the bitrate without forcing it to "saturate" that cap: constrain it upward only, not downward.
return string.Format(" -maxrate:v {0} -bufsize:v ({0}*2) -b:v {0}", bitrate.Value.ToString(UsCulture));
}
if (string.Equals(videoCodec, "msmpeg4", StringComparison.OrdinalIgnoreCase))
{
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
// H264
if (hasFixedResolution)
{
if (isHls)
{
return string.Format(" -b:v {0} -maxrate ({0}*.80) -bufsize {0}", bitrate.Value.ToString(UsCulture));
}
return string.Format(" -b:v {0}", bitrate.Value.ToString(UsCulture));
}
return string.Format(" -maxrate {0} -bufsize {1}",
bitrate.Value.ToString(UsCulture),
(bitrate.Value * 2).ToString(UsCulture));
}
return string.Empty;
}
private int? GetAudioBitrateParam(EncodingJobOptions request, MediaStream audioStream)
{
if (request.AudioBitRate.HasValue)
{
// Make sure we don't request a bitrate higher than the source
var currentBitrate = audioStream == null ? request.AudioBitRate.Value : audioStream.BitRate ?? request.AudioBitRate.Value;
return request.AudioBitRate.Value;
//return Math.Min(currentBitrate, request.AudioBitRate.Value);
}
return null;
}
/// <summary>
/// Determines whether the specified stream is H264.
/// </summary>
/// <param name="stream">The stream.</param>
/// <returns><c>true</c> if the specified stream is H264; otherwise, <c>false</c>.</returns>
protected bool IsH264(MediaStream stream)
{
var codec = stream.Codec ?? string.Empty;
return codec.IndexOf("264", StringComparison.OrdinalIgnoreCase) != -1 ||
codec.IndexOf("avc", StringComparison.OrdinalIgnoreCase) != -1;
}
/// <summary>
/// Gets the name of the output audio codec
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
private string GetAudioCodec(EncodingJobOptions request)
{
var codec = request.AudioCodec;
if (string.Equals(codec, "aac", StringComparison.OrdinalIgnoreCase))
{
return "aac -strict experimental";
}
if (string.Equals(codec, "mp3", StringComparison.OrdinalIgnoreCase))
{
return "libmp3lame";
}
if (string.Equals(codec, "vorbis", StringComparison.OrdinalIgnoreCase))
{
return "libvorbis";
}
if (string.Equals(codec, "wma", StringComparison.OrdinalIgnoreCase))
{
return "wmav2";
}
return (codec ?? string.Empty).ToLower();
}
/// <summary>
/// Gets the name of the output video codec
/// </summary>
/// <param name="request">The request.</param>
/// <returns>System.String.</returns>
private string GetVideoCodec(EncodingJobOptions request)
{
var codec = request.VideoCodec;
if (!string.IsNullOrEmpty(codec))
{
if (string.Equals(codec, "h264", StringComparison.OrdinalIgnoreCase))
{
return "libx264";
}
if (string.Equals(codec, "h265", StringComparison.OrdinalIgnoreCase))
{
return "libx265";
}
if (string.Equals(codec, "vpx", StringComparison.OrdinalIgnoreCase))
{
return "libvpx";
}
if (string.Equals(codec, "wmv", StringComparison.OrdinalIgnoreCase))
{
return "wmv2";
}
if (string.Equals(codec, "theora", StringComparison.OrdinalIgnoreCase))
{
return "libtheora";
}
return codec.ToLower();
}
return "copy";
}
internal static bool CanStreamCopyVideo(EncodingJobOptions request, MediaStream videoStream)
{
if (videoStream.IsInterlaced)
{
return false;
}
// Can't stream copy if we're burning in subtitles
if (request.SubtitleStreamIndex.HasValue)
{
if (request.SubtitleMethod == SubtitleDeliveryMethod.Encode)
{
return false;
}
}
// Source and target codecs must match
if (!string.Equals(request.VideoCodec, videoStream.Codec, StringComparison.OrdinalIgnoreCase))
{
return false;
}
// If client is requesting a specific video profile, it must match the source
if (!string.IsNullOrEmpty(request.Profile))
{
if (string.IsNullOrEmpty(videoStream.Profile))
{
return false;
}
if (!string.Equals(request.Profile, videoStream.Profile, StringComparison.OrdinalIgnoreCase))
{
var currentScore = GetVideoProfileScore(videoStream.Profile);
var requestedScore = GetVideoProfileScore(request.Profile);
if (currentScore == -1 || currentScore > requestedScore)
{
return false;
}
}
}
// Video width must fall within requested value
if (request.MaxWidth.HasValue)
{
if (!videoStream.Width.HasValue || videoStream.Width.Value > request.MaxWidth.Value)
{
return false;
}
}
// Video height must fall within requested value
if (request.MaxHeight.HasValue)
{
if (!videoStream.Height.HasValue || videoStream.Height.Value > request.MaxHeight.Value)
{
return false;
}
}
// Video framerate must fall within requested value
var requestedFramerate = request.MaxFramerate ?? request.Framerate;
if (requestedFramerate.HasValue)
{
var videoFrameRate = videoStream.AverageFrameRate ?? videoStream.RealFrameRate;
if (!videoFrameRate.HasValue || videoFrameRate.Value > requestedFramerate.Value)
{
return false;
}
}
// Video bitrate must fall within requested value
if (request.VideoBitRate.HasValue)
{
if (!videoStream.BitRate.HasValue || videoStream.BitRate.Value > request.VideoBitRate.Value)
{
return false;
}
}
if (request.MaxVideoBitDepth.HasValue)
{
if (videoStream.BitDepth.HasValue && videoStream.BitDepth.Value > request.MaxVideoBitDepth.Value)
{
return false;
}
}
if (request.MaxRefFrames.HasValue)
{
if (videoStream.RefFrames.HasValue && videoStream.RefFrames.Value > request.MaxRefFrames.Value)
{
return false;
}
}
// If a specific level was requested, the source must match or be less than
if (request.Level.HasValue)
{
if (!videoStream.Level.HasValue)
{
return false;
}
if (videoStream.Level.Value > request.Level.Value)
{
return false;
}
}
if (request.Cabac.HasValue && request.Cabac.Value)
{
if (videoStream.IsCabac.HasValue && !videoStream.IsCabac.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
private static int GetVideoProfileScore(string profile)
{
var list = new List<string>
{
"Constrained Baseline",
"Baseline",
"Extended",
"Main",
"High",
"Progressive High",
"Constrained High"
};
return Array.FindIndex(list.ToArray(), t => string.Equals(t, profile, StringComparison.OrdinalIgnoreCase));
}
internal static bool CanStreamCopyAudio(EncodingJobOptions request, MediaStream audioStream, List<string> supportedAudioCodecs)
{
// Source and target codecs must match
if (string.IsNullOrEmpty(audioStream.Codec) || !supportedAudioCodecs.Contains(audioStream.Codec, StringComparer.OrdinalIgnoreCase))
{
return false;
}
// Audio bitrate must fall within requested value
if (request.AudioBitRate.HasValue)
{
if (!audioStream.BitRate.HasValue || audioStream.BitRate.Value <= 0)
{
return false;
}
if (audioStream.BitRate.Value > request.AudioBitRate.Value)
{
return false;
}
}
// Channels must fall within requested value
var channels = request.AudioChannels ?? request.MaxAudioChannels;
if (channels.HasValue)
{
if (!audioStream.Channels.HasValue || audioStream.Channels.Value <= 0)
{
return false;
}
if (audioStream.Channels.Value > channels.Value)
{
return false;
}
}
// Sample rate must fall within requested value
if (request.AudioSampleRate.HasValue)
{
if (!audioStream.SampleRate.HasValue || audioStream.SampleRate.Value <= 0)
{
return false;
}
if (audioStream.SampleRate.Value > request.AudioSampleRate.Value)
{
return false;
}
}
return request.EnableAutoStreamCopy;
}
private void ApplyDeviceProfileSettings(EncodingJob state)
{
var profile = state.Options.DeviceProfile;
if (profile == null)
{
// Don't use settings from the default profile.
// Only use a specific profile if it was requested.
return;
}
var audioCodec = state.ActualOutputAudioCodec;
var videoCodec = state.ActualOutputVideoCodec;
var outputContainer = state.Options.OutputContainer;
var mediaProfile = !state.IsVideoRequest ?
profile.GetAudioMediaProfile(outputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate) :
profile.GetVideoMediaProfile(outputContainer,
audioCodec,
videoCodec,
state.OutputAudioBitrate,
state.OutputAudioChannels,
state.OutputWidth,
state.OutputHeight,
state.TargetVideoBitDepth,
state.OutputVideoBitrate,
state.TargetVideoProfile,
state.TargetVideoLevel,
state.TargetFramerate,
state.TargetPacketLength,
state.TargetTimestamp,
state.IsTargetAnamorphic,
state.IsTargetCabac,
state.TargetRefFrames);
if (mediaProfile != null)
{
state.MimeType = mediaProfile.MimeType;
}
var transcodingProfile = !state.IsVideoRequest ?
profile.GetAudioTranscodingProfile(outputContainer, audioCodec) :
profile.GetVideoTranscodingProfile(outputContainer, audioCodec, videoCodec);
if (transcodingProfile != null)
{
state.EstimateContentLength = transcodingProfile.EstimateContentLength;
state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode;
state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo;
}
}
}
}
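The profile check inside CanStreamCopyVideo compares ranks from GetVideoProfileScore; a small standalone walk-through of that ordering (the ranking list is copied from the method above):

var ranking = new List<string>
{
    "Constrained Baseline", "Baseline", "Extended", "Main",
    "High", "Progressive High", "Constrained High"
};
Func<string, int> score = p => ranking.FindIndex(t => string.Equals(t, p, StringComparison.OrdinalIgnoreCase));
// Source profile High (index 4) vs requested Main (index 3): the source outranks the request,
// so stream copy is rejected and the job falls back to transcoding.
Console.WriteLine(score("High") > score("Main"));      // True
// Source Baseline (1) vs requested High (4): copy stays possible, subject to the other checks.
Console.WriteLine(score("Baseline") > score("High"));  // False
// An unknown profile scores -1, which the method also treats as "cannot stream copy".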

@@ -0,0 +1,122 @@
using MediaBrowser.Common.Extensions;
using MediaBrowser.Model.Logging;
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class JobLogger
{
private readonly CultureInfo _usCulture = new CultureInfo("en-US");
private readonly ILogger _logger;
public JobLogger(ILogger logger)
{
_logger = logger;
}
public async void StartStreamingLog(EncodingJob transcodingJob, Stream source, Stream target)
{
try
{
using (var reader = new StreamReader(source))
{
while (!reader.EndOfStream)
{
var line = await reader.ReadLineAsync().ConfigureAwait(false);
ParseLogLine(line, transcodingJob);
var bytes = Encoding.UTF8.GetBytes(Environment.NewLine + line);
await target.WriteAsync(bytes, 0, bytes.Length).ConfigureAwait(false);
}
}
}
catch (Exception ex)
{
_logger.ErrorException("Error reading ffmpeg log", ex);
}
}
private void ParseLogLine(string line, EncodingJob transcodingJob)
{
float? framerate = null;
double? percent = null;
TimeSpan? transcodingPosition = null;
long? bytesTranscoded = null;
var parts = line.Split(' ');
var totalMs = transcodingJob.RunTimeTicks.HasValue
? TimeSpan.FromTicks(transcodingJob.RunTimeTicks.Value).TotalMilliseconds
: 0;
var startMs = transcodingJob.Options.StartTimeTicks.HasValue
? TimeSpan.FromTicks(transcodingJob.Options.StartTimeTicks.Value).TotalMilliseconds
: 0;
for (var i = 0; i < parts.Length; i++)
{
var part = parts[i];
if (string.Equals(part, "fps=", StringComparison.OrdinalIgnoreCase) &&
(i + 1 < parts.Length))
{
var rate = parts[i + 1];
float val;
if (float.TryParse(rate, NumberStyles.Any, _usCulture, out val))
{
framerate = val;
}
}
else if (transcodingJob.RunTimeTicks.HasValue &&
part.StartsWith("time=", StringComparison.OrdinalIgnoreCase))
{
var time = part.Split(new[] { '=' }, 2).Last();
TimeSpan val;
if (TimeSpan.TryParse(time, _usCulture, out val))
{
var currentMs = startMs + val.TotalMilliseconds;
var percentVal = currentMs / totalMs;
percent = 100 * percentVal;
transcodingPosition = val;
}
}
else if (part.StartsWith("size=", StringComparison.OrdinalIgnoreCase))
{
var size = part.Split(new[] { '=' }, 2).Last();
int? scale = null;
if (size.IndexOf("kb", StringComparison.OrdinalIgnoreCase) != -1)
{
scale = 1024;
size = size.Replace("kb", string.Empty, StringComparison.OrdinalIgnoreCase);
}
if (scale.HasValue)
{
long val;
if (long.TryParse(size, NumberStyles.Any, _usCulture, out val))
{
bytesTranscoded = val * scale.Value;
}
}
}
}
if (framerate.HasValue || percent.HasValue)
{
transcodingJob.ReportTranscodingProgress(transcodingPosition, framerate, percent, bytesTranscoded);
}
}
}
}
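A hypothetical ffmpeg status line of the shape ParseLogLine expects, with the values it would extract. Note the tokenisation: "fps= 25" splits into "fps=" plus "25", which is why the i + 1 lookup exists, while time= and size= are only recognised when key and value form a single token.

var line = "frame=123 fps= 25 q=28.0 size=5120kB time=00:00:05.12 bitrate=8192.0kbits/s";
// For a job whose RunTimeTicks spans two minutes and has no StartTimeTicks:
var position = TimeSpan.Parse("00:00:05.12");                                  // transcodingPosition
var percent = 100 * position.TotalMilliseconds
              / TimeSpan.FromMinutes(2).TotalMilliseconds;                     // ≈ 4.3
var bytesTranscoded = 5120L * 1024;                                            // size=5120kB -> 5,242,880 bytes
var framerate = 25f;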

@@ -64,8 +64,9 @@ namespace MediaBrowser.MediaEncoding.Encoder
protected readonly ILibraryManager LibraryManager;
protected readonly IChannelManager ChannelManager;
protected readonly ISessionManager SessionManager;
protected readonly Func<ISubtitleEncoder> SubtitleEncoder;
public MediaEncoder(ILogger logger, IJsonSerializer jsonSerializer, string ffMpegPath, string ffProbePath, string version, IServerConfigurationManager configurationManager, IFileSystem fileSystem, ILiveTvManager liveTvManager, IIsoManager isoManager, ILibraryManager libraryManager, IChannelManager channelManager, ISessionManager sessionManager)
public MediaEncoder(ILogger logger, IJsonSerializer jsonSerializer, string ffMpegPath, string ffProbePath, string version, IServerConfigurationManager configurationManager, IFileSystem fileSystem, ILiveTvManager liveTvManager, IIsoManager isoManager, ILibraryManager libraryManager, IChannelManager channelManager, ISessionManager sessionManager, Func<ISubtitleEncoder> subtitleEncoder)
{
_logger = logger;
_jsonSerializer = jsonSerializer;
@@ -77,6 +78,7 @@ namespace MediaBrowser.MediaEncoding.Encoder
LibraryManager = libraryManager;
ChannelManager = channelManager;
SessionManager = sessionManager;
SubtitleEncoder = subtitleEncoder;
FFProbePath = ffProbePath;
FFMpegPath = ffMpegPath;
}
@@ -545,7 +547,29 @@ namespace MediaBrowser.MediaEncoding.Encoder
IsoManager,
LibraryManager,
ChannelManager,
SessionManager)
SessionManager,
SubtitleEncoder())
.Start(options, progress, cancellationToken).ConfigureAwait(false);
await job.TaskCompletionSource.Task.ConfigureAwait(false);
return job.OutputFilePath;
}
public async Task<string> EncodeVideo(EncodingJobOptions options,
IProgress<double> progress,
CancellationToken cancellationToken)
{
var job = await new VideoEncoder(this,
_logger,
ConfigurationManager,
FileSystem,
LiveTvManager,
IsoManager,
LibraryManager,
ChannelManager,
SessionManager,
SubtitleEncoder())
.Start(options, progress, cancellationToken).ConfigureAwait(false);
await job.TaskCompletionSource.Task.ConfigureAwait(false);

@@ -0,0 +1,177 @@
using MediaBrowser.Common.IO;
using MediaBrowser.Controller.Channels;
using MediaBrowser.Controller.Configuration;
using MediaBrowser.Controller.Library;
using MediaBrowser.Controller.LiveTv;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Controller.Session;
using MediaBrowser.Model.IO;
using MediaBrowser.Model.Logging;
using System;
using System.IO;
namespace MediaBrowser.MediaEncoding.Encoder
{
public class VideoEncoder : BaseEncoder
{
public VideoEncoder(MediaEncoder mediaEncoder, ILogger logger, IServerConfigurationManager configurationManager, IFileSystem fileSystem, ILiveTvManager liveTvManager, IIsoManager isoManager, ILibraryManager libraryManager, IChannelManager channelManager, ISessionManager sessionManager, ISubtitleEncoder subtitleEncoder) : base(mediaEncoder, logger, configurationManager, fileSystem, liveTvManager, isoManager, libraryManager, channelManager, sessionManager, subtitleEncoder)
{
}
protected override string GetCommandLineArguments(EncodingJob state)
{
// Get the output codec name
var videoCodec = state.OutputVideoCodec;
var format = string.Empty;
var keyFrame = string.Empty;
if (string.Equals(Path.GetExtension(state.OutputFilePath), ".mp4", StringComparison.OrdinalIgnoreCase))
{
format = " -f mp4 -movflags frag_keyframe+empty_moov";
}
var threads = GetNumberOfThreads(state, string.Equals(videoCodec, "libvpx", StringComparison.OrdinalIgnoreCase));
var inputModifier = GetInputModifier(state);
return string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -threads {5} {6}{7} -y \"{8}\"",
inputModifier,
GetInputArgument(state),
keyFrame,
GetMapArgs(state),
GetVideoArguments(state, videoCodec),
threads,
GetAudioArguments(state),
format,
state.OutputFilePath
).Trim();
}
/// <summary>
/// Gets video arguments to pass to ffmpeg
/// </summary>
/// <param name="state">The state.</param>
/// <param name="codec">The video codec.</param>
/// <returns>System.String.</returns>
private string GetVideoArguments(EncodingJob state, string codec)
{
var args = "-codec:v:0 " + codec;
if (state.EnableMpegtsM2TsMode)
{
args += " -mpegts_m2ts_mode 1";
}
// See if we can save some CPU cycles by avoiding encoding
if (string.Equals(codec, "copy", StringComparison.OrdinalIgnoreCase))
{
return state.VideoStream != null && IsH264(state.VideoStream) && string.Equals(state.Options.OutputContainer, "ts", StringComparison.OrdinalIgnoreCase) ?
args + " -bsf:v h264_mp4toannexb" :
args;
}
var keyFrameArg = string.Format(" -force_key_frames expr:gte(t,n_forced*{0})",
5.ToString(UsCulture));
args += keyFrameArg;
var hasGraphicalSubs = state.SubtitleStream != null && !state.SubtitleStream.IsTextSubtitleStream;
// Add resolution params, if specified
if (!hasGraphicalSubs)
{
args += GetOutputSizeParam(state, codec);
}
var qualityParam = GetVideoQualityParam(state, codec, false);
if (!string.IsNullOrEmpty(qualityParam))
{
args += " " + qualityParam.Trim();
}
// This is for internal graphical subs
if (hasGraphicalSubs)
{
args += GetGraphicalSubtitleParam(state, codec);
}
return args;
}
/// <summary>
/// Gets audio arguments to pass to ffmpeg
/// </summary>
/// <param name="state">The state.</param>
/// <returns>System.String.</returns>
private string GetAudioArguments(EncodingJob state)
{
// If the video doesn't have an audio stream, don't add any audio arguments.
if (state.AudioStream == null && state.VideoStream != null)
{
return string.Empty;
}
// Get the output codec name
var codec = state.OutputAudioCodec;
var args = "-codec:a:0 " + codec;
if (codec.Equals("copy", StringComparison.OrdinalIgnoreCase))
{
return args;
}
// Add the number of audio channels
var channels = state.OutputAudioChannels;
if (channels.HasValue)
{
args += " -ac " + channels.Value;
}
var bitrate = state.OutputAudioBitrate;
if (bitrate.HasValue)
{
args += " -ab " + bitrate.Value.ToString(UsCulture);
}
args += " " + GetAudioFilterParam(state, false);
return args;
}
protected override string GetOutputFileExtension(EncodingJob state)
{
var ext = base.GetOutputFileExtension(state);
if (!string.IsNullOrEmpty(ext))
{
return ext;
}
var videoCodec = state.Options.VideoCodec;
if (string.Equals(videoCodec, "h264", StringComparison.OrdinalIgnoreCase))
{
return ".ts";
}
if (string.Equals(videoCodec, "theora", StringComparison.OrdinalIgnoreCase))
{
return ".ogv";
}
if (string.Equals(videoCodec, "vpx", StringComparison.OrdinalIgnoreCase))
{
return ".webm";
}
if (string.Equals(videoCodec, "wmv", StringComparison.OrdinalIgnoreCase))
{
return ".asf";
}
return null;
}
}
}
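A standalone illustration of how GetCommandLineArguments assembles a typical H.264/AAC .mp4 invocation. Everything supplied by BaseEncoder (input modifier, -map arguments, thread count, size/quality parameters, audio filter) is replaced with hypothetical values here:

var videoArgs = "-codec:v:0 libx264 -force_key_frames expr:gte(t,n_forced*5) -maxrate 3000000 -bufsize 6000000";
var audioArgs = "-codec:a:0 aac -strict experimental -ac 2 -ab 128000";
var commandLine = string.Format("{0} {1}{2} {3} {4} -map_metadata -1 -threads {5} {6}{7} -y \"{8}\"",
    "",                                            // input modifier
    "-i \"/media/movies/example.mkv\"",            // input argument (hypothetical path)
    "",                                            // keyFrame (always empty above)
    "-map 0:0 -map 0:1",                           // map arguments (hypothetical, from BaseEncoder)
    videoArgs,
    0,                                             // thread count
    audioArgs,
    " -f mp4 -movflags frag_keyframe+empty_moov",  // format, per the .mp4 branch above
    "/transcodes/abc123.mp4").Trim();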

@@ -64,6 +64,7 @@
<Compile Include="Encoder\EncodingUtils.cs" />
<Compile Include="Encoder\JobLogger.cs" />
<Compile Include="Encoder\MediaEncoder.cs" />
<Compile Include="Encoder\VideoEncoder.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
<Compile Include="Subtitles\ISubtitleParser.cs" />
<Compile Include="Subtitles\ISubtitleWriter.cs" />

@@ -412,7 +412,7 @@ namespace MediaBrowser.Server.Implementations.Sync
jobItem.Status = SyncJobItemStatus.Converting;
await _syncRepo.Update(jobItem).ConfigureAwait(false);
//jobItem.OutputPath = await MediaEncoder.EncodeAudio(new EncodingJobOptions(streamInfo, profile), new Progress<double>(), cancellationToken);
jobItem.OutputPath = await MediaEncoder.EncodeVideo(new EncodingJobOptions(streamInfo, profile), new Progress<double>(), cancellationToken);
}
else
{
@@ -420,7 +420,7 @@ namespace MediaBrowser.Server.Implementations.Sync
{
jobItem.OutputPath = mediaSource.Path;
}
if (mediaSource.Protocol == MediaProtocol.Http)
else if (mediaSource.Protocol == MediaProtocol.Http)
{
jobItem.OutputPath = await DownloadFile(jobItem, mediaSource, cancellationToken).ConfigureAwait(false);
}
@@ -464,7 +464,7 @@ namespace MediaBrowser.Server.Implementations.Sync
{
jobItem.OutputPath = mediaSource.Path;
}
if (mediaSource.Protocol == MediaProtocol.Http)
else if (mediaSource.Protocol == MediaProtocol.Http)
{
jobItem.OutputPath = await DownloadFile(jobItem, mediaSource, cancellationToken).ConfigureAwait(false);
}

@@ -185,6 +185,7 @@ namespace MediaBrowser.Server.Startup.Common
/// </summary>
/// <value>The media encoder.</value>
private IMediaEncoder MediaEncoder { get; set; }
private ISubtitleEncoder SubtitleEncoder { get; set; }
private IConnectManager ConnectManager { get; set; }
private ISessionManager SessionManager { get; set; }
@@ -560,7 +561,8 @@ namespace MediaBrowser.Server.Startup.Common
RegisterSingleInstance<ISessionContext>(new SessionContext(UserManager, authContext, SessionManager));
RegisterSingleInstance<IAuthService>(new AuthService(UserManager, authContext, ServerConfigurationManager, ConnectManager, SessionManager, DeviceManager));
RegisterSingleInstance<ISubtitleEncoder>(new SubtitleEncoder(LibraryManager, LogManager.GetLogger("SubtitleEncoder"), ApplicationPaths, FileSystemManager, MediaEncoder, JsonSerializer));
SubtitleEncoder = new SubtitleEncoder(LibraryManager, LogManager.GetLogger("SubtitleEncoder"), ApplicationPaths, FileSystemManager, MediaEncoder, JsonSerializer);
RegisterSingleInstance(SubtitleEncoder);
await ConfigureDisplayPreferencesRepositories().ConfigureAwait(false);
await ConfigureItemRepositories().ConfigureAwait(false);
@@ -602,7 +604,8 @@ namespace MediaBrowser.Server.Startup.Common
IsoManager,
LibraryManager,
ChannelManager,
SessionManager);
SessionManager,
() => SubtitleEncoder);
RegisterSingleInstance(MediaEncoder);
}
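MediaEncoder now receives a Func&lt;ISubtitleEncoder&gt; instead of the encoder itself, which lets the two components reference each other without a constructor cycle; a minimal standalone sketch of that deferred-resolution pattern (illustrative names, not the real MediaBrowser types):

// Two classes that need each other: the Func<> defers one side until it is actually used.
class Subtitles { public Subtitles(Encoder encoder) { } }
class Encoder
{
    private readonly Func<Subtitles> _subtitles;
    public Encoder(Func<Subtitles> subtitles) { _subtitles = subtitles; }
    public void Encode() { var s = _subtitles(); /* resolved lazily, only when needed */ }
}
// Wiring: the lambda closes over a variable assigned after construction, so neither
// constructor has to wait for the other to finish.
Subtitles subtitles = null;
var encoder = new Encoder(() => subtitles);
subtitles = new Subtitles(encoder);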
