diff --git a/Emby.Server.Implementations/ApplicationHost.cs b/Emby.Server.Implementations/ApplicationHost.cs index 17208d97d7..ad6cbe167f 100644 --- a/Emby.Server.Implementations/ApplicationHost.cs +++ b/Emby.Server.Implementations/ApplicationHost.cs @@ -4,7 +4,6 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; -using System.Globalization; using System.IO; using System.Linq; using System.Net; @@ -46,6 +45,7 @@ using Emby.Server.Implementations.Session; using Emby.Server.Implementations.SyncPlay; using Emby.Server.Implementations.TV; using Emby.Server.Implementations.Updates; +using Jellyfin.Api.Helpers; using MediaBrowser.Api; using MediaBrowser.Common; using MediaBrowser.Common.Configuration; @@ -633,6 +633,8 @@ namespace Emby.Server.Implementations serviceCollection.AddSingleton(); serviceCollection.AddSingleton(); + + serviceCollection.AddSingleton(); } /// diff --git a/Jellyfin.Api/Controllers/AudioController.cs b/Jellyfin.Api/Controllers/AudioController.cs new file mode 100644 index 0000000000..7405c26fb8 --- /dev/null +++ b/Jellyfin.Api/Controllers/AudioController.cs @@ -0,0 +1,350 @@ +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Jellyfin.Api.Helpers; +using Jellyfin.Api.Models.StreamingDtos; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Controller.Configuration; +using MediaBrowser.Controller.Devices; +using MediaBrowser.Controller.Dlna; +using MediaBrowser.Controller.Library; +using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Controller.Net; +using MediaBrowser.Model.Dlna; +using MediaBrowser.Model.IO; +using MediaBrowser.Model.MediaInfo; +using MediaBrowser.Model.Net; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Configuration; + +namespace Jellyfin.Api.Controllers +{ + /// + /// The audio controller. + /// + // TODO: In order to autheneticate this in the future, Dlna playback will require updating + public class AudioController : BaseJellyfinApiController + { + private readonly IDlnaManager _dlnaManager; + private readonly IAuthorizationContext _authContext; + private readonly IUserManager _userManager; + private readonly ILibraryManager _libraryManager; + private readonly IMediaSourceManager _mediaSourceManager; + private readonly IServerConfigurationManager _serverConfigurationManager; + private readonly IMediaEncoder _mediaEncoder; + private readonly IStreamHelper _streamHelper; + private readonly IFileSystem _fileSystem; + private readonly ISubtitleEncoder _subtitleEncoder; + private readonly IConfiguration _configuration; + private readonly IDeviceManager _deviceManager; + private readonly TranscodingJobHelper _transcodingJobHelper; + private readonly HttpClient _httpClient; + + private readonly TranscodingJobType _transcodingJobType = TranscodingJobType.Progressive; + + /// + /// Initializes a new instance of the class. + /// + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// The singleton. + /// Instance of the . 
+ public AudioController( + IDlnaManager dlnaManager, + IUserManager userManager, + IAuthorizationContext authorizationContext, + ILibraryManager libraryManager, + IMediaSourceManager mediaSourceManager, + IServerConfigurationManager serverConfigurationManager, + IMediaEncoder mediaEncoder, + IStreamHelper streamHelper, + IFileSystem fileSystem, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + IDeviceManager deviceManager, + TranscodingJobHelper transcodingJobHelper, + HttpClient httpClient) + { + _dlnaManager = dlnaManager; + _authContext = authorizationContext; + _userManager = userManager; + _libraryManager = libraryManager; + _mediaSourceManager = mediaSourceManager; + _serverConfigurationManager = serverConfigurationManager; + _mediaEncoder = mediaEncoder; + _streamHelper = streamHelper; + _fileSystem = fileSystem; + _subtitleEncoder = subtitleEncoder; + _configuration = configuration; + _deviceManager = deviceManager; + _transcodingJobHelper = transcodingJobHelper; + _httpClient = httpClient; + } + + /// + /// Gets an audio stream. + /// + /// The item id. + /// The audio container. + /// Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false. + /// The streaming parameters. + /// The tag. + /// Optional. The dlna device profile id to utilize. + /// The play session id. + /// The segment container. + /// The segment length. + /// The minimum number of segments. + /// The media version id, if playing an alternate version. + /// The device id of the client requesting. Used to stop encoding processes when needed. + /// Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma. + /// Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true. + /// Whether or not to allow copying of the video stream url. + /// Whether or not to allow copying of the audio stream url. + /// Optional. Whether to break on non key frames. + /// Optional. Specify a specific audio sample rate, e.g. 44100. + /// Optional. The maximum audio bit depth. + /// Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults. + /// Optional. Specify a specific number of audio channels to encode to, e.g. 2. + /// Optional. Specify a maximum number of audio channels to encode to, e.g. 2. + /// Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high. + /// Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1. + /// Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. + /// Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements. + /// Whether or not to copy timestamps when transcoding with an offset. Defaults to false. + /// Optional. Specify a starting offset, in ticks. 1 ms = 10000 ticks. + /// Optional. The fixed horizontal resolution of the encoded video. + /// Optional. The fixed vertical resolution of the encoded video. + /// Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults. + /// Optional. The index of the subtitle stream to use. If omitted no subtitles will be used. + /// Optional. 
Specify the subtitle delivery method. + /// Optional. + /// Optional. The maximum video bit depth. + /// Optional. Whether to require avc. + /// Optional. Whether to deinterlace the video. + /// Optional. Whether to require a non anamporphic stream. + /// Optional. The maximum number of audio channels to transcode. + /// Optional. The limit of how many cpu cores to use. + /// The live stream id. + /// Optional. Whether to enable the MpegtsM2Ts mode. + /// Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vpx, wmv. + /// Optional. Specify a subtitle codec to encode to. + /// Optional. The transcoding reason. + /// Optional. The index of the audio stream to use. If omitted the first audio stream will be used. + /// Optional. The index of the video stream to use. If omitted the first video stream will be used. + /// Optional. The . + /// Optional. The streaming options. + /// A containing the audio file. + [HttpGet("{itemId}/{stream=stream}.{container?}")] + [HttpGet("{itemId}/stream")] + [HttpHead("{itemId}/{stream=stream}.{container?}")] + [HttpHead("{itemId}/stream")] + [ProducesResponseType(StatusCodes.Status200OK)] + public async Task GetAudioStream( + [FromRoute] Guid itemId, + [FromRoute] string? container, + [FromQuery] bool? @static, + [FromQuery] string? @params, + [FromQuery] string? tag, + [FromQuery] string? deviceProfileId, + [FromQuery] string? playSessionId, + [FromQuery] string? segmentContainer, + [FromQuery] int? segmentLength, + [FromQuery] int? minSegments, + [FromQuery] string? mediaSourceId, + [FromQuery] string? deviceId, + [FromQuery] string? audioCodec, + [FromQuery] bool? enableAutoStreamCopy, + [FromQuery] bool? allowVideoStreamCopy, + [FromQuery] bool? allowAudioStreamCopy, + [FromQuery] bool? breakOnNonKeyFrames, + [FromQuery] int? audioSampleRate, + [FromQuery] int? maxAudioBitDepth, + [FromQuery] int? audioBitRate, + [FromQuery] int? audioChannels, + [FromQuery] int? maxAudioChannels, + [FromQuery] string? profile, + [FromQuery] string? level, + [FromQuery] float? framerate, + [FromQuery] float? maxFramerate, + [FromQuery] bool? copyTimestamps, + [FromQuery] long? startTimeTicks, + [FromQuery] int? width, + [FromQuery] int? height, + [FromQuery] int? videoBitRate, + [FromQuery] int? subtitleStreamIndex, + [FromQuery] SubtitleDeliveryMethod subtitleMethod, + [FromQuery] int? maxRefFrames, + [FromQuery] int? maxVideoBitDepth, + [FromQuery] bool? requireAvc, + [FromQuery] bool? deInterlace, + [FromQuery] bool? requireNonAnamorphic, + [FromQuery] int? transcodingMaxAudioChannels, + [FromQuery] int? cpuCoreLimit, + [FromQuery] string? liveStreamId, + [FromQuery] bool? enableMpegtsM2TsMode, + [FromQuery] string? videoCodec, + [FromQuery] string? subtitleCodec, + [FromQuery] string? transcodingReasons, + [FromQuery] int? audioStreamIndex, + [FromQuery] int? videoStreamIndex, + [FromQuery] EncodingContext context, + [FromQuery] Dictionary streamOptions) + { + bool isHeadRequest = Request.Method == System.Net.WebRequestMethods.Http.Head; + + var cancellationTokenSource = new CancellationTokenSource(); + + StreamingRequestDto streamingRequest = new StreamingRequestDto + { + Id = itemId, + Container = container, + Static = @static.HasValue ? 
@static.Value : true, + Params = @params, + Tag = tag, + DeviceProfileId = deviceProfileId, + PlaySessionId = playSessionId, + SegmentContainer = segmentContainer, + SegmentLength = segmentLength, + MinSegments = minSegments, + MediaSourceId = mediaSourceId, + DeviceId = deviceId, + AudioCodec = audioCodec, + EnableAutoStreamCopy = enableAutoStreamCopy.HasValue ? enableAutoStreamCopy.Value : true, + AllowAudioStreamCopy = allowAudioStreamCopy.HasValue ? allowAudioStreamCopy.Value : true, + AllowVideoStreamCopy = allowVideoStreamCopy.HasValue ? allowVideoStreamCopy.Value : true, + BreakOnNonKeyFrames = breakOnNonKeyFrames.HasValue ? breakOnNonKeyFrames.Value : false, + AudioSampleRate = audioSampleRate, + MaxAudioChannels = maxAudioChannels, + AudioBitRate = audioBitRate, + MaxAudioBitDepth = maxAudioBitDepth, + AudioChannels = audioChannels, + Profile = profile, + Level = level, + Framerate = framerate, + MaxFramerate = maxFramerate, + CopyTimestamps = copyTimestamps.HasValue ? copyTimestamps.Value : true, + StartTimeTicks = startTimeTicks, + Width = width, + Height = height, + VideoBitRate = videoBitRate, + SubtitleStreamIndex = subtitleStreamIndex, + SubtitleMethod = subtitleMethod, + MaxRefFrames = maxRefFrames, + MaxVideoBitDepth = maxVideoBitDepth, + RequireAvc = requireAvc.HasValue ? requireAvc.Value : true, + DeInterlace = deInterlace.HasValue ? deInterlace.Value : true, + RequireNonAnamorphic = requireNonAnamorphic.HasValue ? requireNonAnamorphic.Value : true, + TranscodingMaxAudioChannels = transcodingMaxAudioChannels, + CpuCoreLimit = cpuCoreLimit, + LiveStreamId = liveStreamId, + EnableMpegtsM2TsMode = enableMpegtsM2TsMode.HasValue ? enableMpegtsM2TsMode.Value : true, + VideoCodec = videoCodec, + SubtitleCodec = subtitleCodec, + TranscodeReasons = transcodingReasons, + AudioStreamIndex = audioStreamIndex, + VideoStreamIndex = videoStreamIndex, + Context = context, + StreamOptions = streamOptions + }; + + using var state = await StreamingHelpers.GetStreamingState( + streamingRequest, + Request, + _authContext, + _mediaSourceManager, + _userManager, + _libraryManager, + _serverConfigurationManager, + _mediaEncoder, + _fileSystem, + _subtitleEncoder, + _configuration, + _dlnaManager, + _deviceManager, + _transcodingJobHelper, + _transcodingJobType, + cancellationTokenSource.Token) + .ConfigureAwait(false); + + if (@static.HasValue && @static.Value && state.DirectStreamProvider != null) + { + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager); + + // TODO AllowEndOfFile = false + await new ProgressiveFileCopier(_streamHelper, state.DirectStreamProvider).WriteToAsync(Response.Body, CancellationToken.None).ConfigureAwait(false); + + // TODO (moved from MediaBrowser.Api): Don't hardcode contentType + return File(Response.Body, MimeTypes.GetMimeType("file.ts")!); + } + + // Static remote stream + if (@static.HasValue && @static.Value && state.InputProtocol == MediaProtocol.Http) + { + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, true, startTimeTicks, Request, _dlnaManager); + + return await FileStreamResponseHelpers.GetStaticRemoteStreamResult(state, isHeadRequest, this, _httpClient).ConfigureAwait(false); + } + + if (@static.HasValue && @static.Value && state.InputProtocol != MediaProtocol.File) + { + return BadRequest($"Input protocol {state.InputProtocol} cannot be streamed statically"); + } + + var outputPath = state.OutputFilePath; + var outputPathExists = System.IO.File.Exists(outputPath); + + var transcodingJob = 
_transcodingJobHelper.GetTranscodingJob(outputPath, TranscodingJobType.Progressive); + var isTranscodeCached = outputPathExists && transcodingJob != null; + + StreamingHelpers.AddDlnaHeaders(state, Response.Headers, (@static.HasValue && @static.Value) || isTranscodeCached, startTimeTicks, Request, _dlnaManager); + + // Static stream + if (@static.HasValue && @static.Value) + { + var contentType = state.GetMimeType("." + state.OutputContainer, false) ?? state.GetMimeType(state.MediaPath); + + if (state.MediaSource.IsInfiniteStream) + { + // TODO AllowEndOfFile = false + await new ProgressiveFileCopier(_streamHelper, state.MediaPath).WriteToAsync(Response.Body, CancellationToken.None).ConfigureAwait(false); + + return File(Response.Body, contentType); + } + + return FileStreamResponseHelpers.GetStaticFileResult( + state.MediaPath, + contentType, + isHeadRequest, + this); + } + + // Need to start ffmpeg (because media can't be returned directly) + var encodingOptions = _serverConfigurationManager.GetEncodingOptions(); + var encodingHelper = new EncodingHelper(_mediaEncoder, _fileSystem, _subtitleEncoder, _configuration); + var ffmpegCommandLineArguments = encodingHelper.GetProgressiveAudioFullCommandLine(state, encodingOptions, outputPath); + return await FileStreamResponseHelpers.GetTranscodedFile( + state, + isHeadRequest, + _streamHelper, + this, + _transcodingJobHelper, + ffmpegCommandLineArguments, + Request, + _transcodingJobType, + cancellationTokenSource).ConfigureAwait(false); + } + } +} diff --git a/Jellyfin.Api/Controllers/PlaystateController.cs b/Jellyfin.Api/Controllers/PlaystateController.cs index 9fcf041996..3ebd003f1a 100644 --- a/Jellyfin.Api/Controllers/PlaystateController.cs +++ b/Jellyfin.Api/Controllers/PlaystateController.cs @@ -8,7 +8,6 @@ using MediaBrowser.Controller.Library; using MediaBrowser.Controller.Net; using MediaBrowser.Controller.Session; using MediaBrowser.Model.Dto; -using MediaBrowser.Model.IO; using MediaBrowser.Model.Session; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Http; @@ -40,8 +39,7 @@ namespace Jellyfin.Api.Controllers /// Instance of the interface. /// Instance of the interface. /// Instance of the interface. - /// Instance of the interface. - /// Instance of the interface. + /// Th singleton. 
public PlaystateController( IUserManager userManager, IUserDataManager userDataRepository, @@ -49,8 +47,7 @@ namespace Jellyfin.Api.Controllers ISessionManager sessionManager, IAuthorizationContext authContext, ILoggerFactory loggerFactory, - IMediaSourceManager mediaSourceManager, - IFileSystem fileSystem) + TranscodingJobHelper transcodingJobHelper) { _userManager = userManager; _userDataRepository = userDataRepository; @@ -59,10 +56,7 @@ namespace Jellyfin.Api.Controllers _authContext = authContext; _logger = loggerFactory.CreateLogger(); - _transcodingJobHelper = new TranscodingJobHelper( - loggerFactory.CreateLogger(), - mediaSourceManager, - fileSystem); + _transcodingJobHelper = transcodingJobHelper; } /// diff --git a/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs b/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs new file mode 100644 index 0000000000..636f47f5f1 --- /dev/null +++ b/Jellyfin.Api/Helpers/FileStreamResponseHelpers.cs @@ -0,0 +1,139 @@ +using System; +using System.IO; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Jellyfin.Api.Models.StreamingDtos; +using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Model.IO; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Net.Http.Headers; + +namespace Jellyfin.Api.Helpers +{ + /// + /// The stream response helpers. + /// + public static class FileStreamResponseHelpers + { + /// + /// Returns a static file from a remote source. + /// + /// The current . + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// The managing the response. + /// The making the remote request. + /// A containing the API response. + public static async Task GetStaticRemoteStreamResult( + StreamState state, + bool isHeadRequest, + ControllerBase controller, + HttpClient httpClient) + { + if (state.RemoteHttpHeaders.TryGetValue(HeaderNames.UserAgent, out var useragent)) + { + httpClient.DefaultRequestHeaders.Add(HeaderNames.UserAgent, useragent); + } + + using var response = await httpClient.GetAsync(state.MediaPath).ConfigureAwait(false); + var contentType = response.Content.Headers.ContentType.ToString(); + + controller.Response.Headers[HeaderNames.AcceptRanges] = "none"; + + if (isHeadRequest) + { + return controller.File(Array.Empty(), contentType); + } + + return controller.File(await response.Content.ReadAsStreamAsync().ConfigureAwait(false), contentType); + } + + /// + /// Returns a static file from the server. + /// + /// The path to the file. + /// The content type of the file. + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// The managing the response. + /// An the file. + public static ActionResult GetStaticFileResult( + string path, + string contentType, + bool isHeadRequest, + ControllerBase controller) + { + controller.Response.ContentType = contentType; + + // if the request is a head request, return a NoContent result with the same headers as it would with a GET request + if (isHeadRequest) + { + return controller.NoContent(); + } + + using var stream = new FileStream(path, FileMode.Open, FileAccess.Read); + return controller.File(stream, contentType); + } + + /// + /// Returns a transcoded file from the server. + /// + /// The current . + /// Whether the current request is a HTTP HEAD request so only the headers get returned. + /// Instance of the interface. + /// The managing the response. + /// The singleton. + /// The command line arguments to start ffmpeg. 
+ /// The starting the transcoding. + /// The . + /// The . + /// A containing the transcoded file. + public static async Task GetTranscodedFile( + StreamState state, + bool isHeadRequest, + IStreamHelper streamHelper, + ControllerBase controller, + TranscodingJobHelper transcodingJobHelper, + string ffmpegCommandLineArguments, + HttpRequest request, + TranscodingJobType transcodingJobType, + CancellationTokenSource cancellationTokenSource) + { + // Use the command line args with a dummy playlist path + var outputPath = state.OutputFilePath; + + controller.Response.Headers[HeaderNames.AcceptRanges] = "none"; + + var contentType = state.GetMimeType(outputPath); + + // Headers only + if (isHeadRequest) + { + return controller.File(Array.Empty(), contentType); + } + + var transcodingLock = transcodingJobHelper.GetTranscodingLock(outputPath); + await transcodingLock.WaitAsync(cancellationTokenSource.Token).ConfigureAwait(false); + try + { + if (!File.Exists(outputPath)) + { + await transcodingJobHelper.StartFfMpeg(state, outputPath, ffmpegCommandLineArguments, request, transcodingJobType, cancellationTokenSource).ConfigureAwait(false); + } + else + { + transcodingJobHelper.OnTranscodeBeginRequest(outputPath, TranscodingJobType.Progressive); + state.Dispose(); + } + + await using var memoryStream = new MemoryStream(); + await new ProgressiveFileCopier(streamHelper, outputPath).WriteToAsync(memoryStream, CancellationToken.None).ConfigureAwait(false); + return controller.File(memoryStream, contentType); + } + finally + { + transcodingLock.Release(); + } + } + } +} diff --git a/Jellyfin.Api/Helpers/StreamingHelpers.cs b/Jellyfin.Api/Helpers/StreamingHelpers.cs new file mode 100644 index 0000000000..b12590080c --- /dev/null +++ b/Jellyfin.Api/Helpers/StreamingHelpers.cs @@ -0,0 +1,758 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Jellyfin.Api.Models.StreamingDtos; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Common.Extensions; +using MediaBrowser.Controller.Configuration; +using MediaBrowser.Controller.Devices; +using MediaBrowser.Controller.Dlna; +using MediaBrowser.Controller.Library; +using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Controller.Net; +using MediaBrowser.Model.Dlna; +using MediaBrowser.Model.Dto; +using MediaBrowser.Model.Entities; +using MediaBrowser.Model.IO; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Primitives; +using Microsoft.Net.Http.Headers; + +namespace Jellyfin.Api.Helpers +{ + /// + /// The streaming helpers. + /// + public static class StreamingHelpers + { + /// + /// Gets the current streaming state. + /// + /// The . + /// The . + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Initialized . + /// The . + /// The . + /// A containing the current . 
+ public static async Task GetStreamingState( + StreamingRequestDto streamingRequest, + HttpRequest httpRequest, + IAuthorizationContext authorizationContext, + IMediaSourceManager mediaSourceManager, + IUserManager userManager, + ILibraryManager libraryManager, + IServerConfigurationManager serverConfigurationManager, + IMediaEncoder mediaEncoder, + IFileSystem fileSystem, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + IDlnaManager dlnaManager, + IDeviceManager deviceManager, + TranscodingJobHelper transcodingJobHelper, + TranscodingJobType transcodingJobType, + CancellationToken cancellationToken) + { + EncodingHelper encodingHelper = new EncodingHelper(mediaEncoder, fileSystem, subtitleEncoder, configuration); + // Parse the DLNA time seek header + if (!streamingRequest.StartTimeTicks.HasValue) + { + var timeSeek = httpRequest.Headers["TimeSeekRange.dlna.org"]; + + streamingRequest.StartTimeTicks = ParseTimeSeekHeader(timeSeek.ToString()); + } + + if (!string.IsNullOrWhiteSpace(streamingRequest.Params)) + { + ParseParams(streamingRequest); + } + + streamingRequest.StreamOptions = ParseStreamOptions(httpRequest.Query); + + var url = httpRequest.Path.Value.Split('.').Last(); + + if (string.IsNullOrEmpty(streamingRequest.AudioCodec)) + { + streamingRequest.AudioCodec = encodingHelper.InferAudioCodec(url); + } + + var enableDlnaHeaders = !string.IsNullOrWhiteSpace(streamingRequest.Params) || + string.Equals(httpRequest.Headers["GetContentFeatures.DLNA.ORG"], "1", StringComparison.OrdinalIgnoreCase); + + var state = new StreamState(mediaSourceManager, transcodingJobType, transcodingJobHelper) + { + Request = streamingRequest, + RequestedUrl = url, + UserAgent = httpRequest.Headers[HeaderNames.UserAgent], + EnableDlnaHeaders = enableDlnaHeaders + }; + + var auth = authorizationContext.GetAuthorizationInfo(httpRequest); + if (!auth.UserId.Equals(Guid.Empty)) + { + state.User = userManager.GetUserById(auth.UserId); + } + + if (state.IsVideoRequest && !string.IsNullOrWhiteSpace(state.Request.VideoCodec)) + { + state.SupportedVideoCodecs = state.Request.VideoCodec.Split(',', StringSplitOptions.RemoveEmptyEntries); + state.Request.VideoCodec = state.SupportedVideoCodecs.FirstOrDefault(); + } + + if (!string.IsNullOrWhiteSpace(streamingRequest.AudioCodec)) + { + state.SupportedAudioCodecs = streamingRequest.AudioCodec.Split(',', StringSplitOptions.RemoveEmptyEntries); + state.Request.AudioCodec = state.SupportedAudioCodecs.FirstOrDefault(i => mediaEncoder.CanEncodeToAudioCodec(i)) + ?? state.SupportedAudioCodecs.FirstOrDefault(); + } + + if (!string.IsNullOrWhiteSpace(streamingRequest.SubtitleCodec)) + { + state.SupportedSubtitleCodecs = streamingRequest.SubtitleCodec.Split(',', StringSplitOptions.RemoveEmptyEntries); + state.Request.SubtitleCodec = state.SupportedSubtitleCodecs.FirstOrDefault(i => mediaEncoder.CanEncodeToSubtitleCodec(i)) + ?? state.SupportedSubtitleCodecs.FirstOrDefault(); + } + + var item = libraryManager.GetItemById(streamingRequest.Id); + + state.IsInputVideo = string.Equals(item.MediaType, MediaType.Video, StringComparison.OrdinalIgnoreCase); + + MediaSourceInfo? mediaSource = null; + if (string.IsNullOrWhiteSpace(streamingRequest.LiveStreamId)) + { + var currentJob = !string.IsNullOrWhiteSpace(streamingRequest.PlaySessionId) + ? 
transcodingJobHelper.GetTranscodingJob(streamingRequest.PlaySessionId) + : null; + + if (currentJob != null) + { + mediaSource = currentJob.MediaSource; + } + + if (mediaSource == null) + { + var mediaSources = await mediaSourceManager.GetPlaybackMediaSources(libraryManager.GetItemById(streamingRequest.Id), null, false, false, cancellationToken).ConfigureAwait(false); + + mediaSource = string.IsNullOrEmpty(streamingRequest.MediaSourceId) + ? mediaSources[0] + : mediaSources.Find(i => string.Equals(i.Id, streamingRequest.MediaSourceId, StringComparison.InvariantCulture)); + + if (mediaSource == null && Guid.Parse(streamingRequest.MediaSourceId) == streamingRequest.Id) + { + mediaSource = mediaSources[0]; + } + } + } + else + { + var liveStreamInfo = await mediaSourceManager.GetLiveStreamWithDirectStreamProvider(streamingRequest.LiveStreamId, cancellationToken).ConfigureAwait(false); + mediaSource = liveStreamInfo.Item1; + state.DirectStreamProvider = liveStreamInfo.Item2; + } + + encodingHelper.AttachMediaSourceInfo(state, mediaSource, url); + + string? containerInternal = Path.GetExtension(state.RequestedUrl); + + if (!string.IsNullOrEmpty(streamingRequest.Container)) + { + containerInternal = streamingRequest.Container; + } + + if (string.IsNullOrEmpty(containerInternal)) + { + containerInternal = streamingRequest.Static ? + StreamBuilder.NormalizeMediaSourceFormatIntoSingleContainer(state.InputContainer, state.MediaPath, null, DlnaProfileType.Audio) + : GetOutputFileExtension(state); + } + + state.OutputContainer = (containerInternal ?? string.Empty).TrimStart('.'); + + state.OutputAudioBitrate = encodingHelper.GetAudioBitrateParam(streamingRequest.AudioBitRate, state.AudioStream); + + state.OutputAudioCodec = streamingRequest.AudioCodec; + + state.OutputAudioChannels = encodingHelper.GetNumAudioChannelsParam(state, state.AudioStream, state.OutputAudioCodec); + + if (state.VideoRequest != null) + { + state.OutputVideoCodec = state.Request.VideoCodec; + state.OutputVideoBitrate = encodingHelper.GetVideoBitrateParamValue(state.VideoRequest, state.VideoStream, state.OutputVideoCodec); + + encodingHelper.TryStreamCopy(state); + + if (state.OutputVideoBitrate.HasValue && !EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + var resolution = ResolutionNormalizer.Normalize( + state.VideoStream?.BitRate, + state.VideoStream?.Width, + state.VideoStream?.Height, + state.OutputVideoBitrate.Value, + state.VideoStream?.Codec, + state.OutputVideoCodec, + state.VideoRequest.MaxWidth, + state.VideoRequest.MaxHeight); + + state.VideoRequest.MaxWidth = resolution.MaxWidth; + state.VideoRequest.MaxHeight = resolution.MaxHeight; + } + } + + ApplyDeviceProfileSettings(state, dlnaManager, deviceManager, httpRequest, streamingRequest.DeviceProfileId, streamingRequest.Static); + + var ext = string.IsNullOrWhiteSpace(state.OutputContainer) + ? GetOutputFileExtension(state) + : ('.' + state.OutputContainer); + + state.OutputFilePath = GetOutputFilePath(state, ext!, serverConfigurationManager, streamingRequest.DeviceId, streamingRequest.PlaySessionId); + + return state; + } + + /// + /// Adds the dlna headers. + /// + /// The state. + /// The response headers. + /// if set to true [is statically streamed]. + /// The start time in ticks. + /// The . + /// Instance of the interface. + public static void AddDlnaHeaders( + StreamState state, + IHeaderDictionary responseHeaders, + bool isStaticallyStreamed, + long? 
startTimeTicks, + HttpRequest request, + IDlnaManager dlnaManager) + { + if (!state.EnableDlnaHeaders) + { + return; + } + + var profile = state.DeviceProfile; + + StringValues transferMode = request.Headers["transferMode.dlna.org"]; + responseHeaders.Add("transferMode.dlna.org", string.IsNullOrEmpty(transferMode) ? "Streaming" : transferMode.ToString()); + responseHeaders.Add("realTimeInfo.dlna.org", "DLNA.ORG_TLAG=*"); + + if (state.RunTimeTicks.HasValue) + { + if (string.Equals(request.Headers["getMediaInfo.sec"], "1", StringComparison.OrdinalIgnoreCase)) + { + var ms = TimeSpan.FromTicks(state.RunTimeTicks.Value).TotalMilliseconds; + responseHeaders.Add("MediaInfo.sec", string.Format( + CultureInfo.InvariantCulture, + "SEC_Duration={0};", + Convert.ToInt32(ms))); + } + + if (!isStaticallyStreamed && profile != null) + { + AddTimeSeekResponseHeaders(state, responseHeaders, startTimeTicks); + } + } + + if (profile == null) + { + profile = dlnaManager.GetDefaultProfile(); + } + + var audioCodec = state.ActualOutputAudioCodec; + + if (!state.IsVideoRequest) + { + responseHeaders.Add("contentFeatures.dlna.org", new ContentFeatureBuilder(profile).BuildAudioHeader( + state.OutputContainer, + audioCodec, + state.OutputAudioBitrate, + state.OutputAudioSampleRate, + state.OutputAudioChannels, + state.OutputAudioBitDepth, + isStaticallyStreamed, + state.RunTimeTicks, + state.TranscodeSeekInfo)); + } + else + { + var videoCodec = state.ActualOutputVideoCodec; + + responseHeaders.Add( + "contentFeatures.dlna.org", + new ContentFeatureBuilder(profile).BuildVideoHeader(state.OutputContainer, videoCodec, audioCodec, state.OutputWidth, state.OutputHeight, state.TargetVideoBitDepth, state.OutputVideoBitrate, state.TargetTimestamp, isStaticallyStreamed, state.RunTimeTicks, state.TargetVideoProfile, state.TargetVideoLevel, state.TargetFramerate, state.TargetPacketLength, state.TranscodeSeekInfo, state.IsTargetAnamorphic, state.IsTargetInterlaced, state.TargetRefFrames, state.TargetVideoStreamCount, state.TargetAudioStreamCount, state.TargetVideoCodecTag, state.IsTargetAVC).FirstOrDefault() ?? string.Empty); + } + } + + /// + /// Parses the time seek header. + /// + /// The time seek header string. + /// A nullable representing the seek time in ticks. + private static long? ParseTimeSeekHeader(ReadOnlySpan value) + { + if (value.IsEmpty) + { + return null; + } + + const string npt = "npt="; + if (!value.StartsWith(npt, StringComparison.OrdinalIgnoreCase)) + { + throw new ArgumentException("Invalid timeseek header"); + } + + var index = value.IndexOf('-'); + value = index == -1 + ? value.Slice(npt.Length) + : value.Slice(npt.Length, index - npt.Length); + if (value.IndexOf(':') == -1) + { + // Parses npt times in the format of '417.33' + if (double.TryParse(value, NumberStyles.Any, CultureInfo.InvariantCulture, out var seconds)) + { + return TimeSpan.FromSeconds(seconds).Ticks; + } + + throw new ArgumentException("Invalid timeseek header"); + } + + try + { + // Parses npt times in the format of '10:19:25.7' + return TimeSpan.Parse(value).Ticks; + } + catch + { + throw new ArgumentException("Invalid timeseek header"); + } + } + + /// + /// Parses query parameters as StreamOptions. + /// + /// The query string. + /// A containing the stream options. 
+ private static Dictionary ParseStreamOptions(IQueryCollection queryString) + { + Dictionary streamOptions = new Dictionary(); + foreach (var param in queryString) + { + if (char.IsLower(param.Key[0])) + { + // This was probably not parsed initially and should be a StreamOptions + // or the generated URL should correctly serialize it + // TODO: This should be incorporated either in the lower framework for parsing requests + streamOptions[param.Key] = param.Value; + } + } + + return streamOptions; + } + + /// + /// Adds the dlna time seek headers to the response. + /// + /// The current . + /// The of the response. + /// The start time in ticks. + private static void AddTimeSeekResponseHeaders(StreamState state, IHeaderDictionary responseHeaders, long? startTimeTicks) + { + var runtimeSeconds = TimeSpan.FromTicks(state.RunTimeTicks!.Value).TotalSeconds.ToString(CultureInfo.InvariantCulture); + var startSeconds = TimeSpan.FromTicks(startTimeTicks ?? 0).TotalSeconds.ToString(CultureInfo.InvariantCulture); + + responseHeaders.Add("TimeSeekRange.dlna.org", string.Format( + CultureInfo.InvariantCulture, + "npt={0}-{1}/{1}", + startSeconds, + runtimeSeconds)); + responseHeaders.Add("X-AvailableSeekRange", string.Format( + CultureInfo.InvariantCulture, + "1 npt={0}-{1}", + startSeconds, + runtimeSeconds)); + } + + /// + /// Gets the output file extension. + /// + /// The state. + /// System.String. + private static string? GetOutputFileExtension(StreamState state) + { + var ext = Path.GetExtension(state.RequestedUrl); + + if (!string.IsNullOrEmpty(ext)) + { + return ext; + } + + // Try to infer based on the desired video codec + if (state.IsVideoRequest) + { + var videoCodec = state.Request.VideoCodec; + + if (string.Equals(videoCodec, "h264", StringComparison.OrdinalIgnoreCase) || + string.Equals(videoCodec, "h265", StringComparison.OrdinalIgnoreCase)) + { + return ".ts"; + } + + if (string.Equals(videoCodec, "theora", StringComparison.OrdinalIgnoreCase)) + { + return ".ogv"; + } + + if (string.Equals(videoCodec, "vpx", StringComparison.OrdinalIgnoreCase)) + { + return ".webm"; + } + + if (string.Equals(videoCodec, "wmv", StringComparison.OrdinalIgnoreCase)) + { + return ".asf"; + } + } + + // Try to infer based on the desired audio codec + if (!state.IsVideoRequest) + { + var audioCodec = state.Request.AudioCodec; + + if (string.Equals("aac", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".aac"; + } + + if (string.Equals("mp3", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".mp3"; + } + + if (string.Equals("vorbis", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".ogg"; + } + + if (string.Equals("wma", audioCodec, StringComparison.OrdinalIgnoreCase)) + { + return ".wma"; + } + } + + return null; + } + + /// + /// Gets the output file path for transcoding. + /// + /// The current . + /// The file extension of the output file. + /// Instance of the interface. + /// The device id. + /// The play session id. + /// The complete file path, including the folder, for the transcoding file. + private static string GetOutputFilePath(StreamState state, string outputFileExtension, IServerConfigurationManager serverConfigurationManager, string? deviceId, string? 
playSessionId) + { + var data = $"{state.MediaPath}-{state.UserAgent}-{deviceId!}-{playSessionId!}"; + + var filename = data.GetMD5().ToString("N", CultureInfo.InvariantCulture); + var ext = outputFileExtension?.ToLowerInvariant(); + var folder = serverConfigurationManager.GetTranscodePath(); + + return Path.Combine(folder, filename + ext); + } + + private static void ApplyDeviceProfileSettings(StreamState state, IDlnaManager dlnaManager, IDeviceManager deviceManager, HttpRequest request, string? deviceProfileId, bool? @static) + { + var headers = request.Headers; + + if (!string.IsNullOrWhiteSpace(deviceProfileId)) + { + state.DeviceProfile = dlnaManager.GetProfile(deviceProfileId); + } + else if (!string.IsNullOrWhiteSpace(deviceProfileId)) + { + var caps = deviceManager.GetCapabilities(deviceProfileId); + + state.DeviceProfile = caps == null ? dlnaManager.GetProfile(headers) : caps.DeviceProfile; + } + + var profile = state.DeviceProfile; + + if (profile == null) + { + // Don't use settings from the default profile. + // Only use a specific profile if it was requested. + return; + } + + var audioCodec = state.ActualOutputAudioCodec; + var videoCodec = state.ActualOutputVideoCodec; + + var mediaProfile = !state.IsVideoRequest + ? profile.GetAudioMediaProfile(state.OutputContainer, audioCodec, state.OutputAudioChannels, state.OutputAudioBitrate, state.OutputAudioSampleRate, state.OutputAudioBitDepth) + : profile.GetVideoMediaProfile( + state.OutputContainer, + audioCodec, + videoCodec, + state.OutputWidth, + state.OutputHeight, + state.TargetVideoBitDepth, + state.OutputVideoBitrate, + state.TargetVideoProfile, + state.TargetVideoLevel, + state.TargetFramerate, + state.TargetPacketLength, + state.TargetTimestamp, + state.IsTargetAnamorphic, + state.IsTargetInterlaced, + state.TargetRefFrames, + state.TargetVideoStreamCount, + state.TargetAudioStreamCount, + state.TargetVideoCodecTag, + state.IsTargetAVC); + + if (mediaProfile != null) + { + state.MimeType = mediaProfile.MimeType; + } + + if (!(@static.HasValue && @static.Value)) + { + var transcodingProfile = !state.IsVideoRequest ? profile.GetAudioTranscodingProfile(state.OutputContainer, audioCodec) : profile.GetVideoTranscodingProfile(state.OutputContainer, audioCodec, videoCodec); + + if (transcodingProfile != null) + { + state.EstimateContentLength = transcodingProfile.EstimateContentLength; + // state.EnableMpegtsM2TsMode = transcodingProfile.EnableMpegtsM2TsMode; + state.TranscodeSeekInfo = transcodingProfile.TranscodeSeekInfo; + + if (state.VideoRequest != null) + { + state.VideoRequest.CopyTimestamps = transcodingProfile.CopyTimestamps; + state.VideoRequest.EnableSubtitlesInManifest = transcodingProfile.EnableSubtitlesInManifest; + } + } + } + } + + /// + /// Parses the parameters. + /// + /// The request. 
+ private static void ParseParams(StreamingRequestDto request) + { + if (string.IsNullOrEmpty(request.Params)) + { + return; + } + + var vals = request.Params.Split(';'); + + var videoRequest = request as VideoRequestDto; + + for (var i = 0; i < vals.Length; i++) + { + var val = vals[i]; + + if (string.IsNullOrWhiteSpace(val)) + { + continue; + } + + switch (i) + { + case 0: + request.DeviceProfileId = val; + break; + case 1: + request.DeviceId = val; + break; + case 2: + request.MediaSourceId = val; + break; + case 3: + request.Static = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + break; + case 4: + if (videoRequest != null) + { + videoRequest.VideoCodec = val; + } + + break; + case 5: + request.AudioCodec = val; + break; + case 6: + if (videoRequest != null) + { + videoRequest.AudioStreamIndex = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 7: + if (videoRequest != null) + { + videoRequest.SubtitleStreamIndex = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 8: + if (videoRequest != null) + { + videoRequest.VideoBitRate = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 9: + request.AudioBitRate = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 10: + request.MaxAudioChannels = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 11: + if (videoRequest != null) + { + videoRequest.MaxFramerate = float.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 12: + if (videoRequest != null) + { + videoRequest.MaxWidth = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 13: + if (videoRequest != null) + { + videoRequest.MaxHeight = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 14: + request.StartTimeTicks = long.Parse(val, CultureInfo.InvariantCulture); + break; + case 15: + if (videoRequest != null) + { + videoRequest.Level = val; + } + + break; + case 16: + if (videoRequest != null) + { + videoRequest.MaxRefFrames = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 17: + if (videoRequest != null) + { + videoRequest.MaxVideoBitDepth = int.Parse(val, CultureInfo.InvariantCulture); + } + + break; + case 18: + if (videoRequest != null) + { + videoRequest.Profile = val; + } + + break; + case 19: + // cabac no longer used + break; + case 20: + request.PlaySessionId = val; + break; + case 21: + // api_key + break; + case 22: + request.LiveStreamId = val; + break; + case 23: + // Duplicating ItemId because of MediaMonkey + break; + case 24: + if (videoRequest != null) + { + videoRequest.CopyTimestamps = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 25: + if (!string.IsNullOrWhiteSpace(val) && videoRequest != null) + { + if (Enum.TryParse(val, out SubtitleDeliveryMethod method)) + { + videoRequest.SubtitleMethod = method; + } + } + + break; + case 26: + request.TranscodingMaxAudioChannels = int.Parse(val, CultureInfo.InvariantCulture); + break; + case 27: + if (videoRequest != null) + { + videoRequest.EnableSubtitlesInManifest = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 28: + request.Tag = val; + break; + case 29: + if (videoRequest != null) + { + videoRequest.RequireAvc = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 30: + request.SubtitleCodec = val; + break; + case 31: + if (videoRequest != null) + { + videoRequest.RequireNonAnamorphic = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + 
break; + case 32: + if (videoRequest != null) + { + videoRequest.DeInterlace = string.Equals("true", val, StringComparison.OrdinalIgnoreCase); + } + + break; + case 33: + request.TranscodeReasons = val; + break; + } + } + } + } +} diff --git a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs index 34b371d3f8..c84135085f 100644 --- a/Jellyfin.Api/Helpers/TranscodingJobHelper.cs +++ b/Jellyfin.Api/Helpers/TranscodingJobHelper.cs @@ -1,13 +1,28 @@ using System; using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; using System.IO; using System.Linq; +using System.Text; +using System.Text.Json; using System.Threading; using System.Threading.Tasks; using Jellyfin.Api.Models.PlaybackDtos; +using Jellyfin.Api.Models.StreamingDtos; +using Jellyfin.Data.Enums; +using MediaBrowser.Common.Configuration; +using MediaBrowser.Controller.Configuration; using MediaBrowser.Controller.Library; using MediaBrowser.Controller.MediaEncoding; +using MediaBrowser.Controller.Net; +using MediaBrowser.Controller.Session; +using MediaBrowser.Model.Entities; using MediaBrowser.Model.IO; +using MediaBrowser.Model.MediaInfo; +using MediaBrowser.Model.Session; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; namespace Jellyfin.Api.Helpers @@ -27,9 +42,17 @@ namespace Jellyfin.Api.Helpers /// private static readonly Dictionary _transcodingLocks = new Dictionary(); + private readonly IAuthorizationContext _authorizationContext; + private readonly EncodingHelper _encodingHelper; + private readonly IFileSystem _fileSystem; + private readonly IIsoManager _isoManager; + private readonly ILogger _logger; + private readonly IMediaEncoder _mediaEncoder; private readonly IMediaSourceManager _mediaSourceManager; - private readonly IFileSystem _fileSystem; + private readonly IServerConfigurationManager _serverConfigurationManager; + private readonly ISessionManager _sessionManager; + private readonly ILoggerFactory _loggerFactory; /// /// Initializes a new instance of the class. @@ -37,14 +60,40 @@ namespace Jellyfin.Api.Helpers /// Instance of the interface. /// Instance of the interface. /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. + /// Instance of the interface. public TranscodingJobHelper( ILogger logger, IMediaSourceManager mediaSourceManager, - IFileSystem fileSystem) + IFileSystem fileSystem, + IMediaEncoder mediaEncoder, + IServerConfigurationManager serverConfigurationManager, + ISessionManager sessionManager, + IAuthorizationContext authorizationContext, + IIsoManager isoManager, + ISubtitleEncoder subtitleEncoder, + IConfiguration configuration, + ILoggerFactory loggerFactory) { _logger = logger; _mediaSourceManager = mediaSourceManager; _fileSystem = fileSystem; + _mediaEncoder = mediaEncoder; + _serverConfigurationManager = serverConfigurationManager; + _sessionManager = sessionManager; + _authorizationContext = authorizationContext; + _isoManager = isoManager; + _loggerFactory = loggerFactory; + + _encodingHelper = new EncodingHelper(mediaEncoder, fileSystem, subtitleEncoder, configuration); + + DeleteEncodedMediaCache(); } /// @@ -60,6 +109,20 @@ namespace Jellyfin.Api.Helpers } } + /// + /// Get transcoding job. + /// + /// Path to the transcoding file. 
+ /// The . + /// The transcoding job. + public TranscodingJobDto GetTranscodingJob(string path, TranscodingJobType type) + { + lock (_activeTranscodingJobs) + { + return _activeTranscodingJobs.FirstOrDefault(j => j.Type == type && string.Equals(j.Path, path, StringComparison.OrdinalIgnoreCase)); + } + } + /// /// Ping transcoding job. /// @@ -349,5 +412,429 @@ namespace Jellyfin.Api.Helpers throw new AggregateException("Error deleting HLS files", exs); } } + + /// + /// Report the transcoding progress to the session manager. + /// + /// The of which the progress will be reported. + /// The of the current transcoding job. + /// The current transcoding position. + /// The framerate of the transcoding job. + /// The completion percentage of the transcode. + /// The number of bytes transcoded. + /// The bitrate of the transcoding job. + public void ReportTranscodingProgress( + TranscodingJobDto job, + StreamState state, + TimeSpan? transcodingPosition, + float? framerate, + double? percentComplete, + long? bytesTranscoded, + int? bitRate) + { + var ticks = transcodingPosition?.Ticks; + + if (job != null) + { + job.Framerate = framerate; + job.CompletionPercentage = percentComplete; + job.TranscodingPositionTicks = ticks; + job.BytesTranscoded = bytesTranscoded; + job.BitRate = bitRate; + } + + var deviceId = state.Request.DeviceId; + + if (!string.IsNullOrWhiteSpace(deviceId)) + { + var audioCodec = state.ActualOutputAudioCodec; + var videoCodec = state.ActualOutputVideoCodec; + + _sessionManager.ReportTranscodingInfo(deviceId, new TranscodingInfo + { + Bitrate = bitRate ?? state.TotalOutputBitrate, + AudioCodec = audioCodec, + VideoCodec = videoCodec, + Container = state.OutputContainer, + Framerate = framerate, + CompletionPercentage = percentComplete, + Width = state.OutputWidth, + Height = state.OutputHeight, + AudioChannels = state.OutputAudioChannels, + IsAudioDirect = EncodingHelper.IsCopyCodec(state.OutputAudioCodec), + IsVideoDirect = EncodingHelper.IsCopyCodec(state.OutputVideoCodec), + TranscodeReasons = state.TranscodeReasons + }); + } + } + + /// + /// Starts the FFMPEG. + /// + /// The state. + /// The output path. + /// The command line arguments for ffmpeg. + /// The . + /// The . + /// The cancellation token source. + /// The working directory. + /// Task. + public async Task StartFfMpeg( + StreamState state, + string outputPath, + string commandLineArguments, + HttpRequest request, + TranscodingJobType transcodingJobType, + CancellationTokenSource cancellationTokenSource, + string? 
workingDirectory = null) + { + Directory.CreateDirectory(Path.GetDirectoryName(outputPath)); + + await AcquireResources(state, cancellationTokenSource).ConfigureAwait(false); + + if (state.VideoRequest != null && !EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + var auth = _authorizationContext.GetAuthorizationInfo(request); + if (auth.User != null && !auth.User.HasPermission(PermissionKind.EnableVideoPlaybackTranscoding)) + { + this.OnTranscodeFailedToStart(outputPath, transcodingJobType, state); + + throw new ArgumentException("User does not have access to video transcoding"); + } + } + + var process = new Process() + { + StartInfo = new ProcessStartInfo() + { + WindowStyle = ProcessWindowStyle.Hidden, + CreateNoWindow = true, + UseShellExecute = false, + + // Must consume both stdout and stderr or deadlocks may occur + // RedirectStandardOutput = true, + RedirectStandardError = true, + RedirectStandardInput = true, + FileName = _mediaEncoder.EncoderPath, + Arguments = commandLineArguments, + WorkingDirectory = string.IsNullOrWhiteSpace(workingDirectory) ? null : workingDirectory, + ErrorDialog = false + }, + EnableRaisingEvents = true + }; + + var transcodingJob = this.OnTranscodeBeginning( + outputPath, + state.Request.PlaySessionId, + state.MediaSource.LiveStreamId, + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + transcodingJobType, + process, + state.Request.DeviceId, + state, + cancellationTokenSource); + + var commandLineLogMessage = process.StartInfo.FileName + " " + process.StartInfo.Arguments; + _logger.LogInformation(commandLineLogMessage); + + var logFilePrefix = "ffmpeg-transcode"; + if (state.VideoRequest != null + && EncodingHelper.IsCopyCodec(state.OutputVideoCodec)) + { + logFilePrefix = EncodingHelper.IsCopyCodec(state.OutputAudioCodec) + ? "ffmpeg-remux" + : "ffmpeg-directstream"; + } + + var logFilePath = Path.Combine(_serverConfigurationManager.ApplicationPaths.LogDirectoryPath, logFilePrefix + "-" + Guid.NewGuid() + ".txt"); + + // FFMpeg writes debug/error info to stderr. This is useful when debugging so let's put it in the log directory. + Stream logStream = new FileStream(logFilePath, FileMode.Create, FileAccess.Write, FileShare.Read, IODefaults.FileStreamBufferSize, true); + + var commandLineLogMessageBytes = Encoding.UTF8.GetBytes(request.Path + Environment.NewLine + Environment.NewLine + JsonSerializer.Serialize(state.MediaSource) + Environment.NewLine + Environment.NewLine + commandLineLogMessage + Environment.NewLine + Environment.NewLine); + await logStream.WriteAsync(commandLineLogMessageBytes, 0, commandLineLogMessageBytes.Length, cancellationTokenSource.Token).ConfigureAwait(false); + + process.Exited += (sender, args) => OnFfMpegProcessExited(process, transcodingJob, state); + + try + { + process.Start(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Error starting ffmpeg"); + + this.OnTranscodeFailedToStart(outputPath, transcodingJobType, state); + + throw; + } + + _logger.LogDebug("Launched ffmpeg process"); + state.TranscodingJob = transcodingJob; + + // Important - don't await the log task or we won't be able to kill ffmpeg when the user stops playback + _ = new JobLogger(_logger).StartStreamingLog(state, process.StandardError.BaseStream, logStream); + + // Wait for the file to exist before proceeeding + var ffmpegTargetFile = state.WaitForPath ?? 
outputPath; + _logger.LogDebug("Waiting for the creation of {0}", ffmpegTargetFile); + while (!File.Exists(ffmpegTargetFile) && !transcodingJob.HasExited) + { + await Task.Delay(100, cancellationTokenSource.Token).ConfigureAwait(false); + } + + _logger.LogDebug("File {0} created or transcoding has finished", ffmpegTargetFile); + + if (state.IsInputVideo && transcodingJob.Type == TranscodingJobType.Progressive && !transcodingJob.HasExited) + { + await Task.Delay(1000, cancellationTokenSource.Token).ConfigureAwait(false); + + if (state.ReadInputAtNativeFramerate && !transcodingJob.HasExited) + { + await Task.Delay(1500, cancellationTokenSource.Token).ConfigureAwait(false); + } + } + + if (!transcodingJob.HasExited) + { + StartThrottler(state, transcodingJob); + } + + _logger.LogDebug("StartFfMpeg() finished successfully"); + + return transcodingJob; + } + + private void StartThrottler(StreamState state, TranscodingJobDto transcodingJob) + { + if (EnableThrottling(state)) + { + transcodingJob.TranscodingThrottler = state.TranscodingThrottler = new TranscodingThrottler(transcodingJob, new Logger(new LoggerFactory()), _serverConfigurationManager, _fileSystem); + state.TranscodingThrottler.Start(); + } + } + + private bool EnableThrottling(StreamState state) + { + var encodingOptions = _serverConfigurationManager.GetEncodingOptions(); + + // enable throttling when NOT using hardware acceleration + if (string.IsNullOrEmpty(encodingOptions.HardwareAccelerationType)) + { + return state.InputProtocol == MediaProtocol.File && + state.RunTimeTicks.HasValue && + state.RunTimeTicks.Value >= TimeSpan.FromMinutes(5).Ticks && + state.IsInputVideo && + state.VideoType == VideoType.VideoFile && + !EncodingHelper.IsCopyCodec(state.OutputVideoCodec); + } + + return false; + } + + /// + /// Called when [transcode beginning]. + /// + /// The path. + /// The play session identifier. + /// The live stream identifier. + /// The transcoding job identifier. + /// The type. + /// The process. + /// The device id. + /// The state. + /// The cancellation token source. + /// TranscodingJob. + public TranscodingJobDto OnTranscodeBeginning( + string path, + string? playSessionId, + string? liveStreamId, + string transcodingJobId, + TranscodingJobType type, + Process process, + string? deviceId, + StreamState state, + CancellationTokenSource cancellationTokenSource) + { + lock (_activeTranscodingJobs) + { + var job = new TranscodingJobDto(_loggerFactory.CreateLogger()) + { + Type = type, + Path = path, + Process = process, + ActiveRequestCount = 1, + DeviceId = deviceId, + CancellationTokenSource = cancellationTokenSource, + Id = transcodingJobId, + PlaySessionId = playSessionId, + LiveStreamId = liveStreamId, + MediaSource = state.MediaSource + }; + + _activeTranscodingJobs.Add(job); + + ReportTranscodingProgress(job, state, null, null, null, null, null); + + return job; + } + } + + /// + /// + /// The progressive + /// + /// Called when [transcode failed to start]. + /// + /// The path. + /// The type. + /// The state. 
+ public void OnTranscodeFailedToStart(string path, TranscodingJobType type, StreamState state) + { + lock (_activeTranscodingJobs) + { + var job = _activeTranscodingJobs.FirstOrDefault(j => j.Type == type && string.Equals(j.Path, path, StringComparison.OrdinalIgnoreCase)); + + if (job != null) + { + _activeTranscodingJobs.Remove(job); + } + } + + lock (_transcodingLocks) + { + _transcodingLocks.Remove(path); + } + + if (!string.IsNullOrWhiteSpace(state.Request.DeviceId)) + { + _sessionManager.ClearTranscodingInfo(state.Request.DeviceId); + } + } + + /// + /// Processes the exited. + /// + /// The process. + /// The job. + /// The state. + private void OnFfMpegProcessExited(Process process, TranscodingJobDto job, StreamState state) + { + if (job != null) + { + job.HasExited = true; + } + + _logger.LogDebug("Disposing stream resources"); + state.Dispose(); + + if (process.ExitCode == 0) + { + _logger.LogInformation("FFMpeg exited with code 0"); + } + else + { + _logger.LogError("FFMpeg exited with code {0}", process.ExitCode); + } + + process.Dispose(); + } + + private async Task AcquireResources(StreamState state, CancellationTokenSource cancellationTokenSource) + { + if (state.VideoType == VideoType.Iso && state.IsoType.HasValue && _isoManager.CanMount(state.MediaPath)) + { + state.IsoMount = await _isoManager.Mount(state.MediaPath, cancellationTokenSource.Token).ConfigureAwait(false); + } + + if (state.MediaSource.RequiresOpening && string.IsNullOrWhiteSpace(state.Request.LiveStreamId)) + { + var liveStreamResponse = await _mediaSourceManager.OpenLiveStream( + new LiveStreamRequest { OpenToken = state.MediaSource.OpenToken }, + cancellationTokenSource.Token) + .ConfigureAwait(false); + + _encodingHelper.AttachMediaSourceInfo(state, liveStreamResponse.MediaSource, state.RequestedUrl); + + if (state.VideoRequest != null) + { + _encodingHelper.TryStreamCopy(state); + } + } + + if (state.MediaSource.BufferMs.HasValue) + { + await Task.Delay(state.MediaSource.BufferMs.Value, cancellationTokenSource.Token).ConfigureAwait(false); + } + } + + /// + /// Called when [transcode begin request]. + /// + /// The path. + /// The type. + /// The . + public TranscodingJobDto? OnTranscodeBeginRequest(string path, TranscodingJobType type) + { + lock (_activeTranscodingJobs) + { + var job = _activeTranscodingJobs.FirstOrDefault(j => j.Type == type && string.Equals(j.Path, path, StringComparison.OrdinalIgnoreCase)); + + if (job == null) + { + return null; + } + + OnTranscodeBeginRequest(job); + + return job; + } + } + + private void OnTranscodeBeginRequest(TranscodingJobDto job) + { + job.ActiveRequestCount++; + + if (string.IsNullOrWhiteSpace(job.PlaySessionId) || job.Type == TranscodingJobType.Progressive) + { + job.StopKillTimer(); + } + } + + /// + /// Gets the transcoding lock. + /// + /// The output path of the transcoded file. + /// A . + public SemaphoreSlim GetTranscodingLock(string outputPath) + { + lock (_transcodingLocks) + { + if (!_transcodingLocks.TryGetValue(outputPath, out SemaphoreSlim result)) + { + result = new SemaphoreSlim(1, 1); + _transcodingLocks[outputPath] = result; + } + + return result; + } + } + + /// + /// Deletes the encoded media cache. 
+
+        ///
+        /// Deletes the encoded media cache.
+        ///
+        private void DeleteEncodedMediaCache()
+        {
+            var path = _serverConfigurationManager.GetTranscodePath();
+            if (!Directory.Exists(path))
+            {
+                return;
+            }
+
+            foreach (var file in _fileSystem.GetFilePaths(path, true))
+            {
+                _fileSystem.DeleteFile(file);
+            }
+        }
+    }
+}
diff --git a/Jellyfin.Api/Models/StreamingDtos/StreamState.cs b/Jellyfin.Api/Models/StreamingDtos/StreamState.cs
new file mode 100644
index 0000000000..e95f2d1f43
--- /dev/null
+++ b/Jellyfin.Api/Models/StreamingDtos/StreamState.cs
@@ -0,0 +1,201 @@
+using System;
+using Jellyfin.Api.Helpers;
+using Jellyfin.Api.Models.PlaybackDtos;
+using MediaBrowser.Controller.Library;
+using MediaBrowser.Controller.MediaEncoding;
+using MediaBrowser.Model.Dlna;
+
+namespace Jellyfin.Api.Models.StreamingDtos
+{
+    ///
+    /// The stream state dto.
+    ///
+    public class StreamState : EncodingJobInfo, IDisposable
+    {
+        private readonly IMediaSourceManager _mediaSourceManager;
+        private readonly TranscodingJobHelper _transcodingJobHelper;
+        private bool _disposed;
+
+        ///
+        /// Initializes a new instance of the class.
+        ///
+        /// Instance of the IMediaSourceManager interface.
+        /// The transcoding job type.
+        /// The TranscodingJobHelper singleton.
+        public StreamState(IMediaSourceManager mediaSourceManager, TranscodingJobType transcodingType, TranscodingJobHelper transcodingJobHelper)
+            : base(transcodingType)
+        {
+            _mediaSourceManager = mediaSourceManager;
+            _transcodingJobHelper = transcodingJobHelper;
+        }
+
+        ///
+        /// Gets or sets the requested url.
+        ///
+        public string? RequestedUrl { get; set; }
+
+        ///
+        /// Gets or sets the request.
+        ///
+        public StreamingRequestDto Request
+        {
+            get => (StreamingRequestDto)BaseRequest;
+            set
+            {
+                BaseRequest = value;
+                IsVideoRequest = VideoRequest != null;
+            }
+        }
+
+        ///
+        /// Gets or sets the transcoding throttler.
+        ///
+        public TranscodingThrottler? TranscodingThrottler { get; set; }
+
+        ///
+        /// Gets the video request.
+        ///
+        public VideoRequestDto? VideoRequest => Request! as VideoRequestDto;
+
+        ///
+        /// Gets or sets the direct stream provider.
+        ///
+        public IDirectStreamProvider? DirectStreamProvider { get; set; }
+
+        ///
+        /// Gets or sets the path to wait for.
+        ///
+        public string? WaitForPath { get; set; }
+
+        ///
+        /// Gets a value indicating whether the request outputs video.
+        ///
+        public bool IsOutputVideo => Request is VideoRequestDto;
+
+        ///
+        /// Gets the segment length.
+        ///
+        public int SegmentLength
+        {
+            get
+            {
+                if (Request.SegmentLength.HasValue)
+                {
+                    return Request.SegmentLength.Value;
+                }
+
+                if (EncodingHelper.IsCopyCodec(OutputVideoCodec))
+                {
+                    var userAgent = UserAgent ?? string.Empty;
+
+                    if (userAgent.IndexOf("AppleTV", StringComparison.OrdinalIgnoreCase) != -1
+                        || userAgent.IndexOf("cfnetwork", StringComparison.OrdinalIgnoreCase) != -1
+                        || userAgent.IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1
+                        || userAgent.IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1
+                        || userAgent.IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1)
+                    {
+                        return 6;
+                    }
+
+                    if (IsSegmentedLiveStream)
+                    {
+                        return 3;
+                    }
+
+                    return 6;
+                }
+
+                return 3;
+            }
+        }
+
+        ///
+        /// Gets the minimum number of segments.
+        ///
+        public int MinSegments
+        {
+            get
+            {
+                if (Request.MinSegments.HasValue)
+                {
+                    return Request.MinSegments.Value;
+                }
+
+                return SegmentLength >= 10 ? 2 : 3;
+            }
+        }
+
+        ///
+        /// Gets or sets the user agent.
+        ///
+        public string? UserAgent { get; set; }
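// --- Editor's illustrative sketch (not part of this patch) -------------------
// The SegmentLength property above derives an HLS segment duration from the
// request, the output codec and the client user agent. The standalone helper
// below mirrors that decision tree so the heuristic is easy to exercise in a
// unit test; all names here are hypothetical.
using System;

internal static class SegmentLengthHeuristic
{
    internal static int Resolve(int? requested, bool isCopyCodec, string? userAgent, bool isSegmentedLiveStream)
    {
        if (requested.HasValue)
        {
            return requested.Value;
        }

        if (isCopyCodec)
        {
            var ua = userAgent ?? string.Empty;
            var isAppleClient =
                ua.IndexOf("AppleTV", StringComparison.OrdinalIgnoreCase) != -1
                || ua.IndexOf("cfnetwork", StringComparison.OrdinalIgnoreCase) != -1
                || ua.IndexOf("ipad", StringComparison.OrdinalIgnoreCase) != -1
                || ua.IndexOf("iphone", StringComparison.OrdinalIgnoreCase) != -1
                || ua.IndexOf("ipod", StringComparison.OrdinalIgnoreCase) != -1;

            // Apple clients always get 6 second segments; other stream-copy
            // clients get 3 seconds for live content and 6 otherwise.
            return isAppleClient || !isSegmentedLiveStream ? 6 : 3;
        }

        // Transcoded output defaults to 3 second segments.
        return 3;
    }
}

// Worked examples:
//   Resolve(null, isCopyCodec: true,  "AppleTV6,2",  isSegmentedLiveStream: true)  -> 6
//   Resolve(null, isCopyCodec: true,  "Mozilla/5.0", isSegmentedLiveStream: true)  -> 3
//   Resolve(null, isCopyCodec: false, "Mozilla/5.0", isSegmentedLiveStream: false) -> 3
//   Resolve(8,    isCopyCodec: true,  null,          isSegmentedLiveStream: false) -> 8
// ------------------------------------------------------------------------------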
+
+        ///
+        /// Gets or sets a value indicating whether to estimate the content length.
+        ///
+        public bool EstimateContentLength { get; set; }
+
+        ///
+        /// Gets or sets the transcode seek info.
+        ///
+        public TranscodeSeekInfo TranscodeSeekInfo { get; set; }
+
+        ///
+        /// Gets or sets a value indicating whether to enable dlna headers.
+        ///
+        public bool EnableDlnaHeaders { get; set; }
+
+        ///
+        /// Gets or sets the device profile.
+        ///
+        public DeviceProfile? DeviceProfile { get; set; }
+
+        ///
+        /// Gets or sets the transcoding job.
+        ///
+        public TranscodingJobDto? TranscodingJob { get; set; }
+
+        ///
+        public void Dispose()
+        {
+            Dispose(true);
+            GC.SuppressFinalize(this);
+        }
+
+        ///
+        public override void ReportTranscodingProgress(TimeSpan? transcodingPosition, float? framerate, double? percentComplete, long? bytesTranscoded, int? bitRate)
+        {
+            _transcodingJobHelper.ReportTranscodingProgress(TranscodingJob!, this, transcodingPosition, framerate, percentComplete, bytesTranscoded, bitRate);
+        }
+
+        ///
+        /// Disposes the stream state.
+        ///
+        /// Whether the object is currently being disposed.
+        protected virtual void Dispose(bool disposing)
+        {
+            if (_disposed)
+            {
+                return;
+            }
+
+            if (disposing)
+            {
+                // REVIEW: Is this the right place for this?
+                if (MediaSource.RequiresClosing
+                    && string.IsNullOrWhiteSpace(Request.LiveStreamId)
+                    && !string.IsNullOrWhiteSpace(MediaSource.LiveStreamId))
+                {
+                    _mediaSourceManager.CloseLiveStream(MediaSource.LiveStreamId).GetAwaiter().GetResult();
+                }
+
+                TranscodingThrottler?.Dispose();
+            }
+
+            TranscodingThrottler = null;
+            TranscodingJob = null;
+
+            _disposed = true;
+        }
+    }
+}
diff --git a/Jellyfin.Api/Models/StreamingDtos/StreamingRequestDto.cs b/Jellyfin.Api/Models/StreamingDtos/StreamingRequestDto.cs
new file mode 100644
index 0000000000..1791b03706
--- /dev/null
+++ b/Jellyfin.Api/Models/StreamingDtos/StreamingRequestDto.cs
@@ -0,0 +1,45 @@
+using MediaBrowser.Controller.MediaEncoding;
+
+namespace Jellyfin.Api.Models.StreamingDtos
+{
+    ///
+    /// The streaming request dto.
+    ///
+    public class StreamingRequestDto : BaseEncodingJobOptions
+    {
+        ///
+        /// Gets or sets the device profile id.
+        ///
+        public string? DeviceProfileId { get; set; }
+
+        ///
+        /// Gets or sets the params.
+        ///
+        public string? Params { get; set; }
+
+        ///
+        /// Gets or sets the play session id.
+        ///
+        public string? PlaySessionId { get; set; }
+
+        ///
+        /// Gets or sets the tag.
+        ///
+        public string? Tag { get; set; }
+
+        ///
+        /// Gets or sets the segment container.
+        ///
+        public string? SegmentContainer { get; set; }
+
+        ///
+        /// Gets or sets the segment length.
+        ///
+        public int? SegmentLength { get; set; }
+
+        ///
+        /// Gets or sets the min segments.
+        ///
+        public int? MinSegments { get; set; }
+    }
+}
diff --git a/Jellyfin.Api/Models/StreamingDtos/VideoRequestDto.cs b/Jellyfin.Api/Models/StreamingDtos/VideoRequestDto.cs
new file mode 100644
index 0000000000..cce2a89d49
--- /dev/null
+++ b/Jellyfin.Api/Models/StreamingDtos/VideoRequestDto.cs
@@ -0,0 +1,19 @@
+namespace Jellyfin.Api.Models.StreamingDtos
+{
+    ///
+    /// The video request dto.
+    ///
+    public class VideoRequestDto : StreamingRequestDto
+    {
+        ///
+        /// Gets a value indicating whether this instance has fixed resolution.
+        ///
+        /// true if this instance has fixed resolution; otherwise, false.
+        public bool HasFixedResolution => Width.HasValue || Height.HasValue;
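// --- Editor's illustrative sketch (not part of this patch) -------------------
// StreamingRequestDto above carries the segmenting and session knobs a client
// puts on the query string, on top of whatever BaseEncodingJobOptions already
// exposes (item id, codecs, bitrates, ...). A typical segmented audio request
// could be represented like this; the CreateExampleRequest name and the literal
// values are made up for illustration.
internal static StreamingRequestDto CreateExampleRequest() => new StreamingRequestDto
{
    PlaySessionId = "example-play-session",
    DeviceProfileId = "example-profile",
    SegmentContainer = "ts",
    SegmentLength = 6,   // StreamState.SegmentLength returns this value verbatim
    MinSegments = 2
    // Codec, bitrate and item id come from the inherited
    // BaseEncodingJobOptions members and are omitted here.
};
// ------------------------------------------------------------------------------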
+
+        ///
+        /// Gets or sets a value indicating whether to enable subtitles in the manifest.
+        ///
+        public bool EnableSubtitlesInManifest { get; set; }
+    }
+}
diff --git a/MediaBrowser.Api/Playback/Progressive/AudioService.cs b/MediaBrowser.Api/Playback/Progressive/AudioService.cs
index d51787df27..14d402d303 100644
--- a/MediaBrowser.Api/Playback/Progressive/AudioService.cs
+++ b/MediaBrowser.Api/Playback/Progressive/AudioService.cs
@@ -17,10 +17,6 @@ namespace MediaBrowser.Api.Playback.Progressive
     ///
     /// Class GetAudioStream.
     ///
-    [Route("/Audio/{Id}/stream.{Container}", "GET", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream", "GET", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream.{Container}", "HEAD", Summary = "Gets an audio stream")]
-    [Route("/Audio/{Id}/stream", "HEAD", Summary = "Gets an audio stream")]
     public class GetAudioStream : StreamRequest
     {
     }
diff --git a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
index ffbda42c31..d534e5a595 100644
--- a/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
+++ b/MediaBrowser.Controller/MediaEncoding/EncodingHelper.cs
@@ -1323,6 +1323,17 @@ namespace MediaBrowser.Controller.MediaEncoding
             return null;
         }
 
+        public int? GetAudioBitrateParam(int? audioBitRate, MediaStream audioStream)
+        {
+            if (audioBitRate.HasValue)
+            {
+                // Don't encode any higher than this
+                return Math.Min(384000, audioBitRate.Value);
+            }
+
+            return null;
+        }
+
         public string GetAudioFilterParam(EncodingJobInfo state, EncodingOptions encodingOptions, bool isHls)
         {
             var channels = state.OutputAudioChannels;
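// --- Editor's illustrative sketch (not part of this patch) -------------------
// GetAudioBitrateParam above passes the client-requested audio bitrate through
// unchanged but never lets it exceed 384 kbps, and returns null (encoder
// default) when no bitrate was requested. An equivalent standalone function and
// a few worked values; the CapAudioBitrate name is hypothetical.
static int? CapAudioBitrate(int? requested) =>
    requested.HasValue ? System.Math.Min(384000, requested.Value) : null;

// CapAudioBitrate(128000) -> 128000  (below the ceiling, passed through)
// CapAudioBitrate(512000) -> 384000  (clamped to the 384 kbps ceiling)
// CapAudioBitrate(null)   -> null    (left to encoder defaults)
// ------------------------------------------------------------------------------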