using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Threading.Tasks;
using Jellyfin.Api.Attributes;
using Jellyfin.Api.Helpers;
using Jellyfin.Api.Models.StreamingDtos;
using MediaBrowser.Controller.MediaEncoding;
using MediaBrowser.Model.Dlna;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;

namespace Jellyfin.Api.Controllers;

/// <summary>
/// The audio controller.
/// </summary>
public class AudioController : BaseJellyfinApiController
{
    private readonly AudioHelper _audioHelper;

    private readonly TranscodingJobType _transcodingJobType = TranscodingJobType.Progressive;

    /// <summary>
    /// Initializes a new instance of the <see cref="AudioController"/> class.
    /// </summary>
    /// <param name="audioHelper">Instance of <see cref="AudioHelper"/>.</param>
    public AudioController(AudioHelper audioHelper)
    {
        _audioHelper = audioHelper;
    }

    /// <summary>
    /// Gets an audio stream.
    /// </summary>
    /// <param name="itemId">The item id.</param>
    /// <param name="container">The audio container.</param>
    /// <param name="static">Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false.</param>
    /// <param name="params">The streaming parameters.</param>
    /// <param name="tag">The tag.</param>
    /// <param name="deviceProfileId">Optional. The dlna device profile id to utilize.</param>
    /// <param name="playSessionId">The play session id.</param>
    /// <param name="segmentContainer">The segment container.</param>
    /// <param name="segmentLength">The segment length.</param>
    /// <param name="minSegments">The minimum number of segments.</param>
    /// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
    /// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
    /// <param name="audioCodec">Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma.</param>
    /// <param name="enableAutoStreamCopy">Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.</param>
    /// <param name="allowVideoStreamCopy">Whether or not to allow copying of the video stream url.</param>
    /// <param name="allowAudioStreamCopy">Whether or not to allow copying of the audio stream url.</param>
    /// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
    /// <param name="audioSampleRate">Optional. Specify a specific audio sample rate, e.g. 44100.</param>
    /// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
    /// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
    /// <param name="audioChannels">Optional. Specify a specific number of audio channels to encode to, e.g. 2.</param>
    /// <param name="maxAudioChannels">Optional. Specify a maximum number of audio channels to encode to, e.g. 2.</param>
    /// <param name="profile">Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high.</param>
    /// <param name="level">Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1.</param>
    /// <param name="framerate">Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
    /// <param name="maxFramerate">Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
    /// <param name="copyTimestamps">Whether or not to copy timestamps when transcoding with an offset. Defaults to false.</param>
    /// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000 ticks = 1 ms).</param>
    /// <param name="width">Optional. The fixed horizontal resolution of the encoded video.</param>
    /// <param name="height">Optional. The fixed vertical resolution of the encoded video.</param>
    /// <param name="videoBitRate">Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.</param>
    /// <param name="subtitleStreamIndex">Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.</param>
    /// <param name="subtitleMethod">Optional. Specify the subtitle delivery method.</param>
    /// <param name="maxRefFrames">Optional.</param>
    /// <param name="maxVideoBitDepth">Optional. The maximum video bit depth.</param>
    /// <param name="requireAvc">Optional. Whether to require avc.</param>
    /// <param name="deInterlace">Optional. Whether to deinterlace the video.</param>
    /// <param name="requireNonAnamorphic">Optional. Whether to require a non-anamorphic stream.</param>
    /// <param name="transcodingMaxAudioChannels">Optional. The maximum number of audio channels to transcode.</param>
    /// <param name="cpuCoreLimit">Optional. The limit of how many cpu cores to use.</param>
    /// <param name="liveStreamId">The live stream id.</param>
    /// <param name="enableMpegtsM2TsMode">Optional. Whether to enable the MpegtsM2Ts mode.</param>
    /// <param name="videoCodec">Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vp8, vp9, vpx (deprecated), wmv.</param>
    /// <param name="subtitleCodec">Optional. Specify a subtitle codec to encode to.</param>
    /// <param name="transcodeReasons">Optional. The transcoding reason.</param>
    /// <param name="audioStreamIndex">Optional. The index of the audio stream to use. If omitted the first audio stream will be used.</param>
    /// <param name="videoStreamIndex">Optional. The index of the video stream to use. If omitted the first video stream will be used.</param>
    /// <param name="context">Optional. The <see cref="EncodingContext"/>.</param>
    /// <param name="streamOptions">Optional. The streaming options.</param>
    /// <response code="200">Audio stream returned.</response>
    /// <returns>A <see cref="FileResult"/> containing the audio file.</returns>
    [HttpGet("{itemId}/stream", Name = "GetAudioStream")]
    [HttpHead("{itemId}/stream", Name = "HeadAudioStream")]
    [ProducesResponseType(StatusCodes.Status200OK)]
    [ProducesAudioFile]
    public async Task<ActionResult> GetAudioStream(
        [FromRoute, Required] Guid itemId,
        [FromQuery] string? container,
        [FromQuery] bool? @static,
        [FromQuery] string? @params,
        [FromQuery] string? tag,
        [FromQuery, ParameterObsolete] string? deviceProfileId,
        [FromQuery] string? playSessionId,
        [FromQuery] string? segmentContainer,
        [FromQuery] int? segmentLength,
        [FromQuery] int? minSegments,
        [FromQuery] string? mediaSourceId,
        [FromQuery] string? deviceId,
        [FromQuery] string? audioCodec,
        [FromQuery] bool? enableAutoStreamCopy,
        [FromQuery] bool? allowVideoStreamCopy,
        [FromQuery] bool? allowAudioStreamCopy,
        [FromQuery] bool? breakOnNonKeyFrames,
        [FromQuery] int? audioSampleRate,
        [FromQuery] int? maxAudioBitDepth,
        [FromQuery] int? audioBitRate,
        [FromQuery] int? audioChannels,
        [FromQuery] int? maxAudioChannels,
        [FromQuery] string? profile,
        [FromQuery] string? level,
        [FromQuery] float? framerate,
        [FromQuery] float? maxFramerate,
        [FromQuery] bool? copyTimestamps,
        [FromQuery] long? startTimeTicks,
        [FromQuery] int? width,
        [FromQuery] int? height,
        [FromQuery] int? videoBitRate,
        [FromQuery] int? subtitleStreamIndex,
        [FromQuery] SubtitleDeliveryMethod? subtitleMethod,
        [FromQuery] int? maxRefFrames,
        [FromQuery] int? maxVideoBitDepth,
        [FromQuery] bool? requireAvc,
        [FromQuery] bool? deInterlace,
        [FromQuery] bool? requireNonAnamorphic,
        [FromQuery] int? transcodingMaxAudioChannels,
        [FromQuery] int? cpuCoreLimit,
        [FromQuery] string? liveStreamId,
        [FromQuery] bool? enableMpegtsM2TsMode,
        [FromQuery] string? videoCodec,
        [FromQuery] string? subtitleCodec,
        [FromQuery] string? transcodeReasons,
        [FromQuery] int? audioStreamIndex,
        [FromQuery] int? videoStreamIndex,
        [FromQuery] EncodingContext? context,
        [FromQuery] Dictionary<string, string>? streamOptions)
    {
        StreamingRequestDto streamingRequest = new StreamingRequestDto
        {
            Id = itemId,
            Container = container,
            Static = @static ?? false,
            Params = @params,
            Tag = tag,
            PlaySessionId = playSessionId,
            SegmentContainer = segmentContainer,
            SegmentLength = segmentLength,
            MinSegments = minSegments,
            MediaSourceId = mediaSourceId,
            DeviceId = deviceId,
            AudioCodec = audioCodec,
            EnableAutoStreamCopy = enableAutoStreamCopy ?? true,
            AllowAudioStreamCopy = allowAudioStreamCopy ?? true,
            AllowVideoStreamCopy = allowVideoStreamCopy ?? true,
            BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
            AudioSampleRate = audioSampleRate,
            MaxAudioChannels = maxAudioChannels,
            AudioBitRate = audioBitRate,
            MaxAudioBitDepth = maxAudioBitDepth,
            AudioChannels = audioChannels,
            Profile = profile,
            Level = level,
            Framerate = framerate,
            MaxFramerate = maxFramerate,
            CopyTimestamps = copyTimestamps ?? false,
            StartTimeTicks = startTimeTicks,
            Width = width,
            Height = height,
            VideoBitRate = videoBitRate,
            SubtitleStreamIndex = subtitleStreamIndex,
            SubtitleMethod = subtitleMethod ?? SubtitleDeliveryMethod.Encode,
            MaxRefFrames = maxRefFrames,
            MaxVideoBitDepth = maxVideoBitDepth,
            RequireAvc = requireAvc ?? false,
            DeInterlace = deInterlace ?? false,
            RequireNonAnamorphic = requireNonAnamorphic ?? false,
            TranscodingMaxAudioChannels = transcodingMaxAudioChannels,
            CpuCoreLimit = cpuCoreLimit,
            LiveStreamId = liveStreamId,
            EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? false,
            VideoCodec = videoCodec,
            SubtitleCodec = subtitleCodec,
            TranscodeReasons = transcodeReasons,
            AudioStreamIndex = audioStreamIndex,
            VideoStreamIndex = videoStreamIndex,
            Context = context ?? EncodingContext.Static,
            StreamOptions = streamOptions
        };

        return await _audioHelper.GetAudioStream(_transcodingJobType, streamingRequest).ConfigureAwait(false);
    }
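
    // Illustrative request (an assumption for documentation purposes, not part of the controller itself):
    // assuming the base controller route maps to /Audio, omitting the optional query parameters lets the
    // defaults applied above take effect (Static = false, EnableAutoStreamCopy = true,
    // SubtitleMethod = Encode, Context = EncodingContext.Static), e.g. with a placeholder item id:
    //
    //     GET /Audio/00000000-0000-0000-0000-000000000000/stream?audioCodec=mp3&audioBitRate=128000&maxAudioChannels=2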

    /// <summary>
    /// Gets an audio stream.
    /// </summary>
    /// <param name="itemId">The item id.</param>
    /// <param name="container">The audio container.</param>
    /// <param name="static">Optional. If true, the original file will be streamed statically without any encoding. Use either no url extension or the original file extension. true/false.</param>
    /// <param name="params">The streaming parameters.</param>
    /// <param name="tag">The tag.</param>
    /// <param name="deviceProfileId">Optional. The dlna device profile id to utilize.</param>
    /// <param name="playSessionId">The play session id.</param>
    /// <param name="segmentContainer">The segment container.</param>
    /// <param name="segmentLength">The segment length.</param>
    /// <param name="minSegments">The minimum number of segments.</param>
    /// <param name="mediaSourceId">The media version id, if playing an alternate version.</param>
    /// <param name="deviceId">The device id of the client requesting. Used to stop encoding processes when needed.</param>
    /// <param name="audioCodec">Optional. Specify an audio codec to encode to, e.g. mp3. If omitted the server will auto-select using the url's extension. Options: aac, mp3, vorbis, wma.</param>
    /// <param name="enableAutoStreamCopy">Whether or not to allow automatic stream copy if requested values match the original source. Defaults to true.</param>
    /// <param name="allowVideoStreamCopy">Whether or not to allow copying of the video stream url.</param>
    /// <param name="allowAudioStreamCopy">Whether or not to allow copying of the audio stream url.</param>
    /// <param name="breakOnNonKeyFrames">Optional. Whether to break on non key frames.</param>
    /// <param name="audioSampleRate">Optional. Specify a specific audio sample rate, e.g. 44100.</param>
    /// <param name="maxAudioBitDepth">Optional. The maximum audio bit depth.</param>
    /// <param name="audioBitRate">Optional. Specify an audio bitrate to encode to, e.g. 128000. If omitted this will be left to encoder defaults.</param>
    /// <param name="audioChannels">Optional. Specify a specific number of audio channels to encode to, e.g. 2.</param>
    /// <param name="maxAudioChannels">Optional. Specify a maximum number of audio channels to encode to, e.g. 2.</param>
    /// <param name="profile">Optional. Specify an encoder profile (varies by encoder), e.g. main, baseline, high.</param>
    /// <param name="level">Optional. Specify a level for the encoder profile (varies by encoder), e.g. 3, 3.1.</param>
    /// <param name="framerate">Optional. A specific video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
    /// <param name="maxFramerate">Optional. A specific maximum video framerate to encode to, e.g. 23.976. Generally this should be omitted unless the device has specific requirements.</param>
    /// <param name="copyTimestamps">Whether or not to copy timestamps when transcoding with an offset. Defaults to false.</param>
    /// <param name="startTimeTicks">Optional. Specify a starting offset, in ticks. 1 tick = 100 ns (10,000 ticks = 1 ms).</param>
    /// <param name="width">Optional. The fixed horizontal resolution of the encoded video.</param>
    /// <param name="height">Optional. The fixed vertical resolution of the encoded video.</param>
    /// <param name="videoBitRate">Optional. Specify a video bitrate to encode to, e.g. 500000. If omitted this will be left to encoder defaults.</param>
    /// <param name="subtitleStreamIndex">Optional. The index of the subtitle stream to use. If omitted no subtitles will be used.</param>
    /// <param name="subtitleMethod">Optional. Specify the subtitle delivery method.</param>
    /// <param name="maxRefFrames">Optional.</param>
    /// <param name="maxVideoBitDepth">Optional. The maximum video bit depth.</param>
    /// <param name="requireAvc">Optional. Whether to require avc.</param>
    /// <param name="deInterlace">Optional. Whether to deinterlace the video.</param>
    /// <param name="requireNonAnamorphic">Optional. Whether to require a non-anamorphic stream.</param>
    /// <param name="transcodingMaxAudioChannels">Optional. The maximum number of audio channels to transcode.</param>
    /// <param name="cpuCoreLimit">Optional. The limit of how many cpu cores to use.</param>
    /// <param name="liveStreamId">The live stream id.</param>
    /// <param name="enableMpegtsM2TsMode">Optional. Whether to enable the MpegtsM2Ts mode.</param>
    /// <param name="videoCodec">Optional. Specify a video codec to encode to, e.g. h264. If omitted the server will auto-select using the url's extension. Options: h265, h264, mpeg4, theora, vp8, vp9, vpx (deprecated), wmv.</param>
    /// <param name="subtitleCodec">Optional. Specify a subtitle codec to encode to.</param>
    /// <param name="transcodeReasons">Optional. The transcoding reason.</param>
    /// <param name="audioStreamIndex">Optional. The index of the audio stream to use. If omitted the first audio stream will be used.</param>
    /// <param name="videoStreamIndex">Optional. The index of the video stream to use. If omitted the first video stream will be used.</param>
    /// <param name="context">Optional. The <see cref="EncodingContext"/>.</param>
    /// <param name="streamOptions">Optional. The streaming options.</param>
    /// <response code="200">Audio stream returned.</response>
    /// <returns>A <see cref="FileResult"/> containing the audio file.</returns>
    [HttpGet("{itemId}/stream.{container}", Name = "GetAudioStreamByContainer")]
    [HttpHead("{itemId}/stream.{container}", Name = "HeadAudioStreamByContainer")]
    [ProducesResponseType(StatusCodes.Status200OK)]
    [ProducesAudioFile]
    public async Task<ActionResult> GetAudioStreamByContainer(
        [FromRoute, Required] Guid itemId,
        [FromRoute, Required] string container,
        [FromQuery] bool? @static,
        [FromQuery] string? @params,
        [FromQuery] string? tag,
        [FromQuery, ParameterObsolete] string? deviceProfileId,
        [FromQuery] string? playSessionId,
        [FromQuery] string? segmentContainer,
        [FromQuery] int? segmentLength,
        [FromQuery] int? minSegments,
        [FromQuery] string? mediaSourceId,
        [FromQuery] string? deviceId,
        [FromQuery] string? audioCodec,
        [FromQuery] bool? enableAutoStreamCopy,
        [FromQuery] bool? allowVideoStreamCopy,
        [FromQuery] bool? allowAudioStreamCopy,
        [FromQuery] bool? breakOnNonKeyFrames,
        [FromQuery] int? audioSampleRate,
        [FromQuery] int? maxAudioBitDepth,
        [FromQuery] int? audioBitRate,
        [FromQuery] int? audioChannels,
        [FromQuery] int? maxAudioChannels,
        [FromQuery] string? profile,
        [FromQuery] string? level,
        [FromQuery] float? framerate,
        [FromQuery] float? maxFramerate,
        [FromQuery] bool? copyTimestamps,
        [FromQuery] long? startTimeTicks,
        [FromQuery] int? width,
        [FromQuery] int? height,
        [FromQuery] int? videoBitRate,
        [FromQuery] int? subtitleStreamIndex,
        [FromQuery] SubtitleDeliveryMethod? subtitleMethod,
        [FromQuery] int? maxRefFrames,
        [FromQuery] int? maxVideoBitDepth,
        [FromQuery] bool? requireAvc,
        [FromQuery] bool? deInterlace,
        [FromQuery] bool? requireNonAnamorphic,
        [FromQuery] int? transcodingMaxAudioChannels,
        [FromQuery] int? cpuCoreLimit,
        [FromQuery] string? liveStreamId,
        [FromQuery] bool? enableMpegtsM2TsMode,
        [FromQuery] string? videoCodec,
        [FromQuery] string? subtitleCodec,
        [FromQuery] string? transcodeReasons,
        [FromQuery] int? audioStreamIndex,
        [FromQuery] int? videoStreamIndex,
        [FromQuery] EncodingContext? context,
        [FromQuery] Dictionary<string, string>? streamOptions)
    {
        StreamingRequestDto streamingRequest = new StreamingRequestDto
        {
            Id = itemId,
            Container = container,
            Static = @static ?? false,
            Params = @params,
            Tag = tag,
            PlaySessionId = playSessionId,
            SegmentContainer = segmentContainer,
            SegmentLength = segmentLength,
            MinSegments = minSegments,
            MediaSourceId = mediaSourceId,
            DeviceId = deviceId,
            AudioCodec = audioCodec,
            EnableAutoStreamCopy = enableAutoStreamCopy ?? true,
            AllowAudioStreamCopy = allowAudioStreamCopy ?? true,
            AllowVideoStreamCopy = allowVideoStreamCopy ?? true,
            BreakOnNonKeyFrames = breakOnNonKeyFrames ?? false,
            AudioSampleRate = audioSampleRate,
            MaxAudioChannels = maxAudioChannels,
            AudioBitRate = audioBitRate,
            MaxAudioBitDepth = maxAudioBitDepth,
            AudioChannels = audioChannels,
            Profile = profile,
            Level = level,
            Framerate = framerate,
            MaxFramerate = maxFramerate,
            CopyTimestamps = copyTimestamps ?? false,
            StartTimeTicks = startTimeTicks,
            Width = width,
            Height = height,
            VideoBitRate = videoBitRate,
            SubtitleStreamIndex = subtitleStreamIndex,
            SubtitleMethod = subtitleMethod ?? SubtitleDeliveryMethod.Encode,
            MaxRefFrames = maxRefFrames,
            MaxVideoBitDepth = maxVideoBitDepth,
            RequireAvc = requireAvc ?? false,
            DeInterlace = deInterlace ?? false,
            RequireNonAnamorphic = requireNonAnamorphic ?? false,
            TranscodingMaxAudioChannels = transcodingMaxAudioChannels,
            CpuCoreLimit = cpuCoreLimit,
            LiveStreamId = liveStreamId,
            EnableMpegtsM2TsMode = enableMpegtsM2TsMode ?? false,
            VideoCodec = videoCodec,
            SubtitleCodec = subtitleCodec,
            TranscodeReasons = transcodeReasons,
            AudioStreamIndex = audioStreamIndex,
            VideoStreamIndex = videoStreamIndex,
            Context = context ?? EncodingContext.Static,
            StreamOptions = streamOptions
        };

        return await _audioHelper.GetAudioStream(_transcodingJobType, streamingRequest).ConfigureAwait(false);
    }
}
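
// Client-side sketch (illustrative only; the base address, port, and item id below are placeholders,
// and authentication is omitted). The container-specific endpoint lets the ".mp3" extension drive
// codec selection when audioCodec is not supplied:
//
//     using var client = new HttpClient { BaseAddress = new Uri("http://localhost:8096") };
//     await using var audio = await client.GetStreamAsync(
//         "/Audio/00000000-0000-0000-0000-000000000000/stream.mp3?audioBitRate=128000&maxAudioChannels=2");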