using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using FluentValidation.Results;
using NLog;
using NzbDrone.Common.Extensions;
using NzbDrone.Common.Http;
using NzbDrone.Common.TPL;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers
{
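    // Shared base class for indexers queried over HTTP. Concrete indexers supply a request
    // generator and a response parser; this class handles fetching, paging, rate limiting
    // and failure reporting.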
    public abstract class HttpIndexerBase<TSettings> : IndexerBase<TSettings>
        where TSettings : IProviderConfig, new()
    {
        protected const int MaxNumResultsPerQuery = 1000;

        private readonly IHttpClient _httpClient;

        public override bool SupportsRss { get { return true; } }
        public override bool SupportsSearch { get { return true; } }
        public bool SupportsPaging { get { return PageSize > 0; } }

        public virtual int PageSize { get { return 0; } }
        public virtual TimeSpan RateLimit { get { return TimeSpan.FromSeconds(2); } }

        public abstract IIndexerRequestGenerator GetRequestGenerator();
        public abstract IParseIndexerResponse GetParser();

        public HttpIndexerBase(IHttpClient httpClient, IIndexerStatusService indexerStatusService, IConfigService configService, IParsingService parsingService, Logger logger)
            : base(indexerStatusService, configService, parsingService, logger)
        {
            _httpClient = httpClient;
        }

        public override IList<ReleaseInfo> FetchRecent()
        {
            if (!SupportsRss)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetRecentRequests(), true);
        }

        public override IList<ReleaseInfo> Fetch(SingleEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(SeasonSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(DailyEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(AnimeEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }

        public override IList<ReleaseInfo> Fetch(SpecialEpisodeSearchCriteria searchCriteria)
        {
            if (!SupportsSearch)
            {
                return new List<ReleaseInfo>();
            }

            var generator = GetRequestGenerator();

            return FetchReleases(generator.GetSearchRequests(searchCriteria));
        }
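
        // Walks the request chain tier by tier, paging each request until a stop condition is hit:
        // a partial page, the per-query result cap, or (for RSS sync) the last release seen on the
        // previous sync. Processing stops at the first tier that returns any releases.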
        protected virtual IList<ReleaseInfo> FetchReleases(IndexerPageableRequestChain pageableRequestChain, bool isRecent = false)
        {
            var releases = new List<ReleaseInfo>();
            var url = string.Empty;

            var parser = GetParser();

            try
            {
                var fullyUpdated = false;
                ReleaseInfo lastReleaseInfo = null;

                if (isRecent)
                {
                    lastReleaseInfo = _indexerStatusService.GetLastRssSyncReleaseInfo(Definition.Id);
                }

                for (int i = 0; i < pageableRequestChain.Tiers; i++)
                {
                    var pageableRequests = pageableRequestChain.GetTier(i);

                    foreach (var pageableRequest in pageableRequests)
                    {
                        var pagedReleases = new List<ReleaseInfo>();

                        foreach (var request in pageableRequest)
                        {
                            url = request.Url.ToString();

                            var page = FetchPage(request, parser);

                            pagedReleases.AddRange(page);

                            if (isRecent && page.Any())
                            {
                                if (lastReleaseInfo == null)
                                {
                                    fullyUpdated = true;
                                    break;
                                }

                                var oldestReleaseDate = page.Select(v => v.PublishDate).Min();

                                if (oldestReleaseDate < lastReleaseInfo.PublishDate || page.Any(v => v.DownloadUrl == lastReleaseInfo.DownloadUrl))
                                {
                                    fullyUpdated = true;
                                    break;
                                }

                                if (pagedReleases.Count >= MaxNumResultsPerQuery &&
                                    oldestReleaseDate < DateTime.UtcNow - TimeSpan.FromHours(24))
                                {
                                    fullyUpdated = false;
                                    break;
                                }
                            }
                            else if (pagedReleases.Count >= MaxNumResultsPerQuery)
                            {
                                break;
                            }

                            if (!IsFullPage(page))
                            {
                                break;
                            }
                        }

                        releases.AddRange(pagedReleases);
                    }

                    if (releases.Any())
                    {
                        break;
                    }
                }

                if (isRecent && !releases.Empty())
                {
                    var ordered = releases.OrderByDescending(v => v.PublishDate).ToList();

                    if (!fullyUpdated && lastReleaseInfo != null)
                    {
                        var gapStart = lastReleaseInfo.PublishDate;
                        var gapEnd = ordered.Last().PublishDate;
                        _logger.Warn("Indexer {0} rss sync didn't cover the period between {1} and {2} UTC. Search may be required.", Definition.Name, gapStart, gapEnd);
                    }

                    lastReleaseInfo = ordered.First();
                    _indexerStatusService.UpdateRssSyncStatus(Definition.Id, lastReleaseInfo);
                }

                _indexerStatusService.RecordSuccess(Definition.Id);
            }
            catch (WebException webException)
            {
                _indexerStatusService.RecordFailure(Definition.Id);

                if (webException.Message.Contains("502") || webException.Message.Contains("503") ||
                    webException.Message.Contains("timed out"))
                {
                    _logger.Warn("{0} server is currently unavailable. {1} {2}", this, url, webException.Message);
                }
                else
                {
                    _logger.Warn("{0} {1} {2}", this, url, webException.Message);
                }
            }
            catch (HttpException httpException)
            {
                if ((int)httpException.Response.StatusCode == 429)
                {
                    _indexerStatusService.RecordFailure(Definition.Id, TimeSpan.FromHours(1));
                    _logger.Warn("API Request Limit reached for {0}", this);
                }
                else
                {
                    _indexerStatusService.RecordFailure(Definition.Id);
                    _logger.Warn("{0} {1}", this, httpException.Message);
                }
            }
            catch (RequestLimitReachedException)
            {
                _indexerStatusService.RecordFailure(Definition.Id, TimeSpan.FromHours(1));
                _logger.Warn("API Request Limit reached for {0}", this);
            }
            catch (ApiKeyException)
            {
                _indexerStatusService.RecordFailure(Definition.Id);
                _logger.Warn("Invalid API Key for {0} {1}", this, url);
            }
            catch (IndexerException ex)
            {
                _indexerStatusService.RecordFailure(Definition.Id);
                var message = string.Format("{0} - {1}", ex.Message, url);
                _logger.WarnException(message, ex);
            }
            catch (Exception feedEx)
            {
                _indexerStatusService.RecordFailure(Definition.Id);
                feedEx.Data.Add("FeedUrl", url);
                _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
            }

            return CleanupReleases(releases);
        }

        protected virtual bool IsFullPage(IList<ReleaseInfo> page)
        {
            return PageSize != 0 && page.Count >= PageSize;
        }

        protected virtual IList<ReleaseInfo> FetchPage(IndexerRequest request, IParseIndexerResponse parser)
        {
            var response = FetchIndexerResponse(request);

            return parser.ParseResponse(response).ToList();
        }
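
        // Executes a single indexer request, enforcing at least the indexer's configured rate limit.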
        protected virtual IndexerResponse FetchIndexerResponse(IndexerRequest request)
        {
            _logger.Debug("Downloading Feed " + request.Url);

            if (request.HttpRequest.RateLimit < RateLimit)
            {
                request.HttpRequest.RateLimit = RateLimit;
            }

            return new IndexerResponse(request, _httpClient.Execute(request.HttpRequest));
        }

        protected override void Test(List<ValidationFailure> failures)
        {
            failures.AddIfNotNull(TestConnection());
        }
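
        // Verifies the indexer settings by fetching the first page of recent releases.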
        protected virtual ValidationFailure TestConnection()
        {
            try
            {
                var parser = GetParser();
                var generator = GetRequestGenerator();
                var releases = FetchPage(generator.GetRecentRequests().GetAllTiers().First().First(), parser);

                if (releases.Empty())
                {
                    return new ValidationFailure(string.Empty, "No results were returned from your indexer, please check your settings.");
                }
            }
            catch (ApiKeyException)
            {
                _logger.Warn("Indexer returned result for RSS URL, API Key appears to be invalid");

                return new ValidationFailure("ApiKey", "Invalid API Key");
            }
            catch (RequestLimitReachedException)
            {
                _logger.Warn("Request limit reached");
            }
            catch (UnsupportedFeedException ex)
            {
                _logger.WarnException("Indexer feed is not supported: " + ex.Message, ex);

                return new ValidationFailure(string.Empty, "Indexer feed is not supported: " + ex.Message);
            }
            catch (Exception ex)
            {
                _logger.WarnException("Unable to connect to indexer: " + ex.Message, ex);

                return new ValidationFailure(string.Empty, "Unable to connect to indexer, check the log for more details");
            }

            return null;
        }
    }
}