New: (Cardigann) Paging Support

pull/1479/head
Qstick 2 years ago
parent 712d95e6ce
commit b42bf2cf20

@@ -29,7 +29,7 @@ namespace NzbDrone.Core.IndexerVersions
/* Update Service will fall back if version # does not exist for an indexer per Ta */
private const string DEFINITION_BRANCH = "master";
private const int DEFINITION_VERSION = 8;
private const int DEFINITION_VERSION = 9;
// Used when moving yml to C#
private readonly List<string> _definitionBlocklist = new ()

@@ -28,10 +28,6 @@ namespace NzbDrone.Core.Indexers.Cardigann
public override DownloadProtocol Protocol => DownloadProtocol.Torrent;
public override IndexerPrivacy Privacy => IndexerPrivacy.Private;
// Page size is different per indexer, setting to 1 ensures we don't break out of paging logic
// thinking its a partial page and instead all search_path requests are run for each indexer
public override int PageSize => 1;
public override TimeSpan RateLimit
{
get
@@ -47,6 +43,21 @@ namespace NzbDrone.Core.Indexers.Cardigann
}
}
public override int PageSize
{
get
{
var definition = _definitionService.GetCachedDefinition(Settings.DefinitionFile);
if (definition.Search != null && definition.Search.PageSize > 0)
{
return definition.Search.PageSize;
}
return 0;
}
}
public override IIndexerRequestGenerator GetRequestGenerator()
{
var generator = _generatorCache.Get(Settings.DefinitionFile, () =>
@@ -88,6 +99,25 @@ namespace NzbDrone.Core.Indexers.Cardigann
cleanReleases = FilterReleasesByQuery(releases, searchCriteria).ToList();
}
// Only take the request results using Offset and Limit from the search
var pageSize = PageSize;
if (pageSize > 0)
{
var minPage = searchCriteria.Offset / pageSize;
var firstResult = searchCriteria.Offset - (pageSize * minPage);
cleanReleases = cleanReleases
.Skip(firstResult)
.Take(searchCriteria.Limit).ToList();
}
else
{
cleanReleases = cleanReleases
.Skip(searchCriteria.Offset)
.Take(searchCriteria.Limit).ToList();
}
return cleanReleases;
}

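The slicing above is easier to follow with concrete numbers. Below is a minimal standalone sketch of the same arithmetic, not the commit's code (the Slice helper and the harness are illustrative): when a definition declares a page size, the request generator only fetches the pages that cover the requested window, so only the remainder of the offset within the first fetched page has to be skipped locally.

using System;
using System.Collections.Generic;
using System.Linq;

public static class PagingSliceSketch
{
    // Hypothetical helper mirroring the Offset/Limit slicing in the hunk above.
    public static List<T> Slice<T>(IReadOnlyList<T> fetched, int offset, int limit, int pageSize)
    {
        if (pageSize > 0)
        {
            // The first fetched page starts at minPage * pageSize, so only the
            // remainder of the offset needs to be skipped within the fetched set.
            var minPage = offset / pageSize;
            var firstResult = offset - (pageSize * minPage);

            return fetched.Skip(firstResult).Take(limit).ToList();
        }

        // No page size declared: the fetched set starts at result 0.
        return fetched.Skip(offset).Take(limit).ToList();
    }

    public static void Main()
    {
        // Pretend the generator fetched page 1 of a 100-per-page indexer,
        // i.e. results 100..199, to serve Offset = 120, Limit = 50.
        var fetchedPage = Enumerable.Range(100, 100).ToList();
        var slice = Slice(fetchedPage, offset: 120, limit: 50, pageSize: 100);

        Console.WriteLine($"{slice.First()}..{slice.Last()}"); // prints 120..169
    }
}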
@@ -142,6 +142,8 @@ namespace NzbDrone.Core.Indexers.Cardigann
public class SearchBlock
{
public int PageSize { get; set; }
public int FirstPageNumber { get; set; }
public string Path { get; set; }
public List<SearchPathBlock> Paths { get; set; }
public Dictionary<string, List<string>> Headers { get; set; }

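These two definition fields drive the page window that the request generator computes further down: PageSize is the number of results the site returns per page, and FirstPageNumber is the number the site uses for its first page (commonly 0 or 1). A hedged standalone sketch of that arithmetic, with illustrative names rather than the commit's API:

public readonly record struct PageWindow(int MinPage, int MaxPage);

public static class PageWindowSketch
{
    // Map the app's Offset/Limit onto the range of site pages that must be requested.
    public static PageWindow Compute(int offset, int limit, int pageSize, int firstPageNumber)
    {
        // First page that contains result number `offset`.
        var minPage = (offset / pageSize) + firstPageNumber;

        // Last page that contains result number `offset + limit - 1`.
        var maxPage = ((offset + limit - 1) / pageSize) + firstPageNumber;

        return new PageWindow(minPage, maxPage);
    }
}

// Example: Offset = 150, Limit = 100, PageSize = 100, FirstPageNumber = 1
// gives minPage = (150 / 100) + 1 = 2 and maxPage = (249 / 100) + 1 = 3,
// so pages 2 and 3 are fetched to cover results 150..249.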
@@ -997,15 +997,6 @@ namespace NzbDrone.Core.Indexers.Cardigann
private IEnumerable<IndexerRequest> GetRequest(Dictionary<string, object> variables, SearchCriteriaBase searchCriteria)
{
var limit = searchCriteria.Limit;
var offset = searchCriteria.Offset;
if (offset > 0 && limit > 0 && offset / limit > 0)
{
// Pagination doesn't work yet, this is to prevent fetching the first page multiple times.
yield break;
}
var search = _definition.Search;
var mappedCategories = _categories.MapTorznabCapsToTrackers((int[])variables[".Query.Categories"]);
@@ -1035,117 +1026,140 @@ namespace NzbDrone.Core.Indexers.Cardigann
variables[".Query.Keywords"] = string.Join(" ", keywordTokens);
variables[".Keywords"] = ApplyFilters((string)variables[".Query.Keywords"], search.Keywordsfilters, variables);
var pageSize = search.PageSize;
var minPage = 0;
var maxPage = 0;
if (pageSize > 0)
{
variables[".PageSize"] = pageSize;
minPage = (searchCriteria.Offset / pageSize) + search.FirstPageNumber;
maxPage = ((searchCriteria.Offset + searchCriteria.Limit - 1) / pageSize) + search.FirstPageNumber;
}
if (pageSize == 0 && searchCriteria.Offset >= 100)
{
// Indexer doesn't support pagination
yield break;
}
// TODO: prepare queries first and then send them parallel
var searchPaths = search.Paths;
foreach (var searchPath in searchPaths)
// Grab all pages we will need to return user requested limit and offset
for (var page = minPage; page <= maxPage; page++)
{
// skip path if categories don't match
if (searchPath.Categories != null && mappedCategories.Count > 0)
variables[".Query.Page"] = page;
foreach (var searchPath in searchPaths)
{
var invertMatch = searchPath.Categories[0] == "!";
var hasIntersect = mappedCategories.Intersect(searchPath.Categories).Any();
if (invertMatch)
// skip path if categories don't match
if (searchPath.Categories != null && mappedCategories.Count > 0)
{
hasIntersect = !hasIntersect;
}
var invertMatch = searchPath.Categories[0] == "!";
var hasIntersect = mappedCategories.Intersect(searchPath.Categories).Any();
if (invertMatch)
{
hasIntersect = !hasIntersect;
}
if (!hasIntersect)
{
continue;
if (!hasIntersect)
{
continue;
}
}
}
// build search URL
// HttpUtility.UrlPathEncode seems to only encode spaces, we use UrlEncode and replace + with %20 as a workaround
var searchUrl = ResolvePath(ApplyGoTemplateText(searchPath.Path, variables, WebUtility.UrlEncode).Replace("+", "%20")).AbsoluteUri;
var queryCollection = new List<KeyValuePair<string, string>>();
var method = HttpMethod.Get;
// build search URL
// HttpUtility.UrlPathEncode seems to only encode spaces, we use UrlEncode and replace + with %20 as a workaround
var searchUrl = ResolvePath(ApplyGoTemplateText(searchPath.Path, variables, WebUtility.UrlEncode).Replace("+", "%20")).AbsoluteUri;
var queryCollection = new List<KeyValuePair<string, string>>();
var method = HttpMethod.Get;
if (string.Equals(searchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = HttpMethod.Post;
}
if (string.Equals(searchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
{
method = HttpMethod.Post;
}
var inputsList = new List<Dictionary<string, string>>();
if (searchPath.Inheritinputs)
{
inputsList.Add(search.Inputs);
}
var inputsList = new List<Dictionary<string, string>>();
if (searchPath.Inheritinputs)
{
inputsList.Add(search.Inputs);
}
inputsList.Add(searchPath.Inputs);
inputsList.Add(searchPath.Inputs);
foreach (var inputs in inputsList)
{
if (inputs != null)
foreach (var inputs in inputsList)
{
foreach (var input in inputs)
if (inputs != null)
{
if (input.Key == "$raw")
foreach (var input in inputs)
{
var rawStr = ApplyGoTemplateText(input.Value, variables, WebUtility.UrlEncode);
foreach (var part in rawStr.Split('&'))
if (input.Key == "$raw")
{
var parts = part.Split(new char[] { '=' }, 2);
var key = parts[0];
if (key.Length == 0)
{
continue;
}
var value = "";
if (parts.Length == 2)
var rawStr = ApplyGoTemplateText(input.Value, variables, WebUtility.UrlEncode);
foreach (var part in rawStr.Split('&'))
{
value = parts[1];
var parts = part.Split(new char[] { '=' }, 2);
var key = parts[0];
if (key.Length == 0)
{
continue;
}
var value = "";
if (parts.Length == 2)
{
value = parts[1];
}
queryCollection.Add(key, value);
}
queryCollection.Add(key, value);
}
}
else
{
queryCollection.Add(input.Key, ApplyGoTemplateText(input.Value, variables));
else
{
queryCollection.Add(input.Key, ApplyGoTemplateText(input.Value, variables));
}
}
}
}
}
if (method == HttpMethod.Get)
{
if (queryCollection.Count > 0)
if (method == HttpMethod.Get)
{
searchUrl += "?" + queryCollection.GetQueryString(_encoding);
if (queryCollection.Count > 0)
{
searchUrl += "?" + queryCollection.GetQueryString(_encoding);
}
}
}
_logger.Info($"Adding request: {searchUrl}");
_logger.Info($"Adding request: {searchUrl}");
var requestBuilder = new HttpRequestBuilder(searchUrl)
{
Method = method,
Encoding = _encoding
};
var requestBuilder = new HttpRequestBuilder(searchUrl)
{
Method = method,
Encoding = _encoding
};
// Add FormData for searchs that POST
if (method == HttpMethod.Post)
{
foreach (var param in queryCollection)
// Add FormData for searchs that POST
if (method == HttpMethod.Post)
{
requestBuilder.AddFormParameter(param.Key, param.Value);
foreach (var param in queryCollection)
{
requestBuilder.AddFormParameter(param.Key, param.Value);
}
}
}
// send HTTP request
if (search.Headers != null)
{
var headers = ParseCustomHeaders(search.Headers, variables);
requestBuilder.SetHeaders(headers ?? new Dictionary<string, string>());
}
// send HTTP request
if (search.Headers != null)
{
var headers = ParseCustomHeaders(search.Headers, variables);
requestBuilder.SetHeaders(headers ?? new Dictionary<string, string>());
}
var request = requestBuilder
.WithRateLimit(_rateLimit.TotalSeconds)
.Build();
var request = requestBuilder
.WithRateLimit(_rateLimit.TotalSeconds)
.Build();
var cardigannRequest = new CardigannRequest(request, variables, searchPath)
var cardigannRequest = new CardigannRequest(request, variables, searchPath)
{
HttpRequest =
{
@@ -1153,7 +1167,8 @@ namespace NzbDrone.Core.Indexers.Cardigann
}
};
yield return cardigannRequest;
yield return cardigannRequest;
}
}
}
}

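The heart of the hunk above is the new outer loop: instead of yielding one request per search path, the generator now yields one request per search path per page in the computed window, and each iteration publishes the current page number to the definition's Go templates as ".Query.Page". A minimal sketch of that shape, with a plain string placeholder standing in for ApplyGoTemplateText and hypothetical helper names:

using System;
using System.Collections.Generic;

public static class PageLoopSketch
{
    // Yield one URL per page; a real definition would reference the page number
    // in its search path or inputs via the {{ .Query.Page }} template variable.
    public static IEnumerable<string> BuildSearchUrls(
        string pathTemplate, int minPage, int maxPage, IDictionary<string, object> variables)
    {
        for (var page = minPage; page <= maxPage; page++)
        {
            // Every template evaluated for this request can now read the page number.
            variables[".Query.Page"] = page;

            // Stand-in for the real Go-template expansion.
            yield return pathTemplate.Replace("{page}", page.ToString());
        }
    }

    public static void Main()
    {
        var variables = new Dictionary<string, object>();

        foreach (var url in BuildSearchUrls("torrents.php?page={page}", minPage: 2, maxPage: 3, variables))
        {
            Console.WriteLine(url); // torrents.php?page=2, then torrents.php?page=3
        }
    }
}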