@@ -997,15 +997,6 @@ namespace NzbDrone.Core.Indexers.Cardigann
         private IEnumerable<IndexerRequest> GetRequest(Dictionary<string, object> variables, SearchCriteriaBase searchCriteria)
         {
-            var limit = searchCriteria.Limit;
-            var offset = searchCriteria.Offset;
-
-            if (offset > 0 && limit > 0 && offset / limit > 0)
-            {
-                // Pagination doesn't work yet, this is to prevent fetching the first page multiple times.
-                yield break;
-            }
-
             var search = _definition.Search;
 
             var mappedCategories = _categories.MapTorznabCapsToTrackers((int[])variables[".Query.Categories"]);
@@ -1035,117 +1026,140 @@ namespace NzbDrone.Core.Indexers.Cardigann
             variables[".Query.Keywords"] = string.Join(" ", keywordTokens);
             variables[".Keywords"] = ApplyFilters((string)variables[".Query.Keywords"], search.Keywordsfilters, variables);
 
+            var pageSize = search.PageSize;
+            var minPage = 0;
+            var maxPage = 0;
+
+            if (pageSize > 0)
+            {
+                variables[".PageSize"] = pageSize;
+                minPage = (searchCriteria.Offset / pageSize) + search.FirstPageNumber;
+                maxPage = ((searchCriteria.Offset + searchCriteria.Limit - 1) / pageSize) + search.FirstPageNumber;
+            }
+
+            if (pageSize == 0 && searchCriteria.Offset >= 100)
+            {
+                // Indexer doesn't support pagination
+                yield break;
+            }
+
             // TODO: prepare queries first and then send them parallel
             var searchPaths = search.Paths;
+
+            // Grab all pages we will need to return user requested limit and offset
+            for (var page = minPage; page <= maxPage; page++)
+            {
+                variables[".Query.Page"] = page;
+
                 foreach (var searchPath in searchPaths)
                 {
                     // skip path if categories don't match
                     if (searchPath.Categories != null && mappedCategories.Count > 0)
                     {
                         var invertMatch = searchPath.Categories[0] == "!";
                         var hasIntersect = mappedCategories.Intersect(searchPath.Categories).Any();
                         if (invertMatch)
                         {
                             hasIntersect = !hasIntersect;
                         }
 
                         if (!hasIntersect)
                         {
                             continue;
                         }
                     }
 
                     // build search URL
                     // HttpUtility.UrlPathEncode seems to only encode spaces, we use UrlEncode and replace + with %20 as a workaround
                     var searchUrl = ResolvePath(ApplyGoTemplateText(searchPath.Path, variables, WebUtility.UrlEncode).Replace("+", "%20")).AbsoluteUri;
                     var queryCollection = new List<KeyValuePair<string, string>>();
                     var method = HttpMethod.Get;
 
                     if (string.Equals(searchPath.Method, "post", StringComparison.OrdinalIgnoreCase))
                     {
                         method = HttpMethod.Post;
                     }
 
                     var inputsList = new List<Dictionary<string, string>>();
                     if (searchPath.Inheritinputs)
                     {
                         inputsList.Add(search.Inputs);
                     }
 
                     inputsList.Add(searchPath.Inputs);
 
                     foreach (var inputs in inputsList)
                     {
                         if (inputs != null)
                         {
                             foreach (var input in inputs)
                             {
                                 if (input.Key == "$raw")
                                 {
                                     var rawStr = ApplyGoTemplateText(input.Value, variables, WebUtility.UrlEncode);
                                     foreach (var part in rawStr.Split('&'))
                                     {
                                         var parts = part.Split(new char[] { '=' }, 2);
                                         var key = parts[0];
                                         if (key.Length == 0)
                                         {
                                             continue;
                                         }
 
                                         var value = "";
                                         if (parts.Length == 2)
                                         {
                                             value = parts[1];
                                         }
 
                                         queryCollection.Add(key, value);
                                     }
                                 }
                                 else
                                 {
                                     queryCollection.Add(input.Key, ApplyGoTemplateText(input.Value, variables));
                                 }
                             }
                         }
                     }
 
                     if (method == HttpMethod.Get)
                     {
                         if (queryCollection.Count > 0)
                         {
                             searchUrl += "?" + queryCollection.GetQueryString(_encoding);
                         }
                     }
 
                     _logger.Info($"Adding request: {searchUrl}");
 
                     var requestBuilder = new HttpRequestBuilder(searchUrl)
                     {
                         Method = method,
                         Encoding = _encoding
                     };
 
                     // Add FormData for searchs that POST
                     if (method == HttpMethod.Post)
                     {
                         foreach (var param in queryCollection)
                         {
                             requestBuilder.AddFormParameter(param.Key, param.Value);
                         }
                     }
 
                     // send HTTP request
                     if (search.Headers != null)
                     {
                         var headers = ParseCustomHeaders(search.Headers, variables);
                         requestBuilder.SetHeaders(headers ?? new Dictionary<string, string>());
                     }
 
                     var request = requestBuilder
                         .WithRateLimit(_rateLimit.TotalSeconds)
                         .Build();
 
                     var cardigannRequest = new CardigannRequest(request, variables, searchPath)
                     {
                         HttpRequest =
                         {
@@ -1153,7 +1167,8 @@ namespace NzbDrone.Core.Indexers.Cardigann
                         }
                     };
 
                     yield return cardigannRequest;
                 }
+            }
         }
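
For reference, the paging window added above maps the caller's Offset/Limit onto tracker page numbers with integer division, anchored at the definition's FirstPageNumber. A minimal standalone sketch of that arithmetic (the PageWindow helper and the sample values are illustrative only, not part of the Cardigann code):

    using System;

    public static class PageWindowDemo
    {
        // Same arithmetic as the minPage/maxPage computation in GetRequest:
        // integer division turns an item offset/limit into the first and last
        // tracker page that must be requested.
        public static (int MinPage, int MaxPage) PageWindow(int offset, int limit, int pageSize, int firstPageNumber)
        {
            var minPage = (offset / pageSize) + firstPageNumber;
            var maxPage = ((offset + limit - 1) / pageSize) + firstPageNumber;
            return (minPage, maxPage);
        }

        public static void Main()
        {
            // Offset 100, limit 100, 50 results per page, pages numbered from 1:
            // items 100..199 live on pages 3 and 4, so this prints "(3, 4)".
            Console.WriteLine(PageWindow(100, 100, 50, 1));
        }
    }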