parent 22c9bc402f
commit 5e62c2335f
@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="utf-8"?>
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
<title>Anime :: Fanzub</title>
<link>http://www.fanzub.com/</link>
<description>A Usenet Search Engine for Japanese Media</description>
<language>en-us</language>
<atom:link href="http://fanzub.com/rss?cat=anime" rel="self" type="application/rss+xml" />
<item>
<title>[Vivid] Hanayamata - 10 [A33D6606]</title>
<link>http://fanzub.com/nzb/296464</link>
<description><i>Age</i>: 0 days<br /><i>Size</i>: 530.48 MiB<br /><i>Parts</i>: 100%<br /><i>Files</i>: 1 other, 8 par2<br /><i>Subject</i>: [9/9] [Vivid] Hanayamata - 10 [A33D6606].vol63+27.par2 (1/28)</description>
<category>Anime</category>
<pubDate>Sat, 13 Sep 2014 12:56:53 +0000</pubDate>
<enclosure url="http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb" length="556246858" type="application/x-nzb" />
<guid isPermaLink="false">http://fanzub.com/nzb/296464</guid>
</item>
<item>
<title>(Sniper2000) - Pokemon HD - XY 37</title>
<link>http://fanzub.com/nzb/296456</link>
<description><i>Age</i>: 0 days<br /><i>Size</i>: 2.79 GiB<br /><i>Parts</i>: 100%<br /><i>Files</i>: 1 nzb, 1 other, 77 par2, 30 rar<br /><i>Subject</i>: (Sniper2000) [108/108] - "XY 37.vol183+176.PAR2"Pokemon HD (1/272)</description>
<category>Anime</category>
<pubDate>Sat, 13 Sep 2014 12:38:03 +0000</pubDate>
<enclosure url="http://fanzub.com/nzb/296456/Sniper2000%20-%20Pokemon%20HD%20-%20XY%2037.nzb" length="2995093986" type="application/x-nzb" />
<guid isPermaLink="false">http://fanzub.com/nzb/296456</guid>
</item>
<item>
<title>[HorribleSubs] Kindaichi Case Files R - 23 [480p].mkv</title>
<link>http://fanzub.com/nzb/296472</link>
<description><i>Age</i>: 0 days<br /><i>Size</i>: 153.87 MiB<br /><i>Parts</i>: 100%<br /><i>Files</i>: 7 par2, 6 split<br /><i>Subject</i>: [HorribleSubs] Kindaichi Case Files R - 23 [480p] [13/13] - "[HorribleSubs] Kindaichi Case Files R - 23 [480p].mkv.vol31+06.par2" yEnc (1/7)</description>
<category>Anime</category>
<pubDate>Sat, 13 Sep 2014 11:51:59 +0000</pubDate>
<enclosure url="http://fanzub.com/nzb/296472/HorribleSubs%20Kindaichi%20Case%20Files%20R%20-%2023%20480p.nzb" length="161341092" type="application/x-nzb" />
<guid isPermaLink="false">http://fanzub.com/nzb/296472</guid>
</item>
</channel>
</rss>
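For reference, the fixture above is a plain RSS 2.0 feed with no default namespace, so it can be read directly with System.Xml.Linq. A minimal standalone sketch (not part of this commit; the file name "fanzub.xml" is an assumption for illustration) that lists each item's title and NZB enclosure:

using System;
using System.Xml.Linq;

public static class FanzubFeedSketch
{
    public static void Main()
    {
        // Load the fixture; no XML namespace handling is needed for this feed.
        var doc = XDocument.Load("fanzub.xml");

        foreach (var item in doc.Descendants("item"))
        {
            var title = (string)item.Element("title");
            var enclosure = item.Element("enclosure");

            // Each item carries the NZB download as an <enclosure> with url/length attributes.
            var nzbUrl = enclosure != null ? (string)enclosure.Attribute("url") : null;
            var length = enclosure != null ? (long)enclosure.Attribute("length") : 0;

            Console.WriteLine("{0} -> {1} ({2} bytes)", title, nzbUrl, length);
        }
    }
}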
File diff suppressed because it is too large
@@ -1,47 +0,0 @@
using System;
using System.Xml.Linq;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public class BasicTorrentRssParser : RssParserBase
    {
        protected override ReleaseInfo CreateNewReleaseInfo()
        {
            return new TorrentInfo();
        }

        protected override ReleaseInfo PostProcessor(XElement item, ReleaseInfo currentResult)
        {
            var torrentInfo = (TorrentInfo)currentResult;

            torrentInfo.MagnetUrl = MagnetUrl(item);
            torrentInfo.InfoHash = InfoHash(item);

            return torrentInfo;
        }

        protected override long GetSize(XElement item)
        {
            var elementLength = GetTorrentElement(item).Element("contentLength");
            return Convert.ToInt64(elementLength.Value);
        }

        protected virtual string MagnetUrl(XElement item)
        {
            var elementLength = GetTorrentElement(item).Element("magnetURI");
            return elementLength.Value;
        }

        protected virtual string InfoHash(XElement item)
        {
            var elementLength = GetTorrentElement(item).Element("infoHash");
            return elementLength.Value;
        }

        private static XElement GetTorrentElement(XElement item)
        {
            return item.Element("torrent");
        }
    }
}
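The deleted parser above expects each feed item to carry a <torrent> extension element with contentLength, magnetURI and infoHash children. A minimal standalone sketch of that item shape (element names come from the lookups above; the sample values are invented):

using System;
using System.Xml.Linq;

public static class TorrentItemSketch
{
    public static void Main()
    {
        // An in-memory item of the shape BasicTorrentRssParser reads from.
        var item = XElement.Parse(
            "<item>" +
            "<title>Example.Release.720p</title>" +
            "<torrent>" +
            "<contentLength>1073741824</contentLength>" +
            "<magnetURI>magnet:?xt=urn:btih:0000000000000000000000000000000000000000</magnetURI>" +
            "<infoHash>0000000000000000000000000000000000000000</infoHash>" +
            "</torrent>" +
            "</item>");

        var torrent = item.Element("torrent");

        // Mirrors the parser's GetSize / MagnetUrl / InfoHash lookups.
        Console.WriteLine((long)torrent.Element("contentLength"));   // 1073741824
        Console.WriteLine((string)torrent.Element("magnetURI"));
        Console.WriteLine((string)torrent.Element("infoHash"));
    }
}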
@@ -1,23 +1,22 @@
using System;
using System.Collections.Generic;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser.Model;
using NzbDrone.Core.ThingiProvider;

namespace NzbDrone.Core.Indexers
{
    public interface IIndexer : IProvider
    {
        IParseFeed Parser { get; }
        DownloadProtocol Protocol { get; }
        Int32 SupportedPageSize { get; }
        Boolean SupportsPaging { get; }
        Boolean SupportsRss { get; }
        Boolean SupportsSearch { get; }

        IEnumerable<string> RecentFeed { get; }
        IEnumerable<string> GetEpisodeSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int episodeNumber);
        IEnumerable<string> GetDailyEpisodeSearchUrls(List<String> titles, int tvRageId, DateTime date);
        IEnumerable<string> GetAnimeEpisodeSearchUrls(List<String> titles, int tvRageId, int absoluteEpisodeNumber);
        IEnumerable<string> GetSeasonSearchUrls(List<String> titles, int tvRageId, int seasonNumber, int offset);
        IEnumerable<string> GetSearchUrls(string query, int offset = 0);
        DownloadProtocol Protocol { get; }

        IList<ReleaseInfo> FetchRecent();
        IList<ReleaseInfo> Fetch(SeasonSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(SingleEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(DailyEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(AnimeEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(SpecialEpisodeSearchCriteria searchCriteria);
    }
}
@@ -1,10 +0,0 @@
using System.Collections.Generic;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public interface IParseFeed
    {
        IEnumerable<ReleaseInfo> Process(string xml, string url);
    }
}
@@ -1,197 +0,0 @@
using System;
using System.Collections.Generic;
using System.Net;
using NLog;
using NzbDrone.Common;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers.Exceptions;
using NzbDrone.Core.IndexerSearch.Definitions;
using NzbDrone.Core.Parser.Model;
using System.Linq;

namespace NzbDrone.Core.Indexers
{
    public interface IFetchFeedFromIndexers
    {
        IList<ReleaseInfo> FetchRss(IIndexer indexer);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SingleEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, DailyEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, AnimeEpisodeSearchCriteria searchCriteria);
        IList<ReleaseInfo> Fetch(IIndexer indexer, SpecialEpisodeSearchCriteria searchCriteria);
    }

    public class FetchFeedService : IFetchFeedFromIndexers
    {
        private readonly Logger _logger;
        private readonly IHttpClient _httpClient;

        public FetchFeedService(IHttpClient httpClient, Logger logger)
        {
            _httpClient = httpClient;
            _logger = logger;
        }

        public virtual IList<ReleaseInfo> FetchRss(IIndexer indexer)
        {
            _logger.Debug("Fetching feeds from " + indexer);

            var result = Fetch(indexer, indexer.RecentFeed);

            _logger.Debug("Finished processing feeds from {0} found {1} releases", indexer, result.Count);

            return result;
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria)
        {
            return Fetch(indexer, searchCriteria, 0).DistinctBy(c => c.DownloadUrl).ToList();
        }

        private IList<ReleaseInfo> Fetch(IIndexer indexer, SeasonSearchCriteria searchCriteria, int offset)
        {
            var searchUrls = indexer.GetSeasonSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, offset).ToList();

            if (searchUrls.Any())
            {
                _logger.Debug("Searching for {0} offset: {1}", searchCriteria, offset);

                var result = Fetch(indexer, searchUrls);

                _logger.Info("{0} offset {1}. Found {2}", indexer, offset, result.Count);

                if (indexer.SupportsPaging && result.Count >= indexer.SupportedPageSize && offset < 900)
                {
                    result.AddRange(Fetch(indexer, searchCriteria, offset + indexer.SupportedPageSize));
                }

                //Only log finish for the first call to this recursive method
                if (offset == 0)
                {
                    _logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);
                }

                return result;
            }

            return new List<ReleaseInfo>();
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SingleEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.SeasonNumber, searchCriteria.EpisodeNumber).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, DailyEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetDailyEpisodeSearchUrls(searchCriteria.QueryTitles, searchCriteria.Series.TvRageId, searchCriteria.AirDate).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, AnimeEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = indexer.GetAnimeEpisodeSearchUrls(searchCriteria.SceneTitles, searchCriteria.Series.TvRageId, searchCriteria.AbsoluteEpisodeNumber).ToList();
            return Fetch(indexer, searchUrls, searchCriteria);
        }

        public IList<ReleaseInfo> Fetch(IIndexer indexer, SpecialEpisodeSearchCriteria searchCriteria)
        {
            var searchUrls = new List<String>();

            foreach (var episodeQueryTitle in searchCriteria.EpisodeQueryTitles)
            {
                var urls = indexer.GetSearchUrls(episodeQueryTitle).ToList();

                if (urls.Any())
                {
                    _logger.Debug("Performing query of {0} for {1}", indexer, episodeQueryTitle);
                    searchUrls.AddRange(urls);
                }
            }

            return Fetch(indexer, searchUrls, searchCriteria);
        }

        private List<ReleaseInfo> Fetch(IIndexer indexer, IEnumerable<string> urls, SearchCriteriaBase searchCriteria)
        {
            var urlList = urls.ToList();

            if (urlList.Empty())
            {
                return new List<ReleaseInfo>();
            }

            _logger.Debug("Searching for {0}", searchCriteria);

            var result = Fetch(indexer, urlList);

            _logger.Info("Finished searching {0} for {1}. Found {2}", indexer, searchCriteria, result.Count);

            return result;
        }

        private List<ReleaseInfo> Fetch(IIndexer indexer, IEnumerable<string> urls)
        {
            var result = new List<ReleaseInfo>();

            foreach (var url in urls)
            {
                try
                {
                    _logger.Debug("Downloading Feed " + url);
                    var request = new HttpRequest(url);
                    request.Headers.Accept = "text/xml, text/rss+xml, application/rss+xml";
                    var response = _httpClient.Get(request);

                    if (response.Headers.ContentType != null && response.Headers.ContentType.Split(';')[0] == "text/html")
                    {
                        throw new WebException("Indexer responded with html content. Site is likely blocked or unavailable.");
                    }

                    var xml = response.Content;
                    if (!string.IsNullOrWhiteSpace(xml))
                    {
                        result.AddRange(indexer.Parser.Process(xml, url));
                    }
                    else
                    {
                        _logger.Warn("{0} returned empty response.", url);
                    }

                }
                catch (WebException webException)
                {
                    if (webException.Message.Contains("502") || webException.Message.Contains("503") ||
                        webException.Message.Contains("timed out"))
                    {
                        _logger.Warn("{0} server is currently unavailable. {1} {2}", indexer, url, webException.Message);
                    }
                    else
                    {
                        _logger.Warn("{0} {1} {2}", indexer, url, webException.Message);
                    }
                }
                catch (ApiKeyException)
                {
                    _logger.Warn("Invalid API Key for {0} {1}", indexer, url);
                }
                catch (Exception feedEx)
                {
                    feedEx.Data.Add("FeedUrl", url);
                    _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
                }
            }

            result = result.DistinctBy(v => v.Guid).ToList();

            result.ForEach(c =>
            {
                c.Indexer = indexer.Definition.Name;
                c.DownloadProtocol = indexer.Protocol;
            });

            return result;
        }
    }
}
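The season-search overload above pages recursively: while a full page comes back it re-queries with the offset advanced by the indexer's page size, stopping once the offset reaches 900. A standalone sketch of the same walk written iteratively (the page size and result totals here are made-up numbers, not values from the code):

using System;

public static class PagingSketch
{
    public static void Main()
    {
        const int pageSize = 100;   // stand-in for indexer.SupportedPageSize
        var offset = 0;

        while (offset < 900)
        {
            var resultsOnPage = FetchPage(offset);   // stand-in for Fetch(indexer, searchCriteria, offset)
            Console.WriteLine("offset {0}: {1} results", offset, resultsOnPage);

            if (resultsOnPage < pageSize)
            {
                break;   // a short page means there is nothing left to page through
            }

            offset += pageSize;
        }
    }

    private static int FetchPage(int offset)
    {
        // Pretend the indexer holds 230 matching releases in total.
        return Math.Max(0, Math.Min(100, 230 - offset));
    }
}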
@@ -1,168 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Xml;
using System.Xml.Linq;
using NLog;
using NzbDrone.Common.Instrumentation;
using NzbDrone.Core.Indexers.Newznab;
using NzbDrone.Core.Parser.Model;

namespace NzbDrone.Core.Indexers
{
    public abstract class RssParserBase : IParseFeed
    {
        protected readonly Logger _logger;

        protected virtual ReleaseInfo CreateNewReleaseInfo()
        {
            return new ReleaseInfo();
        }

        protected RssParserBase()
        {
            _logger = NzbDroneLogger.GetLogger(this);
        }

        public virtual IEnumerable<ReleaseInfo> Process(string xml, string url)
        {
            PreProcess(xml, url);

            using (var xmlTextReader = XmlReader.Create(new StringReader(xml), new XmlReaderSettings { DtdProcessing = DtdProcessing.Ignore, IgnoreComments = true }))
            {

                var document = XDocument.Load(xmlTextReader);
                var items = document.Descendants("item");

                var result = new List<ReleaseInfo>();

                foreach (var item in items)
                {
                    try
                    {
                        var reportInfo = ParseFeedItem(item.StripNameSpace(), url);

                        if (reportInfo != null)
                        {
                            result.Add(reportInfo);
                        }
                    }
                    catch (Exception itemEx)
                    {
                        itemEx.Data.Add("Item", item.Title());
                        _logger.ErrorException("An error occurred while processing feed item from " + url, itemEx);
                    }
                }

                return result;
            }
        }

        private ReleaseInfo ParseFeedItem(XElement item, string url)
        {
            var reportInfo = CreateNewReleaseInfo();

            reportInfo.Guid = GetGuid(item);
            reportInfo.Title = GetTitle(item);
            reportInfo.PublishDate = GetPublishDate(item);
            reportInfo.DownloadUrl = GetNzbUrl(item);
            reportInfo.InfoUrl = GetNzbInfoUrl(item);

            try
            {
                reportInfo.Size = GetSize(item);
            }
            catch (Exception)
            {
                throw new SizeParsingException("Unable to parse size from: {0} [{1}]", reportInfo.Title, url);
            }

            _logger.Trace("Parsed: {0}", reportInfo.Title);

            return PostProcessor(item, reportInfo);
        }

        protected virtual String GetGuid(XElement item)
        {
            return item.TryGetValue("guid", Guid.NewGuid().ToString());
        }

        protected virtual string GetTitle(XElement item)
        {
            return item.Title();
        }

        protected virtual DateTime GetPublishDate(XElement item)
        {
            return item.PublishDate();
        }

        protected virtual string GetNzbUrl(XElement item)
        {
            return item.Links().First();
        }

        protected virtual string GetNzbInfoUrl(XElement item)
        {
            return String.Empty;
        }

        protected abstract long GetSize(XElement item);

        protected virtual void PreProcess(string source, string url)
        {
        }

        protected virtual ReleaseInfo PostProcessor(XElement item, ReleaseInfo currentResult)
        {
            return currentResult;
        }

        private static readonly Regex ReportSizeRegex = new Regex(@"(?<value>\d+\.\d{1,2}|\d+\,\d+\.\d{1,2}|\d+)\W?(?<unit>GB|MB|GiB|MiB)",
                                                                  RegexOptions.IgnoreCase | RegexOptions.Compiled);

        public static Int64 ParseSize(String sizeString, Boolean defaultToBinaryPrefix)
        {
            var match = ReportSizeRegex.Matches(sizeString);

            if (match.Count != 0)
            {
                var cultureInfo = new CultureInfo("en-US");
                var value = Decimal.Parse(Regex.Replace(match[0].Groups["value"].Value, "\\,", ""), cultureInfo);

                var unit = match[0].Groups["unit"].Value.ToLower();

                switch (unit)
                {
                    case "kb":
                        return ConvertToBytes(Convert.ToDouble(value), 1, defaultToBinaryPrefix);
                    case "mb":
                        return ConvertToBytes(Convert.ToDouble(value), 2, defaultToBinaryPrefix);
                    case "gb":
                        return ConvertToBytes(Convert.ToDouble(value), 3, defaultToBinaryPrefix);
                    case "kib":
                        return ConvertToBytes(Convert.ToDouble(value), 1, true);
                    case "mib":
                        return ConvertToBytes(Convert.ToDouble(value), 2, true);
                    case "gib":
                        return ConvertToBytes(Convert.ToDouble(value), 3, true);
                    default:
                        return (Int64)value;
                }
            }
            return 0;
        }

        private static Int64 ConvertToBytes(Double value, Int32 power, Boolean binaryPrefix)
        {
            var prefix = binaryPrefix ? 1024 : 1000;
            var multiplier = Math.Pow(prefix, power);
            var result = value * multiplier;

            return Convert.ToInt64(result);
        }
    }
}
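ParseSize above always converts MiB/GiB with binary prefixes, so the sizes shown in the Fanzub fixture translate roughly as in this small usage sketch. It assumes a project reference to NzbDrone.Core and applies to the pre-change code, since this commit deletes the class; the byte counts are plain arithmetic (value x 1024^2 or 1024^3) and differ slightly from the feed's enclosure lengths because the feed only displays two decimals:

using System;
using NzbDrone.Core.Indexers;

public static class ParseSizeSketch
{
    public static void Main()
    {
        // "530.48 MiB" -> 530.48 * 1024^2, roughly 556,248,596 bytes
        Console.WriteLine(RssParserBase.ParseSize("530.48 MiB", true));

        // "2.79 GiB" -> 2.79 * 1024^3, roughly 2,995,739,689 bytes
        Console.WriteLine(RssParserBase.ParseSize("2.79 GiB", true));
    }
}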