Backend work for nzbx.co

pull/6/head
Mark McDowall 12 years ago
parent 8581896f58
commit 117edd4286

@@ -0,0 +1 @@
[{"ID":"571777","name":"Chicago.Fire.S01E10.720p.WEB-DL.DD5.1.H.264-KiNGS","totalpart":"10","groupID":"99","size":"890190951","postdate":"2012-12-20 18:14:13","guid":"48714abb00a095e00fbcbe161253abf6","fromname":"#cripples <masturb@ting.in.wheelchairs>","completion":"100","categoryID":"5050","imdbID":null,"anidbID":null,"rageID":"-1","comments":"0","downloads":"3","votes":{"upvotes":0,"downvotes":0}}]

@@ -0,0 +1 @@
[{"name":"30.Rock.S06E06E07.HDTV.XviD-LOL","fromname":"teevee@4u.tv (teevee)","size":418067671,"groupid":4,"categoryid":5030,"totalpart":36,"completion":100,"rageid":"-1","imdbid":"","comments":"0","guid":"97be14dbf1776eec4fb8f2bb835935c0","adddate":1355343562,"postdate":1328839361,"downloads":"0","votes":{"upvotes":0,"downvotes":0},"nzb":"https:\/\/nzbx.co\/nzb?97be14dbf1776eec4fb8f2bb835935c0*|*30.Rock.S06E06E07.HDTV.XviD-LOL"}]

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@@ -229,6 +229,38 @@ namespace NzbDrone.Core.Test
            parseResults[0].Size.Should().Be(236820890);
        }

        [Test]
        public void size_nzbx_recent()
        {
            WithConfiguredIndexers();

            Mocker.GetMock<HttpProvider>()
                    .Setup(h => h.DownloadString("https://nzbx.co/api/recent?category=tv", It.IsAny<NetworkCredential>()))
                    .Returns(File.ReadAllText(".\\Files\\Rss\\SizeParsing\\nzbx_recent.json"));

            //Act
            var parseResults = Mocker.Resolve<Nzbx>().FetchRss();

            parseResults.Should().HaveCount(1);
            parseResults[0].Size.Should().Be(890190951);
        }

        [Test]
        public void size_nzbx_search()
        {
            WithConfiguredIndexers();

            Mocker.GetMock<HttpProvider>()
                    .Setup(h => h.DownloadString("https://nzbx.co/api/search?q=30+Rock+S01E01", It.IsAny<NetworkCredential>()))
                    .Returns(File.ReadAllText(".\\Files\\Rss\\SizeParsing\\nzbx_search.json"));

            //Act
            var parseResults = Mocker.Resolve<Nzbx>().FetchEpisode("30 Rock", 1, 1);

            parseResults.Should().HaveCount(1);
            parseResults[0].Size.Should().Be(418067671);
        }

        [Test]
        public void Server_Unavailable_503_should_not_log_exception()
        {
@@ -488,5 +520,39 @@ namespace NzbDrone.Core.Test
            parseResults.Should().HaveCount(1);
            parseResults[0].NzbInfoUrl.Should().Be("http://omgwtfnzbs.com/details.php?id=OAl4g");
        }

        [Test]
        public void nzbx_parse_recent()
        {
            WithConfiguredIndexers();

            Mocker.GetMock<HttpProvider>()
                    .Setup(h => h.DownloadString(It.IsAny<String>(), It.IsAny<NetworkCredential>()))
                    .Returns(File.ReadAllText(".\\Files\\Rss\\nzbx_recent.json"));

            var parseResults = Mocker.Resolve<Nzbx>().FetchRss();

            parseResults.Should().NotBeEmpty();
            parseResults.Should().OnlyContain(s => s.Indexer == "nzbx");
            parseResults.Should().OnlyContain(s => !String.IsNullOrEmpty(s.OriginalString));
            parseResults.Should().OnlyContain(s => s.Age >= 0);
        }

        [Test]
        public void nzbx_parse_search()
        {
            WithConfiguredIndexers();

            Mocker.GetMock<HttpProvider>()
                    .Setup(h => h.DownloadString(It.IsAny<String>(), It.IsAny<NetworkCredential>()))
                    .Returns(File.ReadAllText(".\\Files\\Rss\\nzbx_search.json"));

            var parseResults = Mocker.Resolve<Nzbx>().FetchEpisode("30 Rock", 1, 1);

            parseResults.Should().NotBeEmpty();
            parseResults.Should().OnlyContain(s => s.Indexer == "nzbx");
            parseResults.Should().OnlyContain(s => !String.IsNullOrEmpty(s.OriginalString));
            parseResults.Should().OnlyContain(s => s.Age >= 0);
        }
    }
}

@@ -287,6 +287,12 @@
    <Content Include="Files\JsonError.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </Content>
    <None Include="Files\RSS\nzbx_search.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="Files\RSS\nzbx_recent.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <Content Include="Files\RSS\omgwtfnzbs.xml">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </Content>
@@ -367,6 +373,12 @@
      <SubType>Designer</SubType>
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </Content>
    <None Include="Files\RSS\SizeParsing\nzbx_recent.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="Files\RSS\SizeParsing\nzbx_search.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="Files\SceneMappings.json">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>

@@ -93,6 +93,7 @@ namespace NzbDrone.Core
            Kernel.Bind<IndexerBase>().To<NzbIndex>();
            Kernel.Bind<IndexerBase>().To<NzbClub>();
            Kernel.Bind<IndexerBase>().To<Omgwtfnzbs>();
            Kernel.Bind<IndexerBase>().To<Nzbx>();

            var indexers = Kernel.GetAll<IndexerBase>();
            Kernel.Get<IndexerProvider>().InitializeIndexers(indexers.ToList());

@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;

namespace NzbDrone.Core.Helpers.Converters
{
    public class EpochDateTimeConverter : DateTimeConverterBase
    {
        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
        {
            long ticks;

            if (value is DateTime)
            {
                var epoch = new DateTime(1970, 1, 1);
                var delta = ((DateTime)value) - epoch;

                if (delta.TotalSeconds < 0)
                {
                    throw new ArgumentOutOfRangeException("value",
                        "Unix epoch starts January 1st, 1970");
                }

                ticks = (long)delta.TotalSeconds;
            }
            else
            {
                throw new Exception("Expected date object value.");
            }

            writer.WriteValue(ticks);
        }

        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
        {
            if (reader.TokenType != JsonToken.Integer)
            {
                throw new Exception(
                    String.Format("Unexpected token parsing date. Expected Integer, got {0}.", reader.TokenType));
            }

            var ticks = (long)reader.Value;

            var date = new DateTime(1970, 1, 1);
            date = date.AddSeconds(ticks);

            return date;
        }
    }
}
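
Aside (illustrative, not part of this commit): a minimal round-trip sketch of the converter above, assuming only Json.NET and the new `NzbDrone.Core.Helpers.Converters` namespace; the wrapper class and `Main` exist purely for the example.

using System;
using Newtonsoft.Json;
using NzbDrone.Core.Helpers.Converters;

public static class EpochConverterSketch
{
    public static void Main()
    {
        var converter = new EpochDateTimeConverter();

        // 1328839361 is the "postdate" value from the search fixture above.
        var date = JsonConvert.DeserializeObject<DateTime>("1328839361", converter);
        Console.WriteLine(date);                                         // 2012-02-10 02:02:41 (epoch + 1328839361 seconds)

        // And back to the epoch value.
        Console.WriteLine(JsonConvert.SerializeObject(date, converter)); // 1328839361
    }
}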

@@ -0,0 +1,47 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace NzbDrone.Core.Model.Nzbx
{
    public class NzbxRecentItem
    {
        //"ID": "571777",
        //"name": "Cak4QCQG",
        //"totalpart": "10",
        //"groupID": "99",
        //"size": "890190951",
        //"postdate": "2012-12-20 18:14:13",
        //"guid": "48714abb00a095e00fbcbe161253abf6",
        //"fromname": "#cripples <masturb@ting.in.wheelchairs>",
        //"completion": "100",
        //"categoryID": "5050",
        //"imdbID": null,
        //"anidbID": null,
        //"rageID": "-1",
        //"comments": "0",
        //"downloads": "3",
        //"votes": {
        //    "upvotes": 0,
        //    "downvotes": 0
        //}

        public int Id { get; set; }
        public string Name { get; set; }
        public int TotalPart { get; set; }
        public int GroupId { get; set; }
        public long Size { get; set; }
        public DateTime PostDate { get; set; }
        public string Guid { get; set; }
        public string FromName { get; set; }
        public int Completion { get; set; }
        public int CategoryId { get; set; }
        public string ImdbId { get; set; }
        public string AnidbId { get; set; }
        public int RageId { get; set; }
        public int Comments { get; set; }
        public int Downloads { get; set; }
        public NzbxVotesModel Votes { get; set; }
    }
}

@@ -0,0 +1,56 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Newtonsoft.Json;
using NzbDrone.Core.Helpers.Converters;

namespace NzbDrone.Core.Model.Nzbx
{
    public class NzbxSearchItem
    {
        //"name": "30.Rock.S06E06E07.HDTV.XviD-LOL",
        //"fromname": "teevee@4u.tv (teevee)",
        //"size": 418067671,
        //"groupid": 4,
        //"categoryid": 5030,
        //"totalpart": 36,
        //"completion": 100,
        //"rageid": "-1",
        //"imdbid": "",
        //"comments": "0",
        //"guid": "97be14dbf1776eec4fb8f2bb835935c0",
        //"adddate": 1355343562,
        //"postdate": 1328839361,
        //"downloads": "0",
        //"votes": {
        //    "upvotes": 0,
        //    "downvotes": 0
        //},
        //"nzb": "https://nzbx.co/nzb?97be14dbf1776eec4fb8f2bb835935c0*|*30.Rock.S06E06E07.HDTV.XviD-LOL"

        public string Name { get; set; }
        public int TotalPart { get; set; }
        public int GroupId { get; set; }
        public long Size { get; set; }

        [JsonConverter(typeof(EpochDateTimeConverter))]
        public DateTime AddDate { get; set; }

        [JsonConverter(typeof(EpochDateTimeConverter))]
        public DateTime PostDate { get; set; }

        public string Guid { get; set; }
        public string FromName { get; set; }
        public int Completion { get; set; }
        public int CategoryId { get; set; }
        public string ImdbId { get; set; }
        public int RageId { get; set; }
        public int Comments { get; set; }
        public int Downloads { get; set; }
        public NzbxVotesModel Votes { get; set; }
        public string Nzb { get; set; }
    }
}
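
Aside (illustrative, not part of this commit): a minimal deserialization sketch against a trimmed copy of the search payload shown earlier. Property binding relies on Json.NET's default case-insensitive name matching; the shortened JSON and the wrapper class are only for the example.

using System;
using System.Collections.Generic;
using Newtonsoft.Json;
using NzbDrone.Core.Model.Nzbx;

public static class NzbxSearchItemSketch
{
    public static void Main()
    {
        // Trimmed-down version of the nzbx_search.json fixture above.
        const string json = @"[{""name"":""30.Rock.S06E06E07.HDTV.XviD-LOL"",""size"":418067671,""postdate"":1328839361,""guid"":""97be14dbf1776eec4fb8f2bb835935c0""}]";

        var items = JsonConvert.DeserializeObject<List<NzbxSearchItem>>(json);

        Console.WriteLine(items[0].Name);      // 30.Rock.S06E06E07.HDTV.XviD-LOL
        Console.WriteLine(items[0].Size);      // 418067671
        Console.WriteLine(items[0].PostDate);  // parsed through EpochDateTimeConverter
    }
}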

@@ -0,0 +1,13 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace NzbDrone.Core.Model.Nzbx
{
    public class NzbxVotesModel
    {
        public int Up { get; set; }
        public int Down { get; set; }
    }
}
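
One thing worth flagging for review: the payloads above use "upvotes"/"downvotes" as the vote keys, which Json.NET will not match to Up/Down by name alone, so the deserialized values would stay at 0 (the tests never assert on votes, so nothing catches it). If binding them is intended, an explicit mapping would do it; a sketch only, not necessarily the author's plan:

using Newtonsoft.Json;

namespace NzbDrone.Core.Model.Nzbx
{
    // Sketch: map the JSON keys explicitly so "upvotes"/"downvotes" populate the model.
    public class NzbxVotesModel
    {
        [JsonProperty("upvotes")]
        public int Up { get; set; }

        [JsonProperty("downvotes")]
        public int Down { get; set; }
    }
}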

@@ -258,6 +258,7 @@
    <Compile Include="Datastore\Migrations\SchemaInfo.cs" />
    <Compile Include="Datastore\PetaPoco\EpisodeSeasonRelator.cs" />
    <Compile Include="Fluent.cs" />
    <Compile Include="Helpers\Converters\EpochDateTimeConverter.cs" />
    <Compile Include="Helpers\SabnzbdQueueTimeConverter.cs" />
    <Compile Include="Helpers\EpisodeSortingHelper.cs" />
    <Compile Include="Helpers\SortHelper.cs" />
@@ -277,6 +278,9 @@
    <Compile Include="Model\AtomicParsleyTitleType.cs" />
    <Compile Include="Model\ConnectionInfoModel.cs" />
    <Compile Include="Model\BacklogSettingType.cs" />
    <Compile Include="Model\Nzbx\NzbxSearchItem.cs" />
    <Compile Include="Model\Nzbx\NzbxRecentItem.cs" />
    <Compile Include="Model\Nzbx\NzbxVotesModel.cs" />
    <Compile Include="Model\PostDownloadStatusType.cs" />
    <Compile Include="Model\JobQueueItem.cs" />
    <Compile Include="Model\LanguageType.cs" />
@@ -312,6 +316,7 @@
    <Compile Include="Providers\DecisionEngine\AllowedReleaseGroupSpecification.cs" />
    <Compile Include="Providers\DecisionEngine\CustomStartDateSpecification.cs" />
    <Compile Include="Providers\DownloadClients\PneumaticProvider.cs" />
    <Compile Include="Providers\Indexer\Nzbx.cs" />
    <Compile Include="Providers\Indexer\NzbClub.cs" />
    <Compile Include="Providers\Indexer\NzbIndex.cs" />
    <Compile Include="Providers\Indexer\FileSharingTalk.cs" />

@@ -15,7 +15,7 @@ namespace NzbDrone.Core.Providers.Indexer
    public abstract class IndexerBase
    {
        protected readonly Logger _logger;
-       private readonly HttpProvider _httpProvider;
+       protected readonly HttpProvider _httpProvider;
        protected readonly ConfigProvider _configProvider;

        protected static readonly Regex TitleSearchRegex = new Regex(@"[\W]", RegexOptions.IgnoreCase | RegexOptions.Compiled);
@@ -170,7 +170,7 @@ namespace NzbDrone.Core.Providers.Indexer
        }

-       private List<EpisodeParseResult> Fetch(IEnumerable<string> urls)
+       protected virtual List<EpisodeParseResult> Fetch(IEnumerable<string> urls)
        {
            var result = new List<EpisodeParseResult>();

@@ -0,0 +1,232 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.ServiceModel.Syndication;
using System.Text;
using System.Text.RegularExpressions;
using Newtonsoft.Json;
using Ninject;
using NzbDrone.Common;
using NzbDrone.Core.Model;
using NzbDrone.Core.Model.Nzbx;
using NzbDrone.Core.Providers.Core;

namespace NzbDrone.Core.Providers.Indexer
{
    class Nzbx : IndexerBase
    {
        [Inject]
        public Nzbx(HttpProvider httpProvider, ConfigProvider configProvider)
            : base(httpProvider, configProvider)
        {
        }

        public override string Name
        {
            get { return "nzbx"; }
        }

        protected override string[] Urls
        {
            get
            {
                return new string[]
                {
                    String.Format("https://nzbx.co/api/recent?category=tv")
                };
            }
        }

        public override bool IsConfigured
        {
            get
            {
                return true;
                //return !string.IsNullOrWhiteSpace(_configProvider.OmgwtfnzbsUsername) &&
                //       !string.IsNullOrWhiteSpace(_configProvider.OmgwtfnzbsApiKey);
            }
        }

        protected override IList<string> GetEpisodeSearchUrls(string seriesTitle, int seasonNumber, int episodeNumber)
        {
            var searchUrls = new List<String>();
            searchUrls.Add(String.Format("https://nzbx.co/api/search?q={0}+S{1:00}E{2:00}", seriesTitle, seasonNumber, episodeNumber));

            return searchUrls;
        }

        protected override IList<string> GetDailyEpisodeSearchUrls(string seriesTitle, DateTime date)
        {
            var searchUrls = new List<String>();
            searchUrls.Add(String.Format("https://nzbx.co/api/search?q={0}+{1:yyyy MM dd}", seriesTitle, date));

            return searchUrls;
        }

        protected override IList<string> GetSeasonSearchUrls(string seriesTitle, int seasonNumber)
        {
            var searchUrls = new List<String>();
            searchUrls.Add(String.Format("https://nzbx.co/api/search?q={0}+S{1:00}", seriesTitle, seasonNumber));

            return searchUrls;
        }

        protected override IList<string> GetPartialSeasonSearchUrls(string seriesTitle, int seasonNumber, int episodeWildcard)
        {
            var searchUrls = new List<String>();
            searchUrls.Add(String.Format("https://nzbx.co/api/search?q={0}+S{1:00}E{2}", seriesTitle, seasonNumber, episodeWildcard));

            return searchUrls;
        }

        protected override string NzbDownloadUrl(SyndicationItem item)
        {
            throw new NotImplementedException();
        }

        protected override string NzbInfoUrl(SyndicationItem item)
        {
            throw new NotImplementedException();
        }

        protected override EpisodeParseResult CustomParser(SyndicationItem item, EpisodeParseResult currentResult)
        {
            throw new NotImplementedException();
        }

        public override IList<EpisodeParseResult> FetchRss()
        {
            _logger.Debug("Fetching feeds from " + Name);

            var result = new List<EpisodeParseResult>();

            if (!IsConfigured)
            {
                _logger.Warn("Indexer '{0}' isn't configured correctly. Please reconfigure the indexer in the settings page.", Name);
                return result;
            }

            foreach (var url in Urls)
            {
                var response = Download(url);

                if (response != null)
                {
                    var feed = JsonConvert.DeserializeObject<List<NzbxRecentItem>>(response);

                    foreach (var item in feed)
                    {
                        try
                        {
                            var episodeParseResult = Parser.ParseTitle(item.Name);

                            if (episodeParseResult != null)
                            {
                                episodeParseResult.Age = DateTime.Now.Date.Subtract(item.PostDate).Days;
                                episodeParseResult.OriginalString = item.Name;
                                episodeParseResult.SceneSource = true;
                                episodeParseResult.NzbUrl = String.Format("http://nzbx.co/nzb?{0}", item.Guid);
                                episodeParseResult.Indexer = Name;
                                episodeParseResult.Size = item.Size;

                                result.Add(episodeParseResult);
                            }
                        }
                        catch (Exception itemEx)
                        {
                            itemEx.Data.Add("FeedUrl", url);
                            itemEx.Data.Add("Item", item.Name);
                            _logger.ErrorException("An error occurred while processing feed item", itemEx);
                        }
                    }
                }
            }

            _logger.Debug("Finished processing feeds from " + Name);
            return result;
        }

        protected override List<EpisodeParseResult> Fetch(IEnumerable<string> urls)
        {
            var result = new List<EpisodeParseResult>();

            if (!IsConfigured)
            {
                _logger.Warn("Indexer '{0}' isn't configured correctly. Please reconfigure the indexer in the settings page.", Name);
                return result;
            }

            foreach (var url in urls)
            {
                var response = Download(url);

                if (response != null)
                {
                    var feed = JsonConvert.DeserializeObject<List<NzbxSearchItem>>(response);

                    foreach (var item in feed)
                    {
                        try
                        {
                            var episodeParseResult = Parser.ParseTitle(item.Name);

                            if (episodeParseResult != null)
                            {
                                episodeParseResult.Age = DateTime.Now.Date.Subtract(item.PostDate).Days;
                                episodeParseResult.OriginalString = item.Name;
                                episodeParseResult.SceneSource = true;
                                episodeParseResult.NzbUrl = item.Nzb;
                                episodeParseResult.Indexer = Name;
                                episodeParseResult.Size = item.Size;

                                result.Add(episodeParseResult);
                            }
                        }
                        catch (Exception itemEx)
                        {
                            itemEx.Data.Add("FeedUrl", url);
                            itemEx.Data.Add("Item", item.Name);
                            _logger.ErrorException("An error occurred while processing feed item", itemEx);
                        }
                    }
                }
            }

            return result;
        }

        private string Download(string url)
        {
            try
            {
                _logger.Trace("Downloading RSS " + url);
                return _httpProvider.DownloadString(url, Credentials);
            }
            catch (WebException webException)
            {
                if (webException.Message.Contains("503"))
                {
                    _logger.Warn("{0} server is currently unavailable. {1} {2}", Name, url, webException.Message);
                }
                else
                {
                    webException.Data.Add("FeedUrl", url);
                    _logger.ErrorException("An error occurred while processing feed. " + url, webException);
                }
            }
            catch (Exception feedEx)
            {
                feedEx.Data.Add("FeedUrl", url);
                _logger.ErrorException("An error occurred while processing feed. " + url, feedEx);
            }

            return null;
        }
    }
}
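
Aside (illustrative, not part of this commit): since Download swallows a 503 from nzbx with a warning, that behaviour could be pinned down with a test in the style of Server_Unavailable_503_should_not_log_exception above. A minimal sketch, assuming the existing Moq/FluentAssertions test fixture; the test name and exception message are made up for illustration.

        [Test]
        public void nzbx_server_unavailable_503_should_not_throw()
        {
            WithConfiguredIndexers();

            Mocker.GetMock<HttpProvider>()
                    .Setup(h => h.DownloadString(It.IsAny<String>(), It.IsAny<NetworkCredential>()))
                    .Throws(new WebException("The remote server returned an error: (503) Server Unavailable."));

            var parseResults = Mocker.Resolve<Nzbx>().FetchRss();

            parseResults.Should().BeEmpty();
        }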