removed indexer Fanzub - site shutdown

pull/2/head
Devin Buhl 8 years ago
parent cfe55d00ae
commit cd68eea790

@@ -1,51 +0,0 @@
using System;
using System.Linq;
using FluentAssertions;
using Moq;
using NUnit.Framework;
using NzbDrone.Common.Http;
using NzbDrone.Core.Indexers;
using NzbDrone.Core.Indexers.Fanzub;
using NzbDrone.Core.Test.Framework;

namespace NzbDrone.Core.Test.IndexerTests.FanzubTests
{
[TestFixture]
public class FanzubFixture : CoreTest<Fanzub>
{
[SetUp]
public void Setup()
{
Subject.Definition = new IndexerDefinition()
{
Name = "Fanzub",
Settings = new FanzubSettings()
};
}
[Test]
public void should_parse_recent_feed_from_fanzub()
{
var recentFeed = ReadAllText(@"Files/Indexers/Fanzub/fanzub.xml");
Mocker.GetMock<IHttpClient>()
.Setup(o => o.Execute(It.Is<HttpRequest>(v => v.Method == HttpMethod.GET)))
.Returns<HttpRequest>(r => new HttpResponse(r, new HttpHeader(), recentFeed));
var releases = Subject.FetchRecent();
releases.Should().HaveCount(3);
var releaseInfo = releases.First();
releaseInfo.Title.Should().Be("[Vivid] Hanayamata - 10 [A33D6606]");
releaseInfo.DownloadProtocol.Should().Be(DownloadProtocol.Usenet);
releaseInfo.DownloadUrl.Should().Be("http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb");
releaseInfo.InfoUrl.Should().BeNullOrEmpty();
releaseInfo.CommentUrl.Should().BeNullOrEmpty();
releaseInfo.Indexer.Should().Be(Subject.Definition.Name);
releaseInfo.PublishDate.Should().Be(DateTime.Parse("2014/09/13 12:56:53"));
releaseInfo.Size.Should().Be(556246858);
}
}
}
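
The canned feed Files/Indexers/Fanzub/fanzub.xml is not part of this diff. As a rough sketch reconstructed from the assertions above, one of its items presumably looks like the C# string below; the element layout and pubDate formatting are assumptions, while the title, enclosure URL, length and timestamp come straight from the test.

// Sketch only, not part of the commit: an RSS item consistent with the
// fixture's assertions. Element layout and date formatting are assumed.
const string sampleFanzubItem = @"
<item>
  <title>[Vivid] Hanayamata - 10 [A33D6606]</title>
  <pubDate>Sat, 13 Sep 2014 12:56:53 +0000</pubDate>
  <enclosure url=""http://fanzub.com/nzb/296464/Vivid%20Hanayamata%20-%2010.nzb"" length=""556246858"" />
</item>";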

@@ -249,7 +249,6 @@
<Compile Include="IndexerTests\NewznabTests\NewznabFixture.cs" />
<Compile Include="IndexerTests\NewznabTests\NewznabRequestGeneratorFixture.cs" />
<Compile Include="IndexerTests\NewznabTests\NewznabSettingFixture.cs" />
<Compile Include="IndexerTests\FanzubTests\FanzubFixture.cs" />
<Compile Include="IndexerTests\OmgwtfnzbsTests\OmgwtfnzbsFixture.cs" />
<Compile Include="IndexerTests\SeasonSearchFixture.cs" />
<Compile Include="IndexerTests\TestIndexer.cs" />

@@ -0,0 +1,14 @@
using FluentMigrator;
using NzbDrone.Core.Datastore.Migration.Framework;

namespace NzbDrone.Core.Datastore.Migration
{
[Migration(114)]
public class remove_fanzub : NzbDroneMigrationBase
{
protected override void MainDbUpgrade()
{
Delete.FromTable("Indexers").Row(new { Implementation = "Fanzub" });
}
}
}
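
For clarity, the Delete.FromTable("Indexers").Row(new { Implementation = "Fanzub" }) call in migration 114 boils down to a single conditional DELETE. A rough equivalent using FluentMigrator's raw-SQL API is sketched below; the class name and identifier quoting are illustrative assumptions, and the [Migration] attribute is deliberately omitted so it does not read as a second real migration.

using FluentMigrator;
using NzbDrone.Core.Datastore.Migration.Framework;

namespace NzbDrone.Core.Datastore.Migration
{
    // Sketch only, not part of the commit: the same effect as migration 114,
    // expressed as raw SQL instead of the fluent Delete expression.
    public class remove_fanzub_sql_sketch : NzbDroneMigrationBase
    {
        protected override void MainDbUpgrade()
        {
            Execute.Sql("DELETE FROM Indexers WHERE Implementation = 'Fanzub'");
        }
    }
}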

@@ -1,30 +0,0 @@
using NLog;
using NzbDrone.Common.Http;
using NzbDrone.Core.Configuration;
using NzbDrone.Core.Parser;

namespace NzbDrone.Core.Indexers.Fanzub
{
public class Fanzub : HttpIndexerBase<FanzubSettings>
{
public override string Name => "Fanzub";
public override DownloadProtocol Protocol => DownloadProtocol.Usenet;
public Fanzub(IHttpClient httpClient, IIndexerStatusService indexerStatusService, IConfigService configService, IParsingService parsingService, Logger logger)
: base(httpClient, indexerStatusService, configService, parsingService, logger)
{
}
public override IIndexerRequestGenerator GetRequestGenerator()
{
return new FanzubRequestGenerator() { Settings = Settings };
}
public override IParseIndexerResponse GetParser()
{
return new RssParser() { UseEnclosureUrl = true, UseEnclosureLength = true };
}
}
}

@@ -1,94 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using NzbDrone.Common.Extensions;
using NzbDrone.Common.Http;
using NzbDrone.Core.IndexerSearch.Definitions;

namespace NzbDrone.Core.Indexers.Fanzub
{
public class FanzubRequestGenerator : IIndexerRequestGenerator
{
private static readonly Regex RemoveCharactersRegex = new Regex(@"[!?`]", RegexOptions.Compiled);
public FanzubSettings Settings { get; set; }
public int PageSize { get; set; }
public FanzubRequestGenerator()
{
PageSize = 100;
}
public virtual IndexerPageableRequestChain GetRecentRequests()
{
var pageableRequests = new IndexerPageableRequestChain();
pageableRequests.Add(GetPagedRequests(null));
return pageableRequests;
}
public virtual IndexerPageableRequestChain GetSearchRequests(SingleEpisodeSearchCriteria searchCriteria)
{
return new IndexerPageableRequestChain();
}
public virtual IndexerPageableRequestChain GetSearchRequests(SeasonSearchCriteria searchCriteria)
{
return new IndexerPageableRequestChain();
}
public virtual IndexerPageableRequestChain GetSearchRequests(DailyEpisodeSearchCriteria searchCriteria)
{
return new IndexerPageableRequestChain();
}
public virtual IndexerPageableRequestChain GetSearchRequests(AnimeEpisodeSearchCriteria searchCriteria)
{
var pageableRequests = new IndexerPageableRequestChain();
var searchTitles = searchCriteria.QueryTitles.SelectMany(v => GetTitleSearchStrings(v, searchCriteria.AbsoluteEpisodeNumber)).ToList();
pageableRequests.Add(GetPagedRequests(string.Join("|", searchTitles)));
return pageableRequests;
}
public virtual IndexerPageableRequestChain GetSearchRequests(SpecialEpisodeSearchCriteria searchCriteria)
{
return new IndexerPageableRequestChain();
}
private IEnumerable<IndexerRequest> GetPagedRequests(string query)
{
var url = new StringBuilder();
url.AppendFormat("{0}?cat=anime&max={1}", Settings.BaseUrl, PageSize);
if (query.IsNotNullOrWhiteSpace())
{
url.AppendFormat("&q={0}", query);
}
yield return new IndexerRequest(url.ToString(), HttpAccept.Rss);
}
private IEnumerable<string> GetTitleSearchStrings(string title, int absoluteEpisodeNumber)
{
var formats = new[] { "{0}%20{1:00}", "{0}%20-%20{1:00}" };
return formats.Select(s => "\"" + string.Format(s, CleanTitle(title), absoluteEpisodeNumber) + "\"");
}
private string CleanTitle(string title)
{
return RemoveCharactersRegex.Replace(title, "");
}
public IndexerPageableRequestChain GetSearchRequests(MovieSearchCriteria searchCriteria)
{
return new IndexerPageableRequestChain();
}
}
}
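
To make the generated request concrete: a small self-contained sketch of the URL GetSearchRequests(AnimeEpisodeSearchCriteria) would build via GetPagedRequests, using the default BaseUrl from FanzubSettings and the default PageSize of 100. The title "Hanayamata" and episode 10 are made-up inputs for the example, not values from this commit.

using System;
using System.Linq;

public static class FanzubQuerySketch
{
    public static void Main()
    {
        // Defaults taken from FanzubSettings and the generator's constructor.
        var baseUrl = "http://fanzub.com/rss/";
        var pageSize = 100;

        // Mirrors GetTitleSearchStrings: two quoted variants per title.
        var formats = new[] { "{0}%20{1:00}", "{0}%20-%20{1:00}" };
        var titles = formats.Select(f => "\"" + string.Format(f, "Hanayamata", 10) + "\"");

        // Mirrors GetPagedRequests: cat/max plus the joined q parameter.
        var url = string.Format("{0}?cat=anime&max={1}&q={2}", baseUrl, pageSize, string.Join("|", titles));

        // Prints: http://fanzub.com/rss/?cat=anime&max=100&q="Hanayamata%2010"|"Hanayamata%20-%2010"
        Console.WriteLine(url);
    }
}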

@@ -1,33 +0,0 @@
using FluentValidation;
using NzbDrone.Core.Annotations;
using NzbDrone.Core.ThingiProvider;
using NzbDrone.Core.Validation;

namespace NzbDrone.Core.Indexers.Fanzub
{
public class FanzubSettingsValidator : AbstractValidator<FanzubSettings>
{
public FanzubSettingsValidator()
{
RuleFor(c => c.BaseUrl).ValidRootUrl();
}
}
public class FanzubSettings : IProviderConfig
{
private static readonly FanzubSettingsValidator Validator = new FanzubSettingsValidator();
public FanzubSettings()
{
BaseUrl = "http://fanzub.com/rss/";
}
[FieldDefinition(0, Label = "Rss URL", HelpText = "Enter the URL to a Fanzub compatible RSS feed")]
public string BaseUrl { get; set; }
public NzbDroneValidationResult Validate()
{
return new NzbDroneValidationResult(Validator.Validate(this));
}
}
}
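
A brief sketch of how the validator above is exercised; it assumes NzbDroneValidationResult surfaces FluentValidation's IsValid flag, which is how indexer settings are checked elsewhere in the code base, and the class and method names here are illustrative.

using NzbDrone.Core.Indexers.Fanzub;

public static class FanzubSettingsValidationSketch
{
    // Sketch only, not part of the commit.
    public static void Demo()
    {
        var defaults = new FanzubSettings();                // BaseUrl defaults to http://fanzub.com/rss/
        var broken = new FanzubSettings { BaseUrl = "" };

        var ok = defaults.Validate();    // expected: ok.IsValid == true
        var bad = broken.Validate();     // expected: bad.IsValid == false, empty URL fails ValidRootUrl
    }
}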

@@ -185,6 +185,7 @@
<Compile Include="Datastore\Migration\004_updated_history.cs" />
<Compile Include="Datastore\Migration\111_remove_bitmetv.cs" />
<Compile Include="Datastore\Migration\112_remove_torrentleech.cs" />
<Compile Include="Datastore\Migration\114_remove_fanzub.cs" />
<Compile Include="Datastore\Migration\113_remove_broadcasthenet.cs" />
<Compile Include="Datastore\Migration\108_update_schedule_interval.cs" />
<Compile Include="Datastore\Migration\107_fix_movie_files.cs" />
@@ -582,9 +583,6 @@
<Compile Include="Indexers\Exceptions\RequestLimitReachedException.cs" />
<Compile Include="Indexers\Exceptions\UnsupportedFeedException.cs" />
<Compile Include="Indexers\EzrssTorrentRssParser.cs" />
<Compile Include="Indexers\Fanzub\Fanzub.cs" />
<Compile Include="Indexers\Fanzub\FanzubRequestGenerator.cs" />
<Compile Include="Indexers\Fanzub\FanzubSettings.cs" />
<Compile Include="Indexers\FetchAndParseRssService.cs" />
<Compile Include="Indexers\PassThePopcorn\PassThePopcorn.cs" />
<Compile Include="Indexers\PassThePopcorn\PassThePopcornApi.cs" />
