Cleanup duplicate episode metadata and images

pull/6/head
Mark McDowall 11 years ago
parent eeacf9ef9f
commit 344e6b0f9c

@@ -66,5 +66,113 @@ namespace NzbDrone.Core.Test.Housekeeping.Housekeepers
            Subject.Clean();
            AllStoredModels.Count.Should().Be(1);
        }

        [Test]
        public void should_not_delete_metadata_files_when_they_are_for_the_same_episode_but_different_consumers()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeMetadata)
                .With(m => m.EpisodeFileId = 1)
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(files.Count);
        }

        [Test]
        public void should_not_delete_metadata_files_for_different_episode()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeMetadata)
                .With(m => m.Consumer = "XbmcMetadata")
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(files.Count);
        }

        [Test]
        public void should_delete_metadata_files_when_they_are_for_the_same_episode_and_consumer()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeMetadata)
                .With(m => m.EpisodeFileId = 1)
                .With(m => m.Consumer = "XbmcMetadata")
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(1);
        }

        [Test]
        public void should_not_delete_metadata_files_when_there_is_only_one_for_that_episode_and_consumer()
        {
            var file = Builder<MetadataFile>.CreateNew()
                .BuildNew();

            Db.Insert(file);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(1);
        }

        [Test]
        public void should_not_delete_image_when_they_are_for_the_same_episode_but_different_consumers()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeImage)
                .With(m => m.EpisodeFileId = 1)
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(files.Count);
        }

        [Test]
        public void should_not_delete_image_for_different_episode()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeImage)
                .With(m => m.Consumer = "XbmcMetadata")
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(files.Count);
        }

        [Test]
        public void should_delete_image_when_they_are_for_the_same_episode_and_consumer()
        {
            var files = Builder<MetadataFile>.CreateListOfSize(2)
                .All()
                .With(m => m.Type = MetadataType.EpisodeImage)
                .With(m => m.EpisodeFileId = 1)
                .With(m => m.Consumer = "XbmcMetadata")
                .BuildListOfNew();

            Db.InsertMany(files);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(1);
        }

        [Test]
        public void should_not_delete_image_when_there_is_only_one_for_that_episode_and_consumer()
        {
            var file = Builder<MetadataFile>.CreateNew()
                .BuildNew();

            Db.Insert(file);
            Subject.Clean();
            AllStoredModels.Count.Should().Be(1);
        }
    }
}

@@ -19,6 +19,8 @@ namespace NzbDrone.Core.Housekeeping.Housekeepers
            _logger.Debug("Running cleanup of duplicate metadata files");

            DeleteDuplicateSeriesMetadata();
            DeleteDuplicateEpisodeMetadata();
            DeleteDuplicateEpisodeImages();
        }

        private void DeleteDuplicateSeriesMetadata()
@@ -33,5 +35,31 @@ namespace NzbDrone.Core.Housekeeping.Housekeepers
                                         HAVING COUNT(SeriesId) > 1
                                     )");
        }

        private void DeleteDuplicateEpisodeMetadata()
        {
            var mapper = _database.GetDataMapper();

            mapper.ExecuteNonQuery(@"DELETE FROM MetadataFiles
                                     WHERE Id IN (
                                         SELECT Id FROM MetadataFiles
                                         WHERE Type = 2
                                         GROUP BY EpisodeFileId, Consumer
                                         HAVING COUNT(EpisodeFileId) > 1
                                     )");
        }

        private void DeleteDuplicateEpisodeImages()
        {
            var mapper = _database.GetDataMapper();

            mapper.ExecuteNonQuery(@"DELETE FROM MetadataFiles
                                     WHERE Id IN (
                                         SELECT Id FROM MetadataFiles
                                         WHERE Type = 5
                                         GROUP BY EpisodeFileId, Consumer
                                         HAVING COUNT(EpisodeFileId) > 1
                                     )");
        }
    }
}
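
Both new housekeepers follow the same pattern as the existing series cleanup: group MetadataFiles rows by EpisodeFileId and Consumer for the relevant Type, and delete the Id that SQLite returns for each group holding more than one entry. In the two-row cases the tests set up, that leaves exactly one copy per episode file and consumer, which is what the assertions above expect. As a rough sketch (not part of the commit), the affected groups can be previewed with a plain SELECT against the same MetadataFiles table; the Type values 2 and 5 are taken from the queries in the diff and appear to correspond to the MetadataType.EpisodeMetadata and MetadataType.EpisodeImage values used in the tests:

    -- Hypothetical preview query: lists the (Type, EpisodeFileId, Consumer)
    -- groups that the episode metadata and image housekeepers would trim.
    SELECT Type, EpisodeFileId, Consumer, COUNT(*) AS Copies
    FROM MetadataFiles
    WHERE Type IN (2, 5)          -- 2 = episode metadata, 5 = episode image
    GROUP BY Type, EpisodeFileId, Consumer
    HAVING COUNT(*) > 1;

Rows with a different Consumer or a different EpisodeFileId fall into separate groups and are never treated as duplicates, which is exactly what the "different_consumers" and "different_episode" tests assert.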
