diff --git a/Libraries/Migrator.NET/Migrator.Framework.dll b/Libraries/Migrator.NET/Migrator.Framework.dll new file mode 100644 index 000000000..97ceaeea3 Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.Framework.dll differ diff --git a/Libraries/Migrator.NET/Migrator.Framework.pdb b/Libraries/Migrator.NET/Migrator.Framework.pdb new file mode 100644 index 000000000..01826492f Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.Framework.pdb differ diff --git a/Libraries/Migrator.NET/Migrator.Framework.xml b/Libraries/Migrator.NET/Migrator.Framework.xml new file mode 100644 index 000000000..41fba991d --- /dev/null +++ b/Libraries/Migrator.NET/Migrator.Framework.xml @@ -0,0 +1,914 @@ + + + + Migrator.Framework + + + + + A migration is a group of transformation applied to the database schema + (or sometimes data) to port the database from one version to another. + The Up() method must apply the modifications (eg.: create a table) + and the Down() method must revert, or rollback the modifications + (eg.: delete a table). + + Each migration must be decorated with the [Migration(0)] attribute. + Each migration number (0) must be unique, or else a + DuplicatedVersionException will be trown. + + + All migrations are executed inside a transaction. If an exception is + thrown, the transaction will be rolledback and transformations wont be + applied. + + + It is best to keep a limited number of transformation inside a migration + so you can easely move from one version of to another with fine grain + modifications. + You should give meaningful name to the migration class and prepend the + migration number to the filename so they keep ordered, eg.: + 002_CreateTableTest.cs. + + + Use the Database property to apply transformation and the + Logger property to output informations in the console (or other). + For more details on transformations see + ITransformationProvider. + + + + The following migration creates a new Customer table. + (File 003_AddCustomerTable.cs) + + [Migration(3)] + public class AddCustomerTable : Migration + { + public override void Up() + { + Database.AddTable("Customer", + new Column("Name", typeof(string), 50), + new Column("Address", typeof(string), 100) + ); + } + public override void Down() + { + Database.RemoveTable("Customer"); + } + } + + + + + + Defines tranformations to port the database to the current version. + + + + + This is run after the Up transaction has been committed + + + + + Defines transformations to revert things done in Up. + + + + + This is run after the Down transaction has been committed + + + + + This gets called once on the first migration object. + + + + + Represents the database. + . + + Migration.Framework.ITransformationProvider + + + + Defines tranformations to port the database to the current version. + + + + + This is run after the Up transaction has been committed + + + + + Defines transformations to revert things done in Up. + + + + + This is run after the Down transaction has been committed + + + + + This gets called once on the first migration object. + + + + + Represents the database. + . + + Migration.Framework.ITransformationProvider + + + + Log that we have started a migration + + Start list of versions + Final Version + + + + Log that we are migrating up + + Version we are migrating to + Migration name + + + + Log that we are migrating down + + Version we are migrating to + Migration name + + + + Inform that a migration corresponding to the number of + version is untraceable (not found?) and will be ignored. 
+ + Version we couldnt find + + + + Log that we are rolling back to version + + + version + + + + + Log a Sql statement that changes the schema or content of the database as part of a migration + + + SELECT statements should not be logged using this method as they do not alter the data or schema of the + database. + + The Sql statement to log + + + + Log that we had an exception on a migration + + The version of the migration that caused the exception. + The name of the migration that caused the exception. + The exception itself + + + + Log that we had an exception on a migration + + An informative message to show to the user. + The exception itself + + + + Log that we have finished a migration + + List of versions with which we started + Final Version + + + + Log a message + + The format string ("{0}, blabla {1}"). + Parameters to apply to the format string. + + + + Log a Warning + + The format string ("{0}, blabla {1}"). + Parameters to apply to the format string. + + + + Log a Trace Message + + The format string ("{0}, blabla {1}"). + Parameters to apply to the format string. + + + + Base class for migration errors. + + + + + Return the plural of a word. + + The singular form + The plural form of + + + + Return the singular of a word. + + The plural form + The singular form of + + + + Capitalizes a word. + + The word to be capitalized. + capitalized. + + + + Convert a classname to something more readable. + ex.: CreateATable => Create a table + + + + + + + + + + + + + + + + Handles writing a message to the log medium (i.e. file, console) + + + + + Write this message + + + + + + + Write this message, as a line + + + + + + + Represents a table column properties. + + + + + Null is allowable + + + + + Null is not allowable + + + + + Identity column, autoinc + + + + + Unique Column + + + + + Indexed Column + + + + + Unsigned Column + + + + + Foreign Key + + + + + Primary Key + + + + + Primary key. Make the column a PrimaryKey and unsigned + + + + + A set of extension methods for the transformation provider to make it easier to + build many-to-many joining tables (takes care of adding the joining table and foreign + key constraints as necessary. + This functionality was useful when bootstrapping a number of projects a few years ago, but + now that most changes are brown-field I'm thinking of removing these methods as it's easier to maintain + code that creates the tables etc. directly within migration. + + + + + Describe a migration + + + + + Describe the migration + + The unique version of the migration. + + + + The version reflected by the migration + + + + + Set to true to ignore this migration. + + + + + The main interface to use in Migrations to make changes on a database schema. 
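The ColumnProperty flags above are bit flags intended to be ORed together when declaring a column. As a rough sketch of how they combine with the Migration base class and the provider's AddTable/RemoveTable members documented in this file (the table, column names and version number are illustrative, and the four-argument Column constructor is assumed to mirror the documented AddColumn overloads):

    [Migration(20110515)]
    public class AddUserTable : Migration
    {
        public override void Up()
        {
            // Flags such as NotNull and Unique are ORed together per column.
            Database.AddTable("User",
                new Column("Id", typeof(int), ColumnProperty.PrimaryKey),
                new Column("Login", typeof(string), 50, ColumnProperty.NotNull | ColumnProperty.Unique),
                new Column("Email", typeof(string), 100, ColumnProperty.Null));
        }

        public override void Down()
        {
            // Down() reverts exactly what Up() created.
            Database.RemoveTable("User");
        }
    }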
+ + + + + Add a column to an existing table + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + The precision or size of the column + Properties that can be ORed together + The default value of the column if no value is given in a query + + + + Add a column to an existing table + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + + + + Add a column to an existing table + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + The precision or size of the column + + + + Add a column to an existing table + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + The precision or size of the column + Properties that can be ORed together + + + + Add a column to an existing table + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + Properties that can be ORed together + + + + Add a column to an existing table with the default column size. + + The name of the table that will get the new column + The name of the new column + The data type for the new columnd + The default value of the column if no value is given in a query + + + + Add a column to an existing table + + The name of the table that will get the new column + An instance of a Column with the specified properties + + + + Add a foreign key constraint + + The name of the foreign key. e.g. FK_TABLE_REF + The table that the foreign key will be created in (eg. Table.FK_id) + The columns that are the foreign keys (eg. FK_id) + The table that holds the primary keys (eg. Table.PK_id) + The columns that are the primary keys (eg. PK_id) + + + + Add a foreign key constraint + + The name of the foreign key. e.g. FK_TABLE_REF + The table that the foreign key will be created in (eg. Table.FK_id) + The columns that are the foreign keys (eg. FK_id) + The table that holds the primary keys (eg. Table.PK_id) + The columns that are the primary keys (eg. PK_id) + Constraint parameters + + + + Add a foreign key constraint + + + The name of the foreign key. e.g. FK_TABLE_REF + The table that the foreign key will be created in (eg. Table.FK_id) + The column that is the foreign key (eg. FK_id) + The table that holds the primary keys (eg. Table.PK_id) + The column that is the primary key (eg. PK_id) + + + + Add a foreign key constraint + + The name of the foreign key. e.g. FK_TABLE_REF + The table that the foreign key will be created in (eg. Table.FK_id) + The column that is the foreign key (eg. FK_id) + The table that holds the primary key (eg. Table.PK_id) + The column that is the primary key (eg. PK_id) + Constraint parameters + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The table that the foreign key will be created in (eg. Table.FK_id) + The column that is the foreign key (eg. FK_id) + The table that holds the primary key (eg. Table.PK_id) + The column that is the primary key (eg. PK_id) + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The table that the foreign key will be created in (eg. Table.FK_id) + The columns that are the foreign keys (eg. 
FK_id) + The table that holds the primary key (eg. Table.PK_id) + The column that is the primary key (eg. PK_id) + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The table that the foreign key will be created in (eg. Table.FK_id) + The columns that are the foreign keys (eg. FK_id) + The table that holds the primary key (eg. Table.PK_id) + The columns that are the primary keys (eg. PK_id) + Constraint parameters + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The table that the foreign key will be created in (eg. Table.FK_id) + The columns that are the foreign keys (eg. FK_id) + The table that holds the primary key (eg. Table.PK_id) + The column that is the primary key (eg. PK_id) + Constraint parameters + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The current expectations are that there is a column named the same as the foreignTable present in + the table. This is subject to change because I think it's not a good convention. + + The table that the foreign key will be created in (eg. Table.FK_id) + The table that holds the primary key (eg. Table.PK_id) + + + + Add a foreign key constraint when you don't care about the name of the constraint. + Warning: This will prevent you from dropping the constraint since you won't know the name. + + The current expectations are that there is a column named the same as the foreignTable present in + the table. This is subject to change because I think it's not a good convention. + + The table that the foreign key will be created in (eg. Table.FK_id) + The table that holds the primary key (eg. Table.PK_id) + + + + + Add a primary key to a table + + The name of the primary key to add. + The name of the table that will get the primary key. + The name of the column or columns that are in the primary key. + + + + Add a constraint to a table + + The name of the constraint to add. + The name of the table that will get the constraint + The name of the column or columns that will get the constraint. + + + + Add a constraint to a table + + The name of the constraint to add. + The name of the table that will get the constraint + The check constraint definition. + + + + Add a table + + The name of the table to add. + The columns that are part of the table. + + + + Add a table + + The name of the table to add. + The name of the database engine to use. (MySQL) + The columns that are part of the table. + + + + Start a transction + + + + + Change the definition of an existing column. + + The name of the table that will get the new column + An instance of a Column with the specified properties and the name of an existing column + + + + Check to see if a column exists + + + + + + + + Commit the running transction + + + + + Check to see if a constraint exists + + The name of the constraint + The table that the constraint lives on. + + + + + Check to see if a primary key constraint exists on the table + + The name of the primary key + The table that the constraint lives on. + + + + + Execute an arbitrary SQL query + + The SQL to execute. + + + + + Execute an arbitrary SQL query + + The SQL to execute. 
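The named AddForeignKey overloads above exist precisely so the constraint can be dropped again by name; a hedged sketch of that round trip inside a migration (the constraint, table and column names are invented for illustration):

    public override void Up()
    {
        // AddForeignKey(name, foreignTable, foreignColumn, primaryTable, primaryColumn)
        Database.AddForeignKey("FK_Order_Customer", "Order", "CustomerId", "Customer", "Id");
    }

    public override void Down()
    {
        // RemoveForeignKey takes the owning table first, then the constraint name.
        Database.RemoveForeignKey("Order", "FK_Order_Customer");
    }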
+ + + + + Execute an arbitrary SQL query + + The SQL to execute. + A single value that is returned. + + + + Get the information about the columns in a table + + The table name that you want the columns for. + + + + + Get information about a single column in a table + + The table name that you want the columns for. + The column name for which you want information. + + + + + Get the names of all of the tables + + The names of all the tables. + + + + Insert data into a table + + The table that will get the new data + The names of the columns + The values in the same order as the columns + + + + + Delete data from a table + + The table that will have the data deleted + The names of the columns used in a where clause + The values in the same order as the columns + + + + + Delete data from a table + + The table that will have the data deleted + The name of the column used in a where clause + The value for the where clause + + + + + Marks a Migration version number as having been applied + + The version number of the migration that was applied + + + + Marks a Migration version number as having been rolled back from the database + + The version number of the migration that was removed + + + + Remove an existing column from a table + + The name of the table to remove the column from + The column to remove + + + + Remove an existing foreign key constraint + + The table that contains the foreign key. + The name of the foreign key to remove + + + + Remove an existing constraint + + The table that contains the foreign key. + The name of the constraint to remove + + + + Remove an existing table + + The name of the table + + + + Rename an existing table + + The old name of the table + The new name of the table + + + + Rename an existing table + + The name of the table + The old name of the column + The new name of the column + + + + Rollback the currently running transaction. + + + + + Get values from a table + + The columns to select + The table to select from + The where clause to limit the selection + + + + + Get values from a table + + The columns to select + The table to select from + + + + + Get a single value from a table + + The columns to select + The table to select from + + + + + + Get a single value from a table + + The columns to select + The table to select from + + + + + Check if a table already exists + + The name of the table that you want to check on. + + + + + Update the values in a table + + The name of the table to update + The names of the columns. + The values for the columns in the same order as the names. + + + + + Update the values in a table + + The name of the table to update + The names of the columns. + The values for the columns in the same order as the names. + A where clause to limit the update + + + + + Get a command instance + + + + + + Execute a schema builder + + + + + + Quote a multiple column names, if required + + + + + + + Quaote column if required + + + + + + + Quote table name if required + + + + + + + Encodes a guid value as a string, suitable for inclusion in sql statement + + + + + + + Get this provider or a NoOp provider if you are not running in the context of 'provider'. + + + + + The list of Migrations currently applied to the database. + + + + + Logger used to log details of operations performed during migration + + + + + ILogger interface. + Implicit in this interface is that the logger will delegate actual + logging to the (s) that have been attached + + + + + Attach an + + + + + + Detach an + + + + + + Represents a table column. 
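Alongside the schema operations, the Insert/Update/Delete/Select members above cover simple data fixes inside a migration. A short sketch, assuming the column and value lists are plain string arrays as the comments suggest (the table name and values are placeholders, not part of this changeset):

    public override void Up()
    {
        // Values are positional and must line up with the column list.
        Database.Insert("Setting", new[] { "Key", "Value" }, new[] { "SabPriority", "Normal" });
    }

    public override void Down()
    {
        // Delete(table, column, value) removes the seeded row again.
        Database.Delete("Setting", "Key", "SabPriority");
    }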
+ + + + + Adds a Table to be created to the Schema + + Table name to be created + SchemaBuilder for chaining + + + + Reference an existing table. + + Table to reference + SchemaBuilder for chaining + + + + Reference an existing table. + + Table to reference + SchemaBuilder for chaining + + + + Adds a Column to be created + + Column name to be added + IColumnOptions to restrict chaining + + + + Text logger for the migration mediator + + + + diff --git a/Libraries/Migrator.NET/Migrator.Providers.dll b/Libraries/Migrator.NET/Migrator.Providers.dll new file mode 100644 index 000000000..85fe26cef Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.Providers.dll differ diff --git a/Libraries/Migrator.NET/Migrator.Providers.pdb b/Libraries/Migrator.NET/Migrator.Providers.pdb new file mode 100644 index 000000000..a9136da55 Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.Providers.pdb differ diff --git a/Libraries/Migrator.NET/Migrator.Providers.xml b/Libraries/Migrator.NET/Migrator.Providers.xml new file mode 100644 index 000000000..9b02d1e53 --- /dev/null +++ b/Libraries/Migrator.NET/Migrator.Providers.xml @@ -0,0 +1,389 @@ + + + + Migrator.Providers + + + + + Summary description for SQLiteTransformationProvider. + + + + + Base class for every transformation providers. + A 'tranformation' is an operation that modifies the database. + + + + + Add a new table + + Table name + Columns + + Adds the Test table with two columns: + + Database.AddTable("Test", + new Column("Id", typeof(int), ColumnProperty.PrimaryKey), + new Column("Title", typeof(string), 100) + ); + + + + + + Add a new table + + Table name + Columns + the database storage engine to use + + Adds the Test table with two columns: + + Database.AddTable("Test", "INNODB", + new Column("Id", typeof(int), ColumnProperty.PrimaryKey), + new Column("Title", typeof(string), 100) + ); + + + + + + Add a new column to an existing table. + + Table to which to add the column + Column name + Date type of the column + Max length of the column + Properties of the column, see ColumnProperty, + Default value + + + + + AddColumn(string, string, Type, int, ColumnProperty, object) + + + + + + + AddColumn(string, string, Type, int, ColumnProperty, object) + + + + + + + AddColumn(string, string, Type, int, ColumnProperty, object) + + + + + + + AddColumn(string, string, Type, int, ColumnProperty, object) + + + + + + Append a primary key to a table. + + Constraint name + Table name + Primary column names + + + + Guesses the name of the foreign key and add it + + + + + Guesses the name of the foreign key and add it + + + + + Guesses the name of the foreign key and add it + + + + + Guesses the name of the foreign key and add it + + + + + Append a foreign key (relation) between two tables. + tables. + + Constraint name + Table name containing the primary key + Primary key column name + Foreign table name + Foreign column name + + + + + AddForeignKey(string, string, string, string, string) + + + + + + Determines if a constraint exists. + + Constraint name + Table owning the constraint + true if the constraint exists. + + + + Execute an SQL query returning results. + + The SQL command. + A data iterator, IDataReader. + + + + Starts a transaction. Called by the migration mediator. + + + + + Rollback the current migration. Called by the migration mediator. + + + + + Commit the current transaction. Called by the migrations mediator. 
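BeginTransaction, Commit and Rollback above are the hooks the migration mediator uses to run each migration atomically. An illustrative sketch of that control flow, not the actual Migrator.NET mediator (the version parameter type and the helper's signature are assumptions):

    void ApplyMigration(ITransformationProvider database, Migration migration, long version)
    {
        database.BeginTransaction();
        try
        {
            migration.Up();                      // apply the schema/data changes
            database.MigrationApplied(version);  // record the version as applied
            database.Commit();
        }
        catch
        {
            database.Rollback();                 // the failed migration leaves no trace
            throw;
        }
    }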
+ + + + + Marks a Migration version number as having been applied + + The version number of the migration that was applied + + + + Marks a Migration version number as having been rolled back from the database + + The version number of the migration that was removed + + + + Returns the event logger + + + + + The list of Migrations currently applied to the database. + + + + + Turn something like 'columnName INTEGER NOT NULL' into just 'columnName' + + + + + Name is the first value before the space. + + + + + + + No Op (Null Object Pattern) implementation of the ITransformationProvider + + + + + Defines the implementations specific details for a particular database. + + + + + Subclasses register a typename for the given type code and maximum + column length. $l in the type name will be replaced by the column + length (if appropriate) + + The typecode + Maximum length of database type + The database type name + + + + Suclasses register a typename for the given type code. $l in the + typename will be replaced by the column length (if appropriate). + + The typecode + The database type name + + + + Get the name of the database type associated with the given + + The DbType + The database type name used by ddl. + + + + Get the name of the database type associated with the given + + The DbType + The database type name used by ddl. + + + + + Get the name of the database type associated with the given + + The DbType + The database type name used by ddl. + + + + + + + Subclasses register which DbTypes are unsigned-compatible (ie, available in signed and unsigned variants) + + + + + + Determine if a particular database type has an unsigned variant + + The DbType + True if the database type has an unsigned variant, otherwise false + + + + Migration transformations provider for Microsoft SQL Server. + + + + + Migration transformations provider for Microsoft SQL Server. + + + + + Migration transformations provider for PostgreSql (using NPGSql .Net driver) + + + + + This is basically a just a helper base class + per-database implementors may want to override ColumnSql + + + + The SQL type + + + The name of the column + + + + the type of the column + + + + + Sql if This column is Indexed + + + + + Sql if this column has a default value + + + + + The sql for this column, override in database-specific implementation classes + + + + + This class maps a DbType to names. + + + Associations may be marked with a capacity. Calling the Get() + method with a type and actual size n will return the associated + name with smallest capacity >= n, if available and an unmarked + default type otherwise. 
+ Eg, setting + + Names.Put(DbType, "TEXT" ); + Names.Put(DbType, 255, "VARCHAR($l)" ); + Names.Put(DbType, 65534, "LONGVARCHAR($l)" ); + + will give you back the following: + + Names.Get(DbType) // --> "TEXT" (default) + Names.Get(DbType,100) // --> "VARCHAR(100)" (100 is in [0:255]) + Names.Get(DbType,1000) // --> "LONGVARCHAR(1000)" (100 is in [256:65534]) + Names.Get(DbType,100000) // --> "TEXT" (default) + + On the other hand, simply putting + + Names.Put(DbType, "VARCHAR($l)" ); + + would result in + + Names.Get(DbType) // --> "VARCHAR($l)" (will cause trouble) + Names.Get(DbType,100) // --> "VARCHAR(100)" + Names.Get(DbType,1000) // --> "VARCHAR(1000)" + Names.Get(DbType,10000) // --> "VARCHAR(10000)" + + + + + + Get default type name for specified type + + the type key + the default type name associated with the specified key + + + + Get the type name specified type and size + + the type key + the SQL length + the SQL scale + the SQL precision + + The associated name with smallest capacity >= size if available and the + default type name otherwise + + + + + Set a type name for specified type key and capacity + + the type key + the (maximum) type size/length + The associated name + + + + + + + + + + + Summary description for MySqlTransformationProvider. + + + + diff --git a/Libraries/Migrator.NET/Migrator.dll b/Libraries/Migrator.NET/Migrator.dll new file mode 100644 index 000000000..fbcf23e2a Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.dll differ diff --git a/Libraries/Migrator.NET/Migrator.pdb b/Libraries/Migrator.NET/Migrator.pdb new file mode 100644 index 000000000..832d105b1 Binary files /dev/null and b/Libraries/Migrator.NET/Migrator.pdb differ diff --git a/NzbDrone.Core.Test/DiskScanProviderTest.cs b/NzbDrone.Core.Test/DiskScanProviderTest.cs index b33778588..386766810 100644 --- a/NzbDrone.Core.Test/DiskScanProviderTest.cs +++ b/NzbDrone.Core.Test/DiskScanProviderTest.cs @@ -5,6 +5,7 @@ using FizzWare.NBuilder; using FluentAssertions; using Moq; using NUnit.Framework; +using NzbDrone.Core.Model; using NzbDrone.Core.Providers; using NzbDrone.Core.Providers.Core; using NzbDrone.Core.Repository; @@ -18,122 +19,78 @@ namespace NzbDrone.Core.Test public class DiskScanProviderTest : TestBase { [Test] - public void import_new_file() + public void import_new_file_should_succeed() { - //Arrange - ///////////////////////////////////////// + const string newFile = @"WEEDS.S03E01.DUAL.dvd.HELLYWOOD.avi"; - //Constants - const string fileName = @"WEEDS.S03E01.DUAL.BDRip.XviD.AC3.-HELLYWOOD.avi"; - const int seasonNumber = 3; - const int episodeNumner = 1; - const int size = 12345; - - //Fakes var fakeSeries = Builder.CreateNew().Build(); - var fakeEpisode = Builder.CreateNew() - .With(c => c.SeriesId = fakeSeries.SeriesId) - .With(c => c.SeasonNumber = seasonNumber) - .Build(); + var fakeEpisode = Builder.CreateNew().Build(); //Mocks var mocker = new AutoMoqer(); mocker.GetMock() - .Setup(e => e.GetSize(fileName)).Returns(12345).Verifiable(); - - var database = mocker.GetMock(MockBehavior.Strict); - database.Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); - database.Setup(r => r.Insert(It.IsAny())).Returns(1).Verifiable(); + .Setup(e => e.GetSize(newFile)).Returns(12345).Verifiable(); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())) + .Returns(false); mocker.GetMock() - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, seasonNumber, episodeNumner)).Returns(fakeEpisode); + .Setup(e => e.GetEpisodesByParseResult(It.IsAny(), false)).Returns(new 
List { fakeEpisode }); //Act - var result = mocker.Resolve().ImportFile(fakeSeries, fileName); + var result = mocker.Resolve().ImportFile(fakeSeries, newFile); //Assert - Assert.IsNotNull(result); - mocker.GetMock().Verify(r => r.Insert(result), Times.Once()); - mocker.VerifyAllMocks(); + VerifyFileImport(result, mocker, fakeEpisode, 12345); - result.SeasonNumber.Should().Be(fakeEpisode.SeasonNumber); - - Assert.AreEqual(fakeEpisode.SeriesId, result.SeriesId); - Assert.AreEqual(QualityTypes.DVD, result.Quality); - Assert.AreEqual(Parser.NormalizePath(fileName), result.Path); - Assert.AreEqual(size, result.Size); - Assert.AreEqual(false, result.Proper); - Assert.AreNotEqual(new DateTime(), result.DateAdded); } - [TestCase(QualityTypes.SDTV, true)] + [TestCase(QualityTypes.SDTV, false)] [TestCase(QualityTypes.DVD, true)] [TestCase(QualityTypes.HDTV, false)] - public void import_new_file_with_better_same_quality(QualityTypes currentFileQuality, bool currentFileProper) + public void import_new_file_with_better_same_quality_should_succeed(QualityTypes currentFileQuality, bool currentFileProper) { - const string newFile = @"WEEDS.S03E01.DUAL.1080p.-HELLYWOOD.mkv"; - const int seasonNumber = 3; - const int episodeNumner = 1; + const string newFile = @"WEEDS.S03E01.DUAL.1080p.HELLYWOOD.mkv"; const int size = 12345; //Fakes var fakeSeries = Builder.CreateNew().Build(); var fakeEpisode = Builder.CreateNew() - .With(c => c.SeriesId = fakeSeries.SeriesId) - .With(c => c.SeasonNumber = seasonNumber) .With(e => e.EpisodeFile = Builder.CreateNew() .With(g => g.Quality = (QualityTypes)currentFileQuality) .And(g => g.Proper = currentFileProper).Build() - ) - .Build(); + ).Build(); - //Mocks var mocker = new AutoMoqer(); mocker.GetMock() .Setup(e => e.GetSize(newFile)).Returns(12345).Verifiable(); - var database = mocker.GetMock(MockBehavior.Strict); - database.Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); - database.Setup(r => r.Insert(It.IsAny())).Returns(1).Verifiable(); mocker.GetMock() - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, seasonNumber, episodeNumner)).Returns(fakeEpisode); + .Setup(e => e.GetEpisodesByParseResult(It.IsAny(), false)).Returns(new List { fakeEpisode }); //Act var result = mocker.Resolve().ImportFile(fakeSeries, newFile); //Assert - Assert.IsNotNull(result); - mocker.GetMock().Verify(r => r.Insert(result), Times.Once()); - mocker.VerifyAllMocks(); + VerifyFileImport(result, mocker, fakeEpisode, size); + } - result.SeasonNumber.Should().Be(fakeEpisode.SeasonNumber); - Assert.AreEqual(fakeEpisode.SeriesId, result.SeriesId); - Assert.AreEqual(QualityTypes.HDTV, result.Quality); - Assert.AreEqual(Parser.NormalizePath(newFile), result.Path); - Assert.AreEqual(size, result.Size); - Assert.AreEqual(false, result.Proper); - Assert.AreNotEqual(new DateTime(), result.DateAdded); - } [TestCase("WEEDS.S03E01.DUAL.DVD.XviD.AC3.-HELLYWOOD.avi")] [TestCase("WEEDS.S03E01.DUAL.SDTV.XviD.AC3.-HELLYWOOD.avi")] - public void import_new_file_skip_if_episode_has_same_or_better_quality(string fileName) + public void import_new_file_episode_has_same_or_better_quality_should_skip(string fileName) { - const int seasonNumber = 3; - const int episodeNumner = 1; //Fakes var fakeSeries = Builder.CreateNew().Build(); var fakeEpisode = Builder.CreateNew() - .With(c => c.SeriesId = fakeSeries.SeriesId) - .With(c => c.SeasonNumber = seasonNumber) .With(c => c.EpisodeFile = Builder.CreateNew() .With(e => e.Quality = QualityTypes.Bluray720p).Build() ) @@ -145,116 +102,34 @@ namespace 
NzbDrone.Core.Test mocker.GetMock() .Setup(e => e.GetSize(fileName)).Returns(12345).Verifiable(); - var database = mocker.GetMock(MockBehavior.Strict); - database.Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())) + .Returns(false); mocker.GetMock() - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, seasonNumber, episodeNumner)).Returns(fakeEpisode); + .Setup(e => e.GetEpisodesByParseResult(It.IsAny(), false)).Returns(new List { fakeEpisode }); //Act var result = mocker.Resolve().ImportFile(fakeSeries, fileName); //Assert - result.Should().BeNull(); - mocker.GetMock().Verify(r => r.Insert(result), Times.Never()); - mocker.VerifyAllMocks(); + VerifySkipImport(result, mocker); } [Test] - public void import_new_daily_file() - { - //Arrange - ///////////////////////////////////////// - - //Constants - const string fileName = @"2011.01.10 - Denis Leary - HD TV.mkv"; - var airDate = new DateTime(2011, 01, 10); - const int size = 12345; - //Fakes - var fakeSeries = Builder.CreateNew().Build(); - var fakeEpisode = Builder.CreateNew().With(c => c.SeriesId = fakeSeries.SeriesId).Build(); - - //Mocks - var mocker = new AutoMoqer(); - - var database = mocker.GetMock(MockBehavior.Strict); - database.Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); - database.Setup(r => r.Insert(It.IsAny())).Returns(1).Verifiable(); - - mocker.GetMock() - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, airDate)).Returns(fakeEpisode). - Verifiable(); - - mocker.GetMock() - .Setup(e => e.GetSize(fileName)).Returns(size).Verifiable(); - - //Act - var result = mocker.Resolve().ImportFile(fakeSeries, fileName); - - //Assert - Assert.IsNotNull(result); - mocker.GetMock().VerifyAll(); - mocker.GetMock().Verify(r => r.Insert(result), Times.Once()); - mocker.GetMock().VerifyAll(); - mocker.GetMock().VerifyAll(); - - //Currently can't verify this since the list of episodes are loaded - //Dynamically by SubSonic - //Assert.AreEqual(fakeEpisode, result.EpisodeNumbers[0]); - - Assert.AreEqual(fakeEpisode.SeriesId, result.SeriesId); - Assert.AreEqual(QualityTypes.HDTV, result.Quality); - Assert.AreEqual(Parser.NormalizePath(fileName), result.Path); - Assert.AreEqual(size, result.Size); - Assert.AreEqual(false, result.Proper); - Assert.AreNotEqual(new DateTime(), result.DateAdded); - } - - [Test] - public void import_existing_season_file_should_skip() + public void import_unparsable_file_should_skip() { - //Arrange - ///////////////////////////////////////// - - //Constants - const string fileName = @"WEEDS.S03E01.DUAL.BDRip.XviD.AC3.-HELLYWOOD.avi"; - - //Fakes - var fakeSeries = Builder.CreateNew().Build(); - - //Mocks - var mocker = new AutoMoqer(); - - mocker.GetMock(MockBehavior.Strict) - .Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(true).Verifiable(); - - //Act - var result = mocker.Resolve().ImportFile(fakeSeries, fileName); - - //Assert - mocker.VerifyAllMocks(); - result.Should().BeNull(); - } - - [Test] - public void import_unparsable_file() - { - //Arrange - ///////////////////////////////////////// - - //Constants const string fileName = @"WEEDS.avi"; const int size = 12345; - //Fakes var fakeSeries = Builder.CreateNew().Build(); - //Mocks + var mocker = new AutoMoqer(); - mocker.GetMock(MockBehavior.Strict) - .Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())).Returns(false); mocker.GetMock() .Setup(e => 
e.GetSize(fileName)).Returns(size).Verifiable(); @@ -263,103 +138,81 @@ namespace NzbDrone.Core.Test var result = mocker.Resolve().ImportFile(fakeSeries, fileName); //Assert - mocker.VerifyAllMocks(); - Assert.IsNull(result); + VerifySkipImport(result, mocker); ExceptionVerification.ExcpectedWarns(1); } [Test] - public void import_sample_file() + public void import_sample_file_should_skip() { - //Arrange - ///////////////////////////////////////// - - //Constants const string fileName = @"2011.01.10 - Denis Leary - sample - HD TV.mkv"; - var airDate = new DateTime(2011, 01, 10); const int size = 12345; //Fakes var fakeSeries = Builder.CreateNew().Build(); - var fakeEpisode = Builder.CreateNew().With(c => c.SeriesId = fakeSeries.SeriesId).Build(); //Mocks var mocker = new AutoMoqer(); - mocker.GetMock() - .Setup(r => r.Exists(It.IsAny())).Returns(false).Verifiable(); - mocker.GetMock() - .Setup(r => r.Insert(It.IsAny())).Returns(0).Verifiable(); - - mocker.GetMock() - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, airDate)).Returns(fakeEpisode). - Verifiable(); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())).Returns(false); mocker.GetMock() .Setup(e => e.GetSize(fileName)).Returns(size).Verifiable(); - //Act var result = mocker.Resolve().ImportFile(fakeSeries, fileName); //Assert - Assert.IsNull(result); + VerifySkipImport(result, mocker); } [Test] - public void import_existing_file() + public void import_existing_file_should_skip() { const string fileName = "WEEDS.S03E01-06.DUAL.BDRip.XviD.AC3.-HELLYWOOD.avi"; var fakeSeries = Builder.CreateNew().Build(); - var mocker = new AutoMoqer(); - mocker.GetMock(MockBehavior.Strict) - .Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(true).Verifiable(); - - mocker.GetMock(MockBehavior.Strict); + var mocker = new AutoMoqer(MockBehavior.Strict); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())) + .Returns(true); //Act var result = mocker.Resolve().ImportFile(fakeSeries, fileName); //Assert - result.Should().BeNull(); - mocker.GetMock().Verify(r => r.Insert(result), Times.Never()); - mocker.VerifyAllMocks(); + VerifySkipImport(result, mocker); } [Test] - public void import_file_with_no_episode_in_db_should_return_null() + public void import_file_with_no_episode_in_db_should_skip() { - //Constants const string fileName = "WEEDS.S03E01.DUAL.BDRip.XviD.AC3.-HELLYWOOD.avi"; - const int seasonNumber = 3; - const int episodeNumner = 01; //Fakes var fakeSeries = Builder.CreateNew().Build(); //Mocks var mocker = new AutoMoqer(); - mocker.GetMock(MockBehavior.Strict) - .Setup(r => r.Exists(It.IsAny(), It.IsAny())).Returns(false).Verifiable(); - - mocker.GetMock(MockBehavior.Strict) - .Setup(e => e.GetEpisode(fakeSeries.SeriesId, seasonNumber, episodeNumner)).Returns(null). 
- Verifiable(); + mocker.GetMock() + .Setup(p => p.Exists(It.IsAny())) + .Returns(false); mocker.GetMock(MockBehavior.Strict) .Setup(e => e.GetSize(fileName)).Returns(90000000000); + mocker.GetMock() + .Setup(c => c.GetEpisodesByParseResult(It.IsAny(), false)) + .Returns(new List()); + //Act var result = mocker.Resolve().ImportFile(fakeSeries, fileName); //Assert - mocker.VerifyAllMocks(); - result.Should().BeNull(); - mocker.GetMock().Verify(r => r.Insert(result), Times.Never()); - ExceptionVerification.ExcpectedWarns(1); + VerifySkipImport(result, mocker); } [Test] @@ -384,5 +237,30 @@ namespace NzbDrone.Core.Test mocker.VerifyAllMocks(); } + + + + private static void VerifyFileImport(EpisodeFile result, AutoMoqer mocker, Episode fakeEpisode, int size) + { + mocker.VerifyAllMocks(); + result.Should().NotBeNull(); + result.SeriesId.Should().Be(fakeEpisode.SeriesId); + result.Size.Should().Be(size); + result.DateAdded.Should().HaveDay(DateTime.Now.Day); + mocker.GetMock().Verify(p => p.Add(It.IsAny()), Times.Once()); + + //Get the count of episodes linked + var count = mocker.GetMock().Object.GetEpisodesByParseResult(null, false).Count; + + mocker.GetMock().Verify(p => p.UpdateEpisode(It.Is(e => e.EpisodeFileId == result.EpisodeFileId)), Times.Exactly(count)); + } + + private static void VerifySkipImport(EpisodeFile result, AutoMoqer mocker) + { + mocker.VerifyAllMocks(); + result.Should().BeNull(); + mocker.GetMock().Verify(p => p.Add(It.IsAny()), Times.Never()); + mocker.GetMock().Verify(p => p.UpdateEpisode(It.IsAny()), Times.Never()); + } } } diff --git a/NzbDrone.Core.Test/DownloadProviderTest.cs b/NzbDrone.Core.Test/DownloadProviderTest.cs index 16a532786..492f7c347 100644 --- a/NzbDrone.Core.Test/DownloadProviderTest.cs +++ b/NzbDrone.Core.Test/DownloadProviderTest.cs @@ -21,13 +21,15 @@ namespace NzbDrone.Core.Test { var mocker = new AutoMoqer(MockBehavior.Strict); var parseResult = Builder.CreateNew() - .With(e => e.Episodes = Builder.CreateListOfSize(2) + .With(c => c.Quality = new Quality(QualityTypes.DVD, false)) + .Build(); + + var episodes = Builder.CreateListOfSize(2) .WhereTheFirst(1).Has(s => s.EpisodeId = 12) .AndTheNext(1).Has(s => s.EpisodeId = 99) .WhereAll().Has(s => s.SeriesId = 5) - .Build()) - .With(c => c.Quality = new Quality(QualityTypes.DVD, false)) - .Build(); + .Build(); + const string sabTitle = "My fake sab title"; mocker.GetMock() @@ -47,6 +49,9 @@ namespace NzbDrone.Core.Test mocker.GetMock() .Setup(s => s.Add(It.Is(h => h.EpisodeId == 99 && h.SeriesId == 5))); + mocker.GetMock() + .Setup(c => c.GetEpisodesByParseResult(It.IsAny(), false)).Returns(episodes); + mocker.Resolve().DownloadReport(parseResult); mocker.VerifyAllMocks(); diff --git a/NzbDrone.Core.Test/EpisodeProviderTest.cs b/NzbDrone.Core.Test/EpisodeProviderTest.cs index 02c40742c..2dea94e39 100644 --- a/NzbDrone.Core.Test/EpisodeProviderTest.cs +++ b/NzbDrone.Core.Test/EpisodeProviderTest.cs @@ -588,6 +588,22 @@ namespace NzbDrone.Core.Test episodes.Should().NotBeEmpty(); } + [Test] + public void GetEpisode_by_Season_Episode_none_existing() + { + var mocker = new AutoMoqer(); + var db = MockLib.GetEmptyDatabase(); + mocker.SetConstant(db); + + + //Act + var episode = mocker.Resolve().GetEpisode(1, 1, 1); + + //Assert + episode.Should().BeNull(); + } + + [Test] public void GetEpisode_by_Season_Episode_with_EpisodeFile() { @@ -644,6 +660,9 @@ namespace NzbDrone.Core.Test episode.EpisodeFile.Should().BeNull(); } + + + [Test] public void GetEpisode_by_AirDate_with_EpisodeFile() { @@ -697,5 +716,8 
@@ namespace NzbDrone.Core.Test episode.Series.ShouldHave().AllProperties().EqualTo(fakeSeries); episode.EpisodeFile.Should().BeNull(); } + + + } } \ No newline at end of file diff --git a/NzbDrone.Core.Test/EpisodeProviderTest_GetEpisodesByParseResult.cs b/NzbDrone.Core.Test/EpisodeProviderTest_GetEpisodesByParseResult.cs new file mode 100644 index 000000000..1338bc061 --- /dev/null +++ b/NzbDrone.Core.Test/EpisodeProviderTest_GetEpisodesByParseResult.cs @@ -0,0 +1,190 @@ +// ReSharper disable RedundantUsingDirective +using System; +using System.Collections.Generic; +using System.Linq; +using AutoMoq; +using FizzWare.NBuilder; +using FluentAssertions; +using Moq; +using NUnit.Framework; +using NzbDrone.Core.Model; +using NzbDrone.Core.Providers; +using NzbDrone.Core.Providers.Core; +using NzbDrone.Core.Repository; +using NzbDrone.Core.Repository.Quality; +using NzbDrone.Core.Test.Framework; +using PetaPoco; +using TvdbLib.Data; + +namespace NzbDrone.Core.Test +{ + [TestFixture] + // ReSharper disable InconsistentNaming + public class EpisodeProviderTest_GetEpisodesByParseResult : TestBase + { + + [Test] + public void Single_GetSeason_Episode_Exists() + { + var db = MockLib.GetEmptyDatabase(); + var mocker = new AutoMoqer(); + mocker.SetConstant(db); + + var fakeEpisode = Builder.CreateNew() + .With(e => e.SeriesId = 1) + .With(e => e.SeasonNumber = 2) + .With(e => e.EpisodeNumber = 10) + .Build(); + + var fakeSeries = Builder.CreateNew().Build(); + + db.Insert(fakeEpisode); + db.Insert(fakeSeries); + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10 } + }; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult); + + ep.Should().HaveCount(1); + ep.First().ShouldHave().AllPropertiesBut(e => e.Series); + } + + [Test] + public void Single_GetSeason_Episode_Doesnt_exists_should_not_add() + { + var mocker = new AutoMoqer(); + var db = MockLib.GetEmptyDatabase(); + mocker.SetConstant(db); + + var fakeSeries = Builder.CreateNew().Build(); + + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10 } + }; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult); + + ep.Should().BeEmpty(); + db.Fetch().Should().HaveCount(0); + } + + [Test] + public void Single_GetSeason_Episode_Doesnt_exists_should_add() + { + var mocker = new AutoMoqer(); + var db = MockLib.GetEmptyDatabase(); + mocker.SetConstant(db); + + var fakeSeries = Builder.CreateNew().Build(); + + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10 } + }; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult, true); + + ep.Should().HaveCount(1); + db.Fetch().Should().HaveCount(1); + } + + + [Test] + public void Multi_GetSeason_Episode_Exists() + { + var db = MockLib.GetEmptyDatabase(); + var mocker = new AutoMoqer(); + mocker.SetConstant(db); + + var fakeEpisode = Builder.CreateNew() + .With(e => e.SeriesId = 1) + .With(e => e.SeasonNumber = 2) + .With(e => e.EpisodeNumber = 10) + .Build(); + + var fakeEpisode2 = Builder.CreateNew() + .With(e => e.SeriesId = 1) + .With(e => e.SeasonNumber = 2) + .With(e => e.EpisodeNumber = 11) + .Build(); + + var fakeSeries = Builder.CreateNew().Build(); + + db.Insert(fakeEpisode); + db.Insert(fakeEpisode2); + db.Insert(fakeSeries); + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10, 11 } + 
}; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult); + + ep.Should().HaveCount(2); + db.Fetch().Should().HaveCount(2); + ep.First().ShouldHave().AllPropertiesBut(e => e.Series); + } + + [Test] + public void Multi_GetSeason_Episode_Doesnt_exists_should_not_add() + { + var mocker = new AutoMoqer(); + var db = MockLib.GetEmptyDatabase(); + mocker.SetConstant(db); + + var fakeSeries = Builder.CreateNew().Build(); + + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10, 11 } + }; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult); + + ep.Should().BeEmpty(); + db.Fetch().Should().HaveCount(0); + } + + [Test] + public void Multi_GetSeason_Episode_Doesnt_exists_should_add() + { + var mocker = new AutoMoqer(); + var db = MockLib.GetEmptyDatabase(); + mocker.SetConstant(db); + + var fakeSeries = Builder.CreateNew().Build(); + + + var parseResult = new EpisodeParseResult + { + Series = fakeSeries, + SeasonNumber = 2, + EpisodeNumbers = new List { 10, 11 } + }; + + var ep = mocker.Resolve().GetEpisodesByParseResult(parseResult, true); + + ep.Should().HaveCount(2); + db.Fetch().Should().HaveCount(2); + } + + } +} \ No newline at end of file diff --git a/NzbDrone.Core.Test/EpisodeSearchJobTest.cs b/NzbDrone.Core.Test/EpisodeSearchJobTest.cs index 26ea4c501..0e34f0a64 100644 --- a/NzbDrone.Core.Test/EpisodeSearchJobTest.cs +++ b/NzbDrone.Core.Test/EpisodeSearchJobTest.cs @@ -199,7 +199,7 @@ namespace NzbDrone.Core.Test } - + [TestCase(0)] [TestCase(-1)] [TestCase(-100)] @@ -245,7 +245,7 @@ namespace NzbDrone.Core.Test .Returns(indexers); mocker.GetMock() - .Setup(c => c.IsQualityNeeded(It.Is(d => d.Series != null && d.Episodes.Count != 0))).Returns(false); + .Setup(c => c.IsQualityNeeded(It.IsAny())).Returns(false); mocker.GetMock() .Setup(s => s.GetSceneName(It.IsAny())).Returns(""); @@ -296,7 +296,7 @@ namespace NzbDrone.Core.Test .Returns(indexers); mocker.GetMock() - .Setup(c => c.IsQualityNeeded(It.Is(d => d.Series != null && d.Episodes.Count != 0))).Returns(false); + .Setup(c => c.IsQualityNeeded(It.IsAny())).Returns(false); mocker.GetMock() .Setup(s => s.GetSceneName(71256)).Returns("The Daily Show"); @@ -326,7 +326,7 @@ namespace NzbDrone.Core.Test .With(c => c.SeasonNumber = 12) .Build(); - var mocker = new AutoMoqer(MockBehavior.Strict); + var mocker = new AutoMoqer(); mocker.GetMock() .Setup(c => c.GetEpisode(episode.EpisodeId)) @@ -342,7 +342,7 @@ namespace NzbDrone.Core.Test .Throws(new Exception()).Verifiable(); var indexer3 = new Mock(); - indexer2.Setup(c => c.FetchEpisode(episode.Series.Title, episode.SeasonNumber, episode.EpisodeNumber)) + indexer3.Setup(c => c.FetchEpisode(episode.Series.Title, episode.SeasonNumber, episode.EpisodeNumber)) .Returns(parseResults).Verifiable(); @@ -353,7 +353,7 @@ namespace NzbDrone.Core.Test .Returns(indexers); mocker.GetMock() - .Setup(c => c.IsQualityNeeded(It.Is(d => d.Series != null && d.Episodes.Count != 0))).Returns(false);; + .Setup(c => c.IsQualityNeeded(It.IsAny())).Returns(false); mocker.GetMock() .Setup(s => s.GetSceneName(It.IsAny())).Returns(""); diff --git a/NzbDrone.Core.Test/InventoryProvider_IsMonitoredTest.cs b/NzbDrone.Core.Test/InventoryProvider_IsMonitoredTest.cs index 0197d94f6..589cf808f 100644 --- a/NzbDrone.Core.Test/InventoryProvider_IsMonitoredTest.cs +++ b/NzbDrone.Core.Test/InventoryProvider_IsMonitoredTest.cs @@ -110,9 +110,9 @@ namespace NzbDrone.Core.Test mocker.VerifyAllMocks(); } - + [Test] - public void 
IsMonitored_dailyshow_should_do_daily_lookup() + public void IsMonitored_should_return_true() { var mocker = new AutoMoqer(MockBehavior.Strict); @@ -121,23 +121,47 @@ namespace NzbDrone.Core.Test .Returns(series); mocker.GetMock() - .Setup(p => p.GetEpisode(episode.SeriesId, episode.SeasonNumber, episode.EpisodeNumber)) - .Returns(null); + .Setup(p => p.GetEpisodesByParseResult(It.IsAny(), true)) + .Returns(new List { episode }); + + parseResultSingle.Series.Should().BeNull(); + + var result = mocker.Resolve().IsMonitored(parseResultSingle); + + //Assert + result.Should().BeTrue(); + parseResultSingle.Series.Should().Be(series); + mocker.VerifyAllMocks(); + } + + + [Test] + public void IsMonitored_ignored_single_episode_should_return_false() + { + var mocker = new AutoMoqer(MockBehavior.Strict); + + mocker.GetMock() + .Setup(p => p.FindSeries(It.IsAny())) + .Returns(series); mocker.GetMock() - .Setup(p => p.GetEpisode(episode.SeriesId, episode.AirDate)) - .Returns(episode); + .Setup(p => p.GetEpisodesByParseResult(It.IsAny(), true)) + .Returns(new List { episode }); + + episode.Ignored = true; + + parseResultSingle.Series.Should().BeNull(); var result = mocker.Resolve().IsMonitored(parseResultSingle); //Assert - Assert.IsTrue(result); - Assert.AreSame(series, parseResultSingle.Series); + result.Should().BeFalse(); + parseResultSingle.Series.Should().Be(series); mocker.VerifyAllMocks(); } [Test] - public void none_db_episode_should_be_added() + public void IsMonitored_multi_some_episodes_ignored_should_return_true() { var mocker = new AutoMoqer(MockBehavior.Strict); @@ -146,25 +170,75 @@ namespace NzbDrone.Core.Test .Returns(series); mocker.GetMock() - .Setup(p => p.GetEpisode(episode.SeriesId, episode.SeasonNumber, episode.EpisodeNumber)) - .Returns(null); + .Setup(p => p.GetEpisodesByParseResult(It.IsAny(), true)) + .Returns(new List { episode, episode2 }); + + episode.Ignored = true; + episode2.Ignored = false; + + parseResultMulti.Series.Should().BeNull(); + + var result = mocker.Resolve().IsMonitored(parseResultMulti); + + //Assert + result.Should().BeTrue(); + parseResultMulti.Series.Should().Be(series); + mocker.VerifyAllMocks(); + } + + [Test] + public void IsMonitored_multi_all_episodes_ignored_should_return_false() + { + var mocker = new AutoMoqer(MockBehavior.Strict); + + mocker.GetMock() + .Setup(p => p.FindSeries(It.IsAny())) + .Returns(series); mocker.GetMock() - .Setup(p => p.GetEpisode(episode.SeriesId, episode.AirDate)) - .Returns(null); + .Setup(p => p.GetEpisodesByParseResult(It.IsAny(), true)) + .Returns(new List { episode, episode2 }); + + episode.Ignored = true; + episode2.Ignored = true; + + parseResultSingle.Series.Should().BeNull(); + + var result = mocker.Resolve().IsMonitored(parseResultMulti); + + //Assert + result.Should().BeFalse(); + parseResultMulti.Series.Should().Be(series); + mocker.VerifyAllMocks(); + } + + + [Test] + public void IsMonitored_multi_no_episodes_ignored_should_return_true() + { + var mocker = new AutoMoqer(MockBehavior.Strict); + + mocker.GetMock() + .Setup(p => p.FindSeries(It.IsAny())) + .Returns(series); mocker.GetMock() - .Setup(p => p.AddEpisode(It.IsAny())); + .Setup(p => p.GetEpisodesByParseResult(It.IsAny(), true)) + .Returns(new List { episode, episode2 }); - //Act - var result = mocker.Resolve().IsMonitored(parseResultSingle); + episode.Ignored = false; + episode2.Ignored = false; + + parseResultSingle.Series.Should().BeNull(); + + var result = mocker.Resolve().IsMonitored(parseResultMulti); //Assert - Assert.IsTrue(result); - 
Assert.AreSame(series, parseResultSingle.Series); - parseResultSingle.Episodes.Should().HaveCount(1); - Assert.AreEqual("TBD", parseResultSingle.Episodes[0].Title); + result.Should().BeTrue(); + parseResultMulti.Series.Should().Be(series); mocker.VerifyAllMocks(); } + + } } \ No newline at end of file diff --git a/NzbDrone.Core.Test/InventoryProvider_QualityNeededTest.cs b/NzbDrone.Core.Test/InventoryProvider_QualityNeededTest.cs index ca03ab27c..3bf61ff62 100644 --- a/NzbDrone.Core.Test/InventoryProvider_QualityNeededTest.cs +++ b/NzbDrone.Core.Test/InventoryProvider_QualityNeededTest.cs @@ -37,7 +37,6 @@ namespace NzbDrone.Core.Test EpisodeNumbers = new List { 3, 4 }, SeasonNumber = 12, AirDate = DateTime.Now.AddDays(-12).Date, - Episodes = new List() }; parseResultSingle = new EpisodeParseResult @@ -48,7 +47,6 @@ namespace NzbDrone.Core.Test EpisodeNumbers = new List { 3 }, SeasonNumber = 12, AirDate = DateTime.Now.AddDays(-12).Date, - Episodes = new List() }; episodeFile = Builder.CreateNew().With(c => c.Quality = QualityTypes.DVD).Build(); @@ -99,9 +97,9 @@ namespace NzbDrone.Core.Test parseResultMulti.Series = series; parseResultSingle.Series = series; - parseResultSingle.Episodes.Add(episode); - parseResultMulti.Episodes.Add(episode); - parseResultMulti.Episodes.Add(episode2); + /* parseResultSingle.Episodes.Add(episode); + parseResultMulti.Episodes.Add(episode); + parseResultMulti.Episodes.Add(episode2);*/ base.Setup(); @@ -129,9 +127,15 @@ namespace NzbDrone.Core.Test var mocker = new AutoMoqer(MockBehavior.Strict); parseResultMulti.Series.QualityProfile = hdProfile; - parseResultMulti.Episodes[0].EpisodeFile.Quality = QualityTypes.Bluray720p; + parseResultMulti.Quality = new Quality(QualityTypes.HDTV, true); + mocker.GetMock() + .Setup(p => p.GetEpisodesByParseResult(parseResultMulti, true)) + .Returns(new List { episode, episode2 }); + + episode.EpisodeFile.Quality = QualityTypes.Bluray720p; + //Act bool result = mocker.Resolve().IsQualityNeeded(parseResultMulti); @@ -147,13 +151,18 @@ namespace NzbDrone.Core.Test var mocker = new AutoMoqer(MockBehavior.Strict); parseResultSingle.Series.QualityProfile = sdProfile; - parseResultSingle.Episodes[0].EpisodeFile.Quality = QualityTypes.SDTV; parseResultSingle.Quality.QualityType = QualityTypes.DVD; mocker.GetMock() .Setup(p => p.GetBestQualityInHistory(episode.EpisodeId)) .Returns(new Quality(QualityTypes.DVD, true)); + mocker.GetMock() + .Setup(p => p.GetEpisodesByParseResult(parseResultSingle, true)) + .Returns(new List { episode }); + + episode.EpisodeFile.Quality = QualityTypes.SDTV; + //Act bool result = mocker.Resolve().IsQualityNeeded(parseResultSingle); @@ -168,13 +177,18 @@ namespace NzbDrone.Core.Test var mocker = new AutoMoqer(MockBehavior.Strict); parseResultSingle.Series.QualityProfile = sdProfile; - parseResultSingle.Episodes[0].EpisodeFile.Quality = QualityTypes.SDTV; parseResultSingle.Quality.QualityType = QualityTypes.DVD; mocker.GetMock() .Setup(p => p.GetBestQualityInHistory(episode.EpisodeId)) .Returns(new Quality(QualityTypes.SDTV, true)); + mocker.GetMock() + .Setup(p => p.GetEpisodesByParseResult(parseResultSingle, true)) + .Returns(new List { episode }); + + episode.EpisodeFile.Quality = QualityTypes.SDTV; + //Act bool result = mocker.Resolve().IsQualityNeeded(parseResultSingle); @@ -189,13 +203,18 @@ namespace NzbDrone.Core.Test var mocker = new AutoMoqer(MockBehavior.Strict); parseResultSingle.Series.QualityProfile = sdProfile; - parseResultSingle.Episodes[0].EpisodeFile.Quality = QualityTypes.SDTV; 
parseResultSingle.Quality.QualityType = QualityTypes.DVD; mocker.GetMock() .Setup(p => p.GetBestQualityInHistory(episode.EpisodeId)) .Returns(null); + + mocker.GetMock() + .Setup(p => p.GetEpisodesByParseResult(parseResultSingle, true)) + .Returns(new List { episode }); + + episode.EpisodeFile.Quality = QualityTypes.SDTV; //Act bool result = mocker.Resolve().IsQualityNeeded(parseResultSingle); diff --git a/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj b/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj index ed54b6fe2..bfed0f28a 100644 --- a/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj +++ b/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj @@ -85,6 +85,7 @@ + @@ -128,18 +129,6 @@ - - {5270F048-E580-486C-B14C-E5B9F6E539D4} - Migrator.Framework - - - {D58C68E4-D789-40F7-9078-C9F587D4363C} - Migrator.Providers - - - {1FEE70A4-AAD7-4C60-BE60-3F7DC03A8C4D} - Migrator - {FF5EE3B6-913B-47CE-9CEB-11C51B4E1205} NzbDrone.Core diff --git a/NzbDrone.Core.Test/SabProviderTest.cs b/NzbDrone.Core.Test/SabProviderTest.cs index ced522f06..92d8c3c90 100644 --- a/NzbDrone.Core.Test/SabProviderTest.cs +++ b/NzbDrone.Core.Test/SabProviderTest.cs @@ -259,7 +259,7 @@ namespace NzbDrone.Core.Test mocker.Resolve().IsInQueue(String.Empty); } - + [Test] [TestCase(1, new[] { 2 }, "My Episode Title", QualityTypes.DVD, false, "My Series Name - 1x2 - My Episode Title [DVD]")] [TestCase(1, new[] { 2 }, "My Episode Title", QualityTypes.DVD, true, "My Series Name - 1x2 - My Episode Title [DVD] [Proper]")] [TestCase(1, new[] { 2 }, "", QualityTypes.DVD, true, "My Series Name - 1x2 - [DVD] [Proper]")] @@ -274,10 +274,6 @@ namespace NzbDrone.Core.Test .With(c => c.Path = @"d:\tv shows\My Series Name") .Build(); - var episode = Builder.CreateNew() - .With(e => e.Title = title) - .Build(); - var parsResult = new EpisodeParseResult() { AirDate = DateTime.Now, @@ -285,7 +281,7 @@ namespace NzbDrone.Core.Test Quality = new Quality(quality, proper), SeasonNumber = seasons, Series = series, - Episodes = new List() { episode } + EpisodeTitle = title }; //Act diff --git a/NzbDrone.Core/Model/EpisodeParseResult.cs b/NzbDrone.Core/Model/EpisodeParseResult.cs index de566a5e6..9c9818d82 100644 --- a/NzbDrone.Core/Model/EpisodeParseResult.cs +++ b/NzbDrone.Core/Model/EpisodeParseResult.cs @@ -7,13 +7,15 @@ namespace NzbDrone.Core.Model public class EpisodeParseResult { internal string CleanTitle { get; set; } + + public string EpisodeTitle { get; set; } internal int SeasonNumber { get; set; } internal List EpisodeNumbers { get; set; } internal DateTime AirDate { get; set; } - + public Quality Quality { get; set; } public LanguageType Language { get; set; } @@ -24,8 +26,6 @@ namespace NzbDrone.Core.Model public Series Series { get; set; } - public IList Episodes { get; set; } - public String Indexer { get; set; } public override string ToString() @@ -34,7 +34,7 @@ namespace NzbDrone.Core.Model return string.Format("{0} - {1} {2}", CleanTitle, AirDate.ToShortDateString(), Quality); return string.Format("{0} - S{1:00}E{2} {3}", CleanTitle, SeasonNumber, - String.Join("-", EpisodeNumbers),Quality); + String.Join("-", EpisodeNumbers), Quality); } } diff --git a/NzbDrone.Core/NzbDrone.Core.csproj b/NzbDrone.Core/NzbDrone.Core.csproj index 648df1730..1fd72ff8f 100644 --- a/NzbDrone.Core/NzbDrone.Core.csproj +++ b/NzbDrone.Core/NzbDrone.Core.csproj @@ -126,6 +126,18 @@ ..\Libraries\Exceptioneer.WindowsFormsClient.dll + + False + ..\Libraries\Migrator.NET\Migrator.dll + + + False + ..\Libraries\Migrator.NET\Migrator.Framework.dll + + + False + 
+      ..\Libraries\Migrator.NET\Migrator.Providers.dll
+    
       False
       ..\packages\MiniProfiler.1.3\lib\MvcMiniProfiler.dll
@@ -279,20 +291,6 @@
-    
-    
-      {5270F048-E580-486C-B14C-E5B9F6E539D4}
-      Migrator.Framework
-    
-    
-      {D58C68E4-D789-40F7-9078-C9F587D4363C}
-      Migrator.Providers
-    
-    
-      {1FEE70A4-AAD7-4C60-BE60-3F7DC03A8C4D}
-      Migrator
-    
-    
diff --git a/NzbDrone.Core/Providers/Core/DiskProvider.cs b/NzbDrone.Core/Providers/Core/DiskProvider.cs
index a8574c5cb..8e8ae2cb0 100644
--- a/NzbDrone.Core/Providers/Core/DiskProvider.cs
+++ b/NzbDrone.Core/Providers/Core/DiskProvider.cs
@@ -43,7 +43,7 @@ namespace NzbDrone.Core.Providers.Core
             File.Delete(path);
         }
 
-        public virtual void RenameFile(string sourcePath, string destinationPath)
+        public virtual void MoveFile(string sourcePath, string destinationPath)
         {
             File.Move(sourcePath, destinationPath);
         }
diff --git a/NzbDrone.Core/Providers/DiskScanProvider.cs b/NzbDrone.Core/Providers/DiskScanProvider.cs
index 7783c376b..be123a816 100644
--- a/NzbDrone.Core/Providers/DiskScanProvider.cs
+++ b/NzbDrone.Core/Providers/DiskScanProvider.cs
@@ -14,7 +14,6 @@ namespace NzbDrone.Core.Providers
     {
         private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
         private static readonly string[] MediaExtentions = new[] { ".mkv", ".avi", ".wmv", ".mp4" };
-        private readonly IDatabase _database;
         private readonly DiskProvider _diskProvider;
         private readonly EpisodeProvider _episodeProvider;
         private readonly MediaFileProvider _mediaFileProvider;
@@ -22,14 +21,12 @@ namespace NzbDrone.Core.Providers
         [Inject]
         public DiskScanProvider(DiskProvider diskProvider, EpisodeProvider episodeProvider,
-                                SeriesProvider seriesProvider, MediaFileProvider mediaFileProvider,
-                                IDatabase database)
+                                SeriesProvider seriesProvider, MediaFileProvider mediaFileProvider)
         {
             _diskProvider = diskProvider;
             _episodeProvider = episodeProvider;
             _seriesProvider = seriesProvider;
             _mediaFileProvider = mediaFileProvider;
-            _database = database;
         }
 
@@ -86,7 +83,7 @@ namespace NzbDrone.Core.Providers
         {
             Logger.Trace("Importing file to database [{0}]", filePath);
 
-            if (_database.Exists("WHERE Path =@0", Parser.NormalizePath(filePath)))
+            if (_mediaFileProvider.Exists(filePath))
             {
                 Logger.Trace("[{0}] already exists in the database. skipping.", filePath);
                 return null;
@@ -107,46 +104,15 @@ namespace NzbDrone.Core.Providers
                 return null;
 
             parseResult.CleanTitle = series.Title; //replaces the nasty path as title to help with logging
+            parseResult.Series = series;
 
-            //Stores the list of episodes to add to the EpisodeFile
-            var episodes = new List();
+            var episodes = _episodeProvider.GetEpisodesByParseResult(parseResult);
 
-            //Check for daily shows
-            if (parseResult.EpisodeNumbers == null)
-            {
-                var episode = _episodeProvider.GetEpisode(series.SeriesId, parseResult.AirDate.Date);
-
-                if (episode != null)
-                {
-                    episodes.Add(episode);
-                }
-                else
-                {
-                    Logger.Warn("Unable to find [{0}] in the database.[{1}]", parseResult, filePath);
-                }
-            }
-            else
-            {
-                foreach (var episodeNumber in parseResult.EpisodeNumbers)
-                {
-                    var episode = _episodeProvider.GetEpisode(series.SeriesId, parseResult.SeasonNumber,
-                                                              episodeNumber);
-
-                    if (episode != null)
-                    {
-                        episodes.Add(episode);
-                    }
-                    else
-                    {
-                        Logger.Warn("Unable to find [{0}] in the database.[{1}]", parseResult, filePath);
-                    }
-                }
-            }
-
-            //Return null if no Episodes exist in the DB for the parsed episodes from file
             if (episodes.Count <= 0)
+            {
+                Logger.Debug("Can't find any matching episodes in the database. skipping");
skipping"); return null; - + } if (episodes.Any(e => e.EpisodeFile != null && e.EpisodeFile.QualityWrapper > parseResult.Quality)) { @@ -162,24 +128,22 @@ namespace NzbDrone.Core.Providers episodeFile.Quality = parseResult.Quality.QualityType; episodeFile.Proper = parseResult.Quality.Proper; episodeFile.SeasonNumber = parseResult.SeasonNumber; - int fileId = Convert.ToInt32(_database.Insert(episodeFile)); + var fileId = _mediaFileProvider.Add(episodeFile); - //This is for logging + updating the episodes that are linked to this EpisodeFile - string episodeList = String.Empty; + //Link file to all episodes foreach (var ep in episodes) { ep.EpisodeFileId = fileId; _episodeProvider.UpdateEpisode(ep); - episodeList += String.Format(", {0}", ep.EpisodeId).Trim(' ', ','); + Logger.Trace("Linking file {0} to {1}", filePath, ep); } - Logger.Trace("File {0}:{1} attached to episode(s): '{2}'", episodeFile.EpisodeFileId, filePath, - episodeList); + return episodeFile; } - public virtual bool RenameEpisodeFile(EpisodeFile episodeFile) + public virtual bool MoveEpisodeFile(EpisodeFile episodeFile) { if (episodeFile == null) throw new ArgumentNullException("episodeFile"); @@ -191,7 +155,7 @@ namespace NzbDrone.Core.Providers //Do the rename Logger.Trace("Attempting to rename {0} to {1}", episodeFile.Path, newFile.FullName); - _diskProvider.RenameFile(episodeFile.Path, newFile.FullName); + _diskProvider.MoveFile(episodeFile.Path, newFile.FullName); //Update the filename in the DB episodeFile.Path = newFile.FullName; @@ -221,7 +185,7 @@ namespace NzbDrone.Core.Providers } //Delete it from the DB - _database.Delete(episodeFile.EpisodeFileId); + _mediaFileProvider.Delete(episodeFile.EpisodeFileId); } } } diff --git a/NzbDrone.Core/Providers/DownloadProvider.cs b/NzbDrone.Core/Providers/DownloadProvider.cs index fbab37e78..710ac20ad 100644 --- a/NzbDrone.Core/Providers/DownloadProvider.cs +++ b/NzbDrone.Core/Providers/DownloadProvider.cs @@ -39,7 +39,7 @@ namespace NzbDrone.Core.Providers if (addSuccess) { - foreach (var episode in parseResult.Episodes) + foreach (var episode in _episodeProvider.GetEpisodesByParseResult(parseResult)) { var history = new History(); history.Date = DateTime.Now; diff --git a/NzbDrone.Core/Providers/EpisodeProvider.cs b/NzbDrone.Core/Providers/EpisodeProvider.cs index 50576b820..eca329ce8 100644 --- a/NzbDrone.Core/Providers/EpisodeProvider.cs +++ b/NzbDrone.Core/Providers/EpisodeProvider.cs @@ -35,7 +35,7 @@ namespace NzbDrone.Core.Providers public virtual Episode GetEpisode(long id) { - var episode = AttachSeries(_database.Fetch(@"SELECT * FROM Episodes + var episode = AttachSeries(_database.Fetch(@"SELECT * FROM Episodes LEFT JOIN EpisodeFiles ON Episodes.EpisodeFileId = EpisodeFiles.EpisodeFileId WHERE EpisodeId = @0", id).Single()); @@ -86,7 +86,7 @@ namespace NzbDrone.Core.Providers if (episode.EpisodeFileId == 0) episode.EpisodeFile = null; } - + return episodes; } @@ -105,33 +105,46 @@ namespace NzbDrone.Core.Providers return episodes; } - public virtual List GetEpisodes(EpisodeParseResult parseResult) - { - if (parseResult.Series == null) - { - Logger.Debug("Episode Parse Result is Invalid, skipping"); - return new List(); - } - var episodes = new List(); + public virtual IList GetEpisodesByParseResult(EpisodeParseResult parseResult, Boolean autoAddNew = false) + { + var result = new List(); - foreach (var ep in parseResult.EpisodeNumbers) + foreach (var episodeNumber in parseResult.EpisodeNumbers) { - var episode = GetEpisode(parseResult.Series.SeriesId, 
-
-                if (episode == null)
-                    return new List();
-
-                episodes.Add(episode);
-            }
+                var episodeInfo = GetEpisode(parseResult.Series.SeriesId, parseResult.SeasonNumber, episodeNumber);
+                if (episodeInfo == null)
+                {
+                    episodeInfo = GetEpisode(parseResult.Series.SeriesId, parseResult.AirDate);
+                }
+                //if still null we should add the temp episode
+                if (episodeInfo == null && autoAddNew)
+                {
+                    Logger.Debug("Episode {0} doesn't exist in db. adding it now.", parseResult);
+                    episodeInfo = new Episode
+                    {
+                        SeriesId = parseResult.Series.SeriesId,
+                        AirDate = DateTime.Now.Date,
+                        EpisodeNumber = episodeNumber,
+                        SeasonNumber = parseResult.SeasonNumber,
+                        Title = "TBD",
+                        Overview = String.Empty,
+                    };
+
+                    AddEpisode(episodeInfo);
+                }
 
-            foreach (var episode in episodes)
-            {
-                if (episode.EpisodeFileId == 0)
-                    episode.EpisodeFile = null;
+                if (episodeInfo != null)
+                {
+                    result.Add(episodeInfo);
+                }
+                else
+                {
+                    Logger.Debug("Unable to find {0}-S{1:00}E{2:00}", parseResult.Series.Title, parseResult.SeasonNumber, episodeNumber);
+                }
             }
 
-            return episodes;
+            return result;
         }
 
         public virtual IList EpisodesWithoutFiles(bool includeSpecials)
diff --git a/NzbDrone.Core/Providers/IndexerProvider.cs b/NzbDrone.Core/Providers/IndexerProvider.cs
index 48ad40431..6bb8a83b9 100644
--- a/NzbDrone.Core/Providers/IndexerProvider.cs
+++ b/NzbDrone.Core/Providers/IndexerProvider.cs
@@ -14,12 +14,13 @@ namespace NzbDrone.Core.Providers
         private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
         private readonly IDatabase _database;
 
-        private IList _indexers = new List();
+        private IEnumerable _indexers;
 
         [Inject]
-        public IndexerProvider(IDatabase database)
+        public IndexerProvider(IDatabase database, IEnumerable indexers)
         {
             _database = database;
+            _indexers = indexers;
         }
 
         public IndexerProvider()
diff --git a/NzbDrone.Core/Providers/InventoryProvider.cs b/NzbDrone.Core/Providers/InventoryProvider.cs
index ffe3931ca..202f0a9c7 100644
--- a/NzbDrone.Core/Providers/InventoryProvider.cs
+++ b/NzbDrone.Core/Providers/InventoryProvider.cs
@@ -1,5 +1,6 @@
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using Ninject;
 using NLog;
 using NzbDrone.Core.Model;
@@ -41,7 +42,6 @@ namespace NzbDrone.Core.Providers
             }
 
             parseResult.Series = series;
-            parseResult.Episodes = new List();
 
             if (!series.Monitored)
             {
@@ -49,34 +49,16 @@ namespace NzbDrone.Core.Providers
                 return false;
             }
 
-            foreach (var episodeNumber in parseResult.EpisodeNumbers)
-            {
-                var episodeInfo = _episodeProvider.GetEpisode(series.SeriesId, parseResult.SeasonNumber, episodeNumber);
-                if (episodeInfo == null)
-                {
-                    episodeInfo = _episodeProvider.GetEpisode(series.SeriesId, parseResult.AirDate);
-                }
-                //if still null we should add the temp episode
-                if (episodeInfo == null)
-                {
-                    Logger.Debug("Episode {0} doesn't exist in db. adding it now.", parseResult);
-                    episodeInfo = new Episode
-                    {
-                        SeriesId = series.SeriesId,
-                        AirDate = DateTime.Now.Date,
-                        EpisodeNumber = episodeNumber,
-                        SeasonNumber = parseResult.SeasonNumber,
-                        Title = "TBD",
-                        Overview = String.Empty,
-                    };
-
-                    _episodeProvider.AddEpisode(episodeInfo);
-                }
+            var episodes = _episodeProvider.GetEpisodesByParseResult(parseResult, true);
 
-                parseResult.Episodes.Add(episodeInfo);
+            //return monitored if any of the episodes are monitored
+            if (episodes.Any(episode => !episode.Ignored))
+            {
+                return true;
             }
 
-            return true;
+            Logger.Debug("All episodes are ignored. skipping.");
skipping."); + return false; } /// @@ -97,7 +79,7 @@ namespace NzbDrone.Core.Providers var cutoff = parsedReport.Series.QualityProfile.Cutoff; - foreach (var episode in parsedReport.Episodes) + foreach (var episode in _episodeProvider.GetEpisodesByParseResult(parsedReport, true)) { //Checking File var file = episode.EpisodeFile; diff --git a/NzbDrone.Core/Providers/Jobs/EpisodeSearchJob.cs b/NzbDrone.Core/Providers/Jobs/EpisodeSearchJob.cs index 97f5e3f24..45330756a 100644 --- a/NzbDrone.Core/Providers/Jobs/EpisodeSearchJob.cs +++ b/NzbDrone.Core/Providers/Jobs/EpisodeSearchJob.cs @@ -68,11 +68,11 @@ namespace NzbDrone.Core.Providers.Jobs { try { - notification.CurrentMessage = String.Format("Searching for {0} in {1}", episode, indexer.Name); + //notification.CurrentMessage = String.Format("Searching for {0} in {1}", episode, indexer.Name); //TODO:Add support for daily episodes, maybe search using both date and season/episode? var indexerResults = indexer.FetchEpisode(title, episode.SeasonNumber, episode.EpisodeNumber); - + reports.AddRange(indexerResults); } catch (Exception e) @@ -84,10 +84,12 @@ namespace NzbDrone.Core.Providers.Jobs Logger.Debug("Finished searching all indexers. Total {0}", reports.Count); notification.CurrentMessage = "Processing search results"; + + //TODO:fix this so when search returns more than one episode + //TODO:-its populated with more than the original episode. reports.ForEach(c => { c.Series = series; - c.Episodes = new List { episode }; }); ProcessResults(notification, episode, reports); diff --git a/NzbDrone.Core/Providers/Jobs/PostDownloadScanJob.cs b/NzbDrone.Core/Providers/Jobs/PostDownloadScanJob.cs index e48119aa0..f1564425c 100644 --- a/NzbDrone.Core/Providers/Jobs/PostDownloadScanJob.cs +++ b/NzbDrone.Core/Providers/Jobs/PostDownloadScanJob.cs @@ -83,7 +83,7 @@ namespace NzbDrone.Core.Providers.Jobs } var importedFiles = _diskScanProvider.Scan(series, subfolder); - importedFiles.ForEach(file => _diskScanProvider.RenameEpisodeFile(file)); + importedFiles.ForEach(file => _diskScanProvider.MoveEpisodeFile(file)); } Logger.Debug("New Download Scan Job completed successfully"); diff --git a/NzbDrone.Core/Providers/Jobs/RenameEpisodeJob.cs b/NzbDrone.Core/Providers/Jobs/RenameEpisodeJob.cs index cc39c455a..9e7920767 100644 --- a/NzbDrone.Core/Providers/Jobs/RenameEpisodeJob.cs +++ b/NzbDrone.Core/Providers/Jobs/RenameEpisodeJob.cs @@ -33,7 +33,7 @@ namespace NzbDrone.Core.Providers.Jobs public void Start(ProgressNotification notification, int targetId) { var episode = _mediaFileProvider.GetEpisodeFile(targetId); - _diskScanProvider.RenameEpisodeFile(episode); + _diskScanProvider.MoveEpisodeFile(episode); } } } \ No newline at end of file diff --git a/NzbDrone.Core/Providers/MediaFileProvider.cs b/NzbDrone.Core/Providers/MediaFileProvider.cs index d9e4271ec..d734bb988 100644 --- a/NzbDrone.Core/Providers/MediaFileProvider.cs +++ b/NzbDrone.Core/Providers/MediaFileProvider.cs @@ -31,11 +31,28 @@ namespace NzbDrone.Core.Providers { } + + + public virtual int Add(EpisodeFile episodeFile) + { + return Convert.ToInt32(_database.Insert(episodeFile)); + } + public virtual void Update(EpisodeFile episodeFile) { _database.Update(episodeFile); } + public virtual void Delete(int episodeFileId) + { + _database.Delete(episodeFileId); + } + + public virtual bool Exists(string path) + { + return _database.Exists("WHERE Path =@0", Parser.NormalizePath(path)); + } + public virtual EpisodeFile GetEpisodeFile(int episodeFileId) { return _database.Single(episodeFileId); diff 
diff --git a/NzbDrone.Core/Providers/SabProvider.cs b/NzbDrone.Core/Providers/SabProvider.cs
index db8e59b79..734f04026 100644
--- a/NzbDrone.Core/Providers/SabProvider.cs
+++ b/NzbDrone.Core/Providers/SabProvider.cs
@@ -126,7 +126,7 @@ namespace NzbDrone.Core.Providers
 
             var epNumberString = String.Join("-", episodeString);
 
-            var result = String.Format("{0} - {1} - {2} [{3}]", new DirectoryInfo(parseResult.Series.Path).Name, epNumberString, parseResult.Episodes.FirstOrDefault().Title, parseResult.Quality.QualityType);
+            var result = String.Format("{0} - {1} - {2} [{3}]", new DirectoryInfo(parseResult.Series.Path).Name, epNumberString, parseResult.EpisodeTitle, parseResult.Quality.QualityType);
 
             if (parseResult.Quality.Proper)
             {
diff --git a/NzbDrone.Core/Providers/SeriesProvider.cs b/NzbDrone.Core/Providers/SeriesProvider.cs
index 0271e8ab7..896f09a02 100644
--- a/NzbDrone.Core/Providers/SeriesProvider.cs
+++ b/NzbDrone.Core/Providers/SeriesProvider.cs
@@ -135,10 +135,11 @@ namespace NzbDrone.Core.Providers
 
                 return series;
             }
-            //This will catch InvalidOperationExceptions that may be thrown for GetSeries due to the series being in SceneMapping, but not in the users Database
-            catch (InvalidOperationException ex)
+
+            catch (InvalidOperationException)
             {
-                Logger.DebugException(ex.Message, ex);
+                //This will catch InvalidOperationExceptions(Sequence contains no element)
+                //that may be thrown for GetSeries due to the series being in SceneMapping, but not in the users Database
                 return null;
            }
        }
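The change that runs through most of these hunks is that EpisodeParseResult no longer carries an Episodes list; anything that needs the concrete episodes behind a report now asks EpisodeProvider.GetEpisodesByParseResult(parseResult, autoAddNew), which also stubs in a "TBD" episode when autoAddNew is set. Below is a minimal sketch of the calling pattern only; the types here are simplified stand-ins, not the real NzbDrone classes.

```csharp
using System.Collections.Generic;
using System.Linq;

// Simplified stand-ins for the NzbDrone domain types touched by this patch.
public class Episode
{
    public int EpisodeId { get; set; }
    public bool Ignored { get; set; }
}

public class EpisodeParseResult
{
    public int SeasonNumber { get; set; }
    public List<int> EpisodeNumbers { get; set; }
}

public class EpisodeProviderStub
{
    // Mirrors the new signature added in this patch: the second argument
    // controls whether a missing episode is stubbed into the database.
    public virtual IList<Episode> GetEpisodesByParseResult(EpisodeParseResult parseResult, bool autoAddNew = false)
    {
        // The real implementation looks episodes up by season/episode or air date;
        // this stub just returns an empty list.
        return new List<Episode>();
    }
}

public static class MonitoredCheckExample
{
    // Sketch of the InventoryProvider-style check: a report is wanted only if
    // at least one of its episodes is not ignored.
    public static bool IsAnyEpisodeMonitored(EpisodeProviderStub episodeProvider, EpisodeParseResult parseResult)
    {
        var episodes = episodeProvider.GetEpisodesByParseResult(parseResult, true);
        return episodes.Any(episode => !episode.Ignored);
    }
}
```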
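The updated tests drive that seam by telling a mocked provider which episodes a parse result maps to. Here is the same setup sketched with plain Moq (the patch itself uses AutoMoqer with MockBehavior.Strict); it reuses the stand-in types from the previous sketch and is illustrative only.

```csharp
using System.Collections.Generic;
using Moq;

public class QualityNeededTestSketch
{
    public Mock<EpisodeProviderStub> BuildEpisodeProviderMock(EpisodeParseResult parseResultSingle, Episode episode)
    {
        var episodeProvider = new Mock<EpisodeProviderStub>();

        // Same shape as the mocker.GetMock<...>().Setup(...) calls in the patch:
        // the test decides which episodes the parse result resolves to, so the
        // class under test never hits the database.
        episodeProvider
            .Setup(p => p.GetEpisodesByParseResult(parseResultSingle, true))
            .Returns(new List<Episode> { episode });

        // The class under test would receive episodeProvider.Object.
        return episodeProvider;
    }
}
```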
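DiskScanProvider no longer takes an IDatabase; the raw Exists/Insert/Delete calls now sit behind thin MediaFileProvider methods. A rough sketch of that seam follows, with a simplified stand-in for the database interface (the real provider uses the project's PetaPoco-style IDatabase and normalizes paths via Parser.NormalizePath before the Exists check).

```csharp
using System;

// Simplified stand-in for the data-access interface; not the real NzbDrone IDatabase.
public interface IMediaDatabase
{
    object Insert(object row);
    bool Exists(string whereClause, params object[] args);
    int Delete(int id);
}

public class EpisodeFile
{
    public int EpisodeFileId { get; set; }
    public string Path { get; set; }
}

public class MediaFileProviderSketch
{
    private readonly IMediaDatabase _database;

    public MediaFileProviderSketch(IMediaDatabase database)
    {
        _database = database;
    }

    // Callers such as DiskScanProvider use these wrappers instead of the database directly,
    // which keeps the scan/import code mockable in tests.
    public virtual int Add(EpisodeFile episodeFile)
    {
        return Convert.ToInt32(_database.Insert(episodeFile));
    }

    public virtual bool Exists(string path)
    {
        // The real provider normalizes the path first.
        return _database.Exists("WHERE Path = @0", path);
    }

    public virtual void Delete(int episodeFileId)
    {
        _database.Delete(episodeFileId);
    }
}
```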
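IndexerProvider now receives its indexers through the constructor instead of newing up an empty list. Assuming the intent is Ninject multi-injection (the project already uses Ninject and its [Inject] attribute), a hypothetical composition root could look roughly like this; the indexer class names below are made up for illustration and are not the project's real indexers.

```csharp
using System.Collections.Generic;
using Ninject;

// Hypothetical indexer hierarchy, standing in for NzbDrone's indexer base class.
public abstract class IndexerBaseSketch
{
    public abstract string Name { get; }
}

public class IndexerA : IndexerBaseSketch { public override string Name => "IndexerA"; }
public class IndexerB : IndexerBaseSketch { public override string Name => "IndexerB"; }

public class IndexerProviderSketch
{
    private readonly IEnumerable<IndexerBaseSketch> _indexers;

    // Mirrors the patched constructor: the container hands in every bound indexer.
    public IndexerProviderSketch(IEnumerable<IndexerBaseSketch> indexers)
    {
        _indexers = indexers;
    }
}

public static class CompositionRootSketch
{
    public static IndexerProviderSketch Build()
    {
        var kernel = new StandardKernel();

        // With more than one binding for the same base type, Ninject's
        // multi-injection fills an IEnumerable<T> constructor argument with
        // all of them (behavior assumed from Ninject's multi-injection support).
        kernel.Bind<IndexerBaseSketch>().To<IndexerA>();
        kernel.Bind<IndexerBaseSketch>().To<IndexerB>();
        kernel.Bind<IndexerProviderSketch>().ToSelf();

        return kernel.Get<IndexerProviderSketch>();
    }
}
```

The design win this enables is that adding a new indexer only requires a new binding; IndexerProvider itself no longer has to know the concrete list.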
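With the Episodes list gone, SabProvider builds the queue name from parseResult.EpisodeTitle. The condensed sketch below only reproduces the formatting exercised by the new SabProviderTest cases; in the real code the series folder comes from new DirectoryInfo(series.Path).Name and the season/episode string is assembled elsewhere, so treat the helper name and parameters here as illustrative.

```csharp
using System;

public static class SabTitleSketch
{
    public static string BuildName(string seriesFolder, int season, int[] episodes,
                                   string episodeTitle, string quality, bool proper)
    {
        // "1x2" style tokens joined with "-" for multi-episode reports.
        var episodeString = String.Join("-",
            Array.ConvertAll(episodes, e => String.Format("{0}x{1}", season, e)));

        var result = String.Format("{0} - {1} - {2} [{3}]",
            seriesFolder, episodeString, episodeTitle, quality);

        if (proper)
            result += " [Proper]";

        return result;
    }
}

// Example: BuildName("My Series Name", 1, new[] { 2 }, "My Episode Title", "DVD", true)
// yields "My Series Name - 1x2 - My Episode Title [DVD] [Proper]",
// matching the updated TestCase expectations.
```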