diff --git a/Libraries/MigSharp.dll b/Libraries/MigSharp.dll new file mode 100644 index 000000000..d0c265902 Binary files /dev/null and b/Libraries/MigSharp.dll differ diff --git a/Libraries/MigSharp.xml b/Libraries/MigSharp.xml new file mode 100644 index 000000000..1dc15a3fc --- /dev/null +++ b/Libraries/MigSharp.xml @@ -0,0 +1,1335 @@ + + + + MigSharp + + + + + Represents a type that knows how to provide database-specific DDL statements. + + + + + Checks whether a user created table exists on the database. The returned SQL command must yield 0 if the table does not exist. + + The database name. + The table name. + The SQL command to be executed. + + + + Converts an object to its SQL representation for scripting. + + + + + Creates a tables with the specified + + The name of the new table. + The columns of the new table. + Empty if there are no primary key columns. + The SQL commands to be executed. + + + + Drops a table. + + The SQL commands to be executed. + + + + Adds columns to an existing table. + + The SQL commands to be executed. + + + + Renames an existing table. + + The SQL commands to be executed. + + + + Renames a column of an existing table. + + The SQL commands to be executed. + + + + Removes a column from an existing table. + + The SQL commands to be executed. + + + + Changes the data type of a column. + + The SQL commands to be executed. + + + + Adds an index to a table. + + The SQL commands to be executed. + + + + Drops an index from a table. + + The SQL commands to be executed. + + + + Adds a foreign key constraint to a table. + + The SQL commands to be executed. + + + + Drops a foreign key constraint from a table. + + The SQL commands to be executed. + + + + Adds a primary key constraint to a table. + + The SQL commands to be executed. + + + + Renames the primary key. + + The SQL commands to be executed. + + + + Drops a primary key constraint from a table. + + The SQL commands to be executed. + + + + Adds an unique constraint to a table. + + The SQL commands to be executed. + + + + Drops a unique constraint from a table. + + The SQL commands to be executed. + + + + Drops the default value (constraint) from a column. + + The SQL commands to be executed. + + + + Represents the main entry point to perform migrations. + + + + + Initializes a new instance of . + + Connection string to the database to be migrated. + The name of the provider that should be used for this migrator (). + Options. + + + + Initializes a new instance of for a specific module. + + Connection string to the database to be migrated. + The name of the provider that should be used for this migrator (). + The name of the module whose migrations should be executed. + + + + Initializes a new instance of with default options. + + Connection string to the database to be migrated. + The name of the provider that should be used for this migrator (). + + + + Executes all pending migrations found in . + + The assembly to search for migrations. + Optional assemblies that hold additional migrations. + + + + Executes all migrations required to reach . + + + + Optional assemblies that hold additional migrations. + + + + Retrieves all pending migrations. + + The assembly that contains the migrations. + Optional assemblies that hold additional migrations. + + + + Retrieves all required migrations to reach . + + The assembly that contains the migrations. + The timestamp to migrate to. + When the migration path would require downgrading a migration which is not reversible. 
+ Optional assemblies that hold additional migrations. + + + + Checks if any migrations are pending to be performed. + + The assembly that contains the migrations. + Optional assemblies that hold additional migrations. + + + + Injects a custom version mechanism. + + + + + Injects a custom bootstrapping mechanism. + + + + + Represents a database. + + + + + Creates a new table on the database. + + The name of the new table. + Optionally, the name of the primary key constraint. + + + + Executes a custom query. + + Custom SQL which must be understood by all providers that should be supported by this migration. + + + + Executes a custom action against the . Use this method if you need to directly access the + underlying or . + + + + + Gets the context of the migration. + + + + + Gets existing tables. + + + + + Contains the extension methods for the interface. + + + + + Creates a new table on the database with a default primary key constraint name. + + + + + Represents the context of a migration. + + + + + Gets the metadata describing the underlying provider. + + + + + Represents an index. + + + + + Drops the index. + + + + + Represents an index which is about to be added to a table. + + + + + Adds the index on the provided column. + + + + + Represents a primary key constraint which is about to be added to a table. + + + + + Adds the index on the provided column. + + + + + Represents a batch of s. + + + + + Performs the migrations contained in this batch. + + + + + Raised before each migration that will be executed. + + + + + Raised after each migration that has been executed. + + + + + Gets the number of migrations in this batch. + + + + + Gets a list of s that were used to create new objects (excluding primary key columns). + + + + + Gets a list of s that were used to create primary key columns. + + + + + Gets a list of names of any created objects. + + + + + Gets a list of method names that were used for the migration. + + + + + Represents a collection of existing indexes of a table. + + + + + Gets an index by name. + + + + + Represents an unique constraint. + + + + + Drops the unique constraint. + + + + + Represents a foreign key constraint. + + + + + Drops the foreign key constraint. + + + + + Executes the migration step and updates the versioning information in one transaction. + + Might be null in the case of a bootstrap step. + + + + Executes the migration step and updates the versioning information in one transaction. + + + + + Represents a pair of column names which is used as an element of a foreign key relationship. + + + + + Gets the name of the referencing column. + + + + + Gets the name of the referenced column. + + + + + Represents the version of a database containing all information about past migrations. + + + + + Verifies if a specific migration was executed. + + It might seems odd that this method does not receive a connection. However, the idea is that the object implementing + this interface initializes itself once upon construction and the answers the calls to this method from a cache. + Thus, it would not need to do any round-trip to the database which is important as this method is called for each migration. + + + + + + Updates the versioning to include or exclude the migration depending on the provided . + + Post-condition: if was , must return true. + Otherwise was and must return false. + + + The metadata of the migration to be included in the versioning table. + An open connection to the database containing the versioning table. + The associated transaction. 
+ The direction of the migration. + Used to execute s. + + + + Represents a created table. + + + + + Represents a created table. + + + + + Adds a non-nullable column which is part of the primary key constraint to the table being created. + + + + + Adds a non-nullable column to the table being created. + + + + + Adds a nullable column to the table being created. + + + + + Gets the name of the table. + + + + + Represents a unique constraint which is about to be added to a table. + + + + + Adds the unique constraint on the provided column. + + + + + Declares the support of a specific data type. + + + + + Initializes a new instance. + + + + + Gets the supported data type. + + + + + Gets or sets the maximum length for character data types or the maximum total number of decimal digits for numeric data types. + + + + + Gets or sets the maximum number of decimal digits that can be stored to the right of the decimal point. Scale is a value from 0 through . + + + + + Indicates if the data type can be used in primary key columns. By default, this is false. + + + + + Gets or sets a warning message if there are any restrictions when using this . + The warning message is logged when executing a migration containing this data type. + + + + + Indicates if the data type can be scripted. + + + + + Represents an added column on an existing table. + + + + + Represents a table that was created before. + + + + + Adds a not-nullable column. + + + + + Adds a nullable column. + + + + + Gets the name of the table. + + + + + Specifies the size and the scale of the data type of the column. + + The length for character data types or the maximum total number of decimal digits for numeric data types. + The maximum number of decimal digits that can be stored to the right of the decimal point. Scale must be a value from 0 through . + + + + Adds a default value to the column. + + + + + Adds a default value to the column. + + + + + Adds a default value to the column which is dropped after adding the column to the table. + Use this method to fill a non-nullable column with default values. + + + + + Adds a default value to the column which is dropped after adding the column to the table. + Use this method to fill a non-nullable column with default values. + + + + + Contains the extensions methods for the interface. + + + + + Specifies the size of the data type of the column. + + + The length for character data types or the maximum total number of decimal digits for numeric data types. + + + + Sets the default of the column to be the current system time of the database server. + + + + + Reperesents metadata about an . + + + + + Gets the unique name of this provider. + + + + + Gets the invariant name of the provider needed for . + + + + + Gets an indication if the underlying provider supports transactions. + + + + + Gets an expression that specifies how s are addressed in command texts. The literal 'p' is replaced by the parameter name. + + + + + Gets the maximum length of object names within the database. 0 meaning that there is non restriction which is the default. + + + + + Represents a column on a newly created table. + + + + + Specifies the size and the scale of the data type of the column. + + The length for character data types or the maximum total number of decimal digits for numeric data types. + The maximum number of decimal digits that can be stored to the right of the decimal point. Scale must be a value from 0 through . + + + + Puts the column under an unique constraint. 
+ + Optionally, specify the name of the unique constraint. If null or empty, a default constraint name will be generated. + + + + Makes the column auto-increment. + + + + + Adds a default value to the column. + + + + + Adds a default value to the column. + + + + + Gets the column name. + + + + + This exception is thrown when a requested downgrade path contains an irreversible migration. + + + + + Contains the names of the supported providers. + + + + + Represents the run-time context of a migration. + + + + + Gets the connection which is used to perform the migration. + + + + + Gets the transaction which is used to perform the migration. + + + + + Gets the that should be used to execute database modifying commands. + This ensures that they are logged and scripted consistently. + + + + + Lists special default values for columns. + + + + + Represents the current date time of the database server. + + + + + Represents a collection of providers that should be supported for all migrations. Validation of migrations is performed + against providers contained within this list. + + + + + Initializes a new instance used for unit testing. + + + + + Initializes a new instance which is ready-to-use with the default providers. + + + + + Adds a provider. + + + + + Removes a provider. + + + + + Removes all ODBC providers. + + + + + Sets the collection to a list of providers. + + + + + Resets the collection to support all providers that are delivered with Mig#. + + + + + Gets the names of the providers. + + + + + Represents a collection of existing unique constraints of a table. + + + + + Gets an unique constraint by name. + + + + + Use this attribute to mark classes as migrations. + + + + + The maximum string length for a module name. + + + + + Initializes a new instance of the attribute. + + + + + Gets the name of the module to which this migration belongs to (see also ). + + + + + Gets the tag associated with this migration. + + + + + Represents a migration that can also be downgraded. + + + + + The interface that needs to be implemented in order to define a migration. + Additionally, the must be applied + to a class implementing this interface in order to be recognized as a migration. + + + + + Applies the required changes to the provided for this migration. + + + + + Undoes all changes from the method. + + + + + Represents a foreign key constraint which is about to be added to a table. + + + + + Specifies the columns on which the foreign key is defined. + + + This method is not called 'On', as 'On' is a reserved language keyword (see FxCop rule "Identifiers should not match keywords"). + + + + + Contains information about used data types, longest name, etc. for a specific migration. + + + + + Gets the full type name of the migration for this report. + + + + + Gets the error message for the migration if there was an . + + + + + Gets a list of s that were used to create new objects (including primary key columns). + + + + + Gets a list of s that were used to create primary key columns. + + + + + Gets the longest name of any created objects. + + + + + Gets a list of used provider method names. + + + + + Marks the class as an provider implementing . + + + + + Initializes a new instance. + + + + + Gets the unique name of this provider. + + + + + Gets the invariant name of the provider needed for . + + + + + Gets or sets an indication if the underlying provider supports transactions. True by default. + + + + + Gets an expression that specifies how s are addressed in command texts. 
The literal 'p' is replaced by the parameter name. + The default is '@p'. + + + + + Gets or sets the maximum length of object names within the database. 0 meaning that there is non restriction which is the default. + + + + + Represents a collection of existing foreign key constraints of a table. + + + + + Gets an foreign key constraint by name. + + + + + Represents an existing column whose data type is being altered. + + + + + Specifies the size and the scale of the new data type of the column. + + The length for character data types or the maximum total number of decimal digits for numeric data types. + The maximum number of decimal digits that can be stored to the right of the decimal point. Scale must be a value from 0 through . + + + + Adds a default value to the column. + + + + + Adds a default value to the column. + + + + + Contains the extensions methods for the interface. + + + + + Specifies the size of the new data type of the column. + + + The length for character data types or the maximum total number of decimal digits for numeric data types. + + + + Sets the default of the column to be the current system time of the database server. + + + + + MigSharp provider for Microsoft SQL Compact Edition 4.0. + + + + + Represents a collection of existing tables. + + + + + Gets a table by its name. + + + + + Represents a collection of existing columns. + + + + + Gets a column by its name. + + + + + Represents an existing column. + + + + + Renames the column. + + + + + Removes the column from its table. + + + + + Alters the column to be a nullable column. + + A new or the old data type of the column. + + + + Alters the column to be a nullable column. + + A new or the old data type of the column. + + + + Gets the name of the column. + + + + + Gets the name of the table of the column. + + + + + Contains the extensions methods for the interface. + + + + + Specifies the size of the data type of the column. + + + The length for character data types or the maximum total number of decimal digits for numeric data types. + + + + Puts the column under an unique constraint with a default constraint name. + + + + + Sets the default of the column to be the current system time of the database server. + + + + + Visits all commands executed against a and translates them into SQL + using a specific . + + + + + Translates the recorded commands on the to SQL commands. + + + + + Represents a SQL data type. + + + + + Initializes a new instance of . + + + + + Used in validation messages and for debugging. + + + + + Gets the length for character data types or the maximum total number of decimal digits for numeric data types. + + + + + Gets the maximum number of decimal digits that can be stored to the right of the decimal point. Scale is a value from 0 through . + + + + + Use this class to configure the behaviour of the . + + + + + Initializes an instance of default options. + + + + + Initializes options that select migrations for specific module only. + + The name of the selected module. Only migrations for this module will be executed. + + + + Suppresses validation warnings for the provider called and the data type under the . + + + + + Outputs the SQL used for the migrations to external files without affecting the database. + + + + + Outputs the SQL used for the migrations to external files without affecting the database. + + + + + Outputs the SQL used for the migrations to external files while migrating the database. 
+ + + + + Outputs the SQL used for the migrations to external files while migrating the database. + + + + + Sets the level of general information being traced. + + + + + Sets the level of SQL information being traced. + + + + + Sets the level of performance information being traced. + + + + + Gets or sets the table name of the versioning table. + + + + + Gets the providers that should be supported for all migrations. Compatibility validation of migrations is performed + against the providers in this collection. + + + + + Gets or sets a function that selects the module based on its name. Only migrations for this module will be executed. + + + + + Expresses under which circumstances a warning should be expressed for a given and its OfSize parameters. + + + + + Suppresses all warnings for the specified . Use diligently. + + + + + Suppresses warnings for the specified when it is used without a specified size. + + + + + Suppresses warnings for the specified when it is used with a specified size. + + + + + Suppresses warnings for the specified when it is used with a specified size and a specified scale. + + + + + Validates s against the list of supported providers. + + + + + Represents a table that was created before. + + + + + Gets the primary key constraint of the table. + + Optionally, the name of the primary key constraint. If null or empty, the default name will be used. + + + + Renames the table. + + + + + Drops the table. + + + + + Adds a primary key constraint to the table. + + Optionally, the primary key constraint name. If null or empty, a default name will be generated. + + + + Adds an index to the table. + + Optionally, the index name. If null or empty, a default name will be generated. + + + + Adds a foreign key constraint to another table. + + The name of the referenced table. + Optionally, the name of the foreign key constraint. If null or empty, a default name will be generated. + + + + Adds an unique constraint to the table. + + Optionally, the name of the unique constraint. If null or empty, a default name will be generated. + + + + Gets the columns of the table. + + + + + Gets the unique constraints of the table. + + + + + Gets the unique constraints of the table. + + + + + Gets the foreign key constraints of the table. + + + + + Represents a primary key constraint. + + + + + Drops the primary key constraint. + + + + + Renames the primary key constraint and all associated resources (e.g. Oracle maintains an index along with the primary key which is renamed, too). + + + + + + Returns the longer string. + + + + + Returns the longest string. + + + + + Creates a name with the following structure: '_[0]_[1]_..._'. + The contained names are shortened such that the complete generated name fits characters. + + The table name. + The postfix to be appended to the name. The postfix will *not* be shortened in any way. + The maximum length of the generated name. + Additional names that should be included in the result which follow the table name. + + + + Event arguments for migration events. + + + + + Initializes a new instance. + + + + + Gets the associated metadata. + + + + + Gets the direction of the migration step. + + + + + Implements a custom bootstrapping logic which is executed if the versioning table of MigSharp doe not exists yet. + + + + + Triggers whatever actions are needed to prepare the custom bootstrapping. This method is called exactly once, + before is called any times. + + The connection used to update the versioning table. 
+ The transaction used to update the versioning table. + + + + Returns true if the migration should be assumed as already executed when bootstrapping the versioning. + + + + + Triggers whatever actions are needed to finish the custom bootstrapping. This method is called exactly once, + after is called any times. + + The connection used to update the versioning table. + The transaction used to update the versioning table. + + + + Contains extension methods for the interface. + + + + + Gets the primary key constraint of the table with the default name. + + + + + Adds a primary key constraint to the table with the default name. + + + + + Adds an index to the table. + + + + + Adds a foreign key constraint to another table with the default name. + + + The name of the referenced table. + + + + Adds an unique constraint to the table with the default name. + + + + + Gets an unique constraint by the name of its first column. + + + + + Gets an foreign key constraint by the name of its referenced table. + + + + + Implements without creating a versioning table until it is really needed. + + + + diff --git a/NzbDrone.Core.Test/App.config b/NzbDrone.Core.Test/App.config index 6e63be8ce..0d29e6625 100644 --- a/NzbDrone.Core.Test/App.config +++ b/NzbDrone.Core.Test/App.config @@ -19,5 +19,11 @@ + + + + + + \ No newline at end of file diff --git a/NzbDrone.Core.Test/Framework/MockLib.cs b/NzbDrone.Core.Test/Framework/MockLib.cs index 0030f9682..9adc0f271 100644 --- a/NzbDrone.Core.Test/Framework/MockLib.cs +++ b/NzbDrone.Core.Test/Framework/MockLib.cs @@ -10,6 +10,7 @@ using NzbDrone.Core.Instrumentation; using NzbDrone.Core.Providers.Core; using NzbDrone.Core.Repository; using NzbDrone.Core.Repository.Quality; +using PetaPoco; using SubSonic.DataProviders; using SubSonic.Repository; @@ -38,7 +39,7 @@ namespace NzbDrone.Core.Test.Framework public static IRepository GetEmptyRepository(bool enableLogging = false, string fileName = "") { - Console.WriteLine("Creating an empty SQLite database"); + Console.WriteLine("Creating an empty Subsonic repository"); if (String.IsNullOrWhiteSpace(fileName)) { @@ -50,7 +51,7 @@ namespace NzbDrone.Core.Test.Framework var repo = Connection.CreateSimpleRepository(provider); ForceMigration(repo); - Migrations.Run(Connection.GetConnectionString(fileName), false); + //Migrations.Run(Connection.GetConnectionString(fileName), false); if (enableLogging) { @@ -62,6 +63,24 @@ namespace NzbDrone.Core.Test.Framework return repo; } + public static IDatabase GetEmptyDatabase(bool enableLogging = false, string fileName = "") + { + Console.WriteLine("Creating an empty PetaPoco database"); + + if (String.IsNullOrWhiteSpace(fileName)) + { + fileName = Guid.NewGuid() + ".db"; + } + + var connectionString = Connection.GetConnectionString(fileName); + + MigrationsHelper.MigrateDatabase(connectionString); + + var database = Connection.GetPetaPocoDb(connectionString); + + return database; + } + public static DiskProvider GetStandardDisk(int seasons, int episodes) { var mock = new Mock(); diff --git a/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj b/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj index 8776fe479..fbbb24f17 100644 --- a/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj +++ b/NzbDrone.Core.Test/NzbDrone.Core.Test.csproj @@ -36,6 +36,9 @@ True + + ..\packages\Castle.Core.2.5.2\lib\NET35\Castle.Core.dll + ..\packages\NBuilder.2.3.0.0\lib\FizzWare.NBuilder.dll @@ -72,11 +75,9 @@ ..\packages\NUnit.2.5.10.11092\lib\pnunit.framework.dll - - False - ..\Libraries\SubSonic.Core.dll - + + 
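
An illustrative sketch (not part of the patch): GetEmptyDatabase above hands a test a freshly created SQLite file that has already been migrated by Mig# and is exposed through PetaPoco's IDatabase. A minimal round-trip test could look like the following; the SeriesPoco type, its property values and the fixture name are assumptions, not code from this change.

    using System;
    using NUnit.Framework;
    using NzbDrone.Core.Test.Framework;
    using PetaPoco;

    // Hypothetical POCO for illustration; the real entity lives in NzbDrone.Core.Repository.
    [TableName("Series")]
    [PrimaryKey("SeriesId")]
    public class SeriesPoco
    {
        public int SeriesId { get; set; }
        public string Title { get; set; }
        public string Path { get; set; }
        public bool Monitored { get; set; }
        public int QualityProfileId { get; set; }
        public bool SeasonFolder { get; set; }
        public DayOfWeek? AirsDayOfWeek { get; set; }   // nullable enum: CustomeMapper turns a stored 0 back into null
    }

    [TestFixture]
    public class PetaPocoRoundTripFixture
    {
        [Test]
        public void should_round_trip_a_row_through_the_petapoco_database()
        {
            var db = MockLib.GetEmptyDatabase();        // fresh SQLite file, schema created by MigrationsHelper.MigrateDatabase

            var series = new SeriesPoco
                             {
                                 Title = "Test Show",
                                 Path = @"C:\TV\Test Show",
                                 Monitored = true,
                                 QualityProfileId = 1,
                                 SeasonFolder = true
                             };

            var id = db.Insert(series);                 // SeriesId is an identity column, so the new id is returned
            var fetched = db.SingleOrDefault<SeriesPoco>(id);

            Assert.IsNotNull(fetched);
            Assert.AreEqual("Test Show", fetched.Title);
        }
    }
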
..\Libraries\System.Data.SQLite.dll diff --git a/NzbDrone.Core.Test/RepoTest.cs b/NzbDrone.Core.Test/RepoTest.cs index 55852a919..c206d4136 100644 --- a/NzbDrone.Core.Test/RepoTest.cs +++ b/NzbDrone.Core.Test/RepoTest.cs @@ -42,7 +42,6 @@ namespace NzbDrone.Core.Test } [Test] - [Ignore] public void query_scratch_pad() { diff --git a/NzbDrone.Core.Test/SeriesProviderTest.cs b/NzbDrone.Core.Test/SeriesProviderTest.cs index bd5086dd6..d8f4a283a 100644 Binary files a/NzbDrone.Core.Test/SeriesProviderTest.cs and b/NzbDrone.Core.Test/SeriesProviderTest.cs differ diff --git a/NzbDrone.Core.Test/packages.config b/NzbDrone.Core.Test/packages.config index 764a1f0cd..05e7c4fc9 100644 --- a/NzbDrone.Core.Test/packages.config +++ b/NzbDrone.Core.Test/packages.config @@ -7,4 +7,6 @@ + + \ No newline at end of file diff --git a/NzbDrone.Core/CentralDispatch.cs b/NzbDrone.Core/CentralDispatch.cs index a1e4b894c..8f9e7811f 100644 --- a/NzbDrone.Core/CentralDispatch.cs +++ b/NzbDrone.Core/CentralDispatch.cs @@ -15,6 +15,7 @@ using NzbDrone.Core.Providers.Indexer; using NzbDrone.Core.Providers.Jobs; using NzbDrone.Core.Repository; using NzbDrone.Core.Repository.Quality; +using PetaPoco; using SubSonic.DataProviders; using SubSonic.Repository; @@ -56,7 +57,7 @@ namespace NzbDrone.Core LogConfiguration.StartDbLogging(); - Migrations.Run(Connection.MainConnectionString, true); + MigrationsHelper.Run(Connection.MainConnectionString, true); _kernel.Get().SetupDefaultProfiles(); @@ -95,6 +96,7 @@ namespace NzbDrone.Core _kernel.Bind().ToSelf().InSingletonScope(); _kernel.Bind().ToConstant(Connection.CreateSimpleRepository(Connection.MainConnectionString)).InSingletonScope(); + _kernel.Bind().ToConstant(Connection.GetPetaPocoDb(Connection.MainConnectionString)).InRequestScope(); _kernel.Bind().ToConstant(Connection.CreateSimpleRepository(Connection.LogConnectionString)).WhenInjectedInto().InSingletonScope(); _kernel.Bind().ToConstant(Connection.CreateSimpleRepository(Connection.LogConnectionString)).WhenInjectedInto().InSingletonScope(); } diff --git a/NzbDrone.Core/Datastore/Connection.cs b/NzbDrone.Core/Datastore/Connection.cs index 40abbf572..e8eedf736 100644 --- a/NzbDrone.Core/Datastore/Connection.cs +++ b/NzbDrone.Core/Datastore/Connection.cs @@ -1,6 +1,8 @@ using System; +using System.Data.SQLite; using System.IO; using MvcMiniProfiler.Data; +using PetaPoco; using SubSonic.DataProviders; using SubSonic.DataProviders.SQLite; using SubSonic.Repository; @@ -53,6 +55,16 @@ namespace NzbDrone.Core.Datastore return new SimpleRepository(GetDataProvider(connectionString), SimpleRepositoryOptions.RunMigrations); } + public static IDatabase GetPetaPocoDb(string connectionString) + { + var profileConnection = ProfiledDbConnection.Get(new SQLiteConnection(connectionString)); + PetaPoco.Database.Mapper = new CustomeMapper(); + var db = new PetaPoco.Database(profileConnection); + db.OpenSharedConnection(); + + return db; + } + } @@ -67,6 +79,7 @@ namespace NzbDrone.Core.Datastore public override System.Data.Common.DbConnection CreateConnection(string connectionString) { return ProfiledDbConnection.Get(base.CreateConnection(connectionString)); + } } } diff --git a/NzbDrone.Core/Datastore/CustomeMapper.cs b/NzbDrone.Core/Datastore/CustomeMapper.cs new file mode 100644 index 000000000..cb993927a --- /dev/null +++ b/NzbDrone.Core/Datastore/CustomeMapper.cs @@ -0,0 +1,33 @@ +using System; +using PetaPoco; + +namespace NzbDrone.Core.Datastore +{ + public class CustomeMapper : DefaultMapper + { + public override Func 
GetFromDbConverter(DestinationInfo destinationInfo, Type SourceType) + { + + if ((SourceType == typeof(Int32) || SourceType == typeof(Int64)) && destinationInfo.Type.IsGenericType && destinationInfo.Type.GetGenericTypeDefinition() == typeof(Nullable<>)) + { + // If it is NULLABLE, then get the underlying type. eg if "Nullable" then this will return just "int" + if (destinationInfo.Type.GetGenericArguments()[0].IsEnum) + { + return delegate(object s) + { + int value; + Int32.TryParse(s.ToString(), out value); + if (value == 0) + { + return null; + } + + return (Nullable)value; + }; + } + } + + return base.GetFromDbConverter(destinationInfo, SourceType); + } + } +} \ No newline at end of file diff --git a/NzbDrone.Core/Datastore/Migrations/Legacy/Migration.cs b/NzbDrone.Core/Datastore/Migrations/Legacy/Migration.cs new file mode 100644 index 000000000..2d5f74f2c --- /dev/null +++ b/NzbDrone.Core/Datastore/Migrations/Legacy/Migration.cs @@ -0,0 +1,69 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using Migrator.Framework; + +namespace NzbDrone.Core.Datastore.Migrations.Legacy +{ + [Migration(20110523)] + public class Migration20110523 : Migration + { + public override void Up() + { + Database.RemoveTable(RepositoryProvider.JobsSchema.Name); + } + + public override void Down() + { + throw new NotImplementedException(); + } + } + + [Migration(20110603)] + public class Migration20110603 : Migration + { + public override void Up() + { + Database.RemoveTable("Seasons"); + + MigrationsHelper.RemoveDeletedColumns(Database); + MigrationsHelper.AddNewColumns(Database); + } + + public override void Down() + { + throw new NotImplementedException(); + } + } + + [Migration(20110604)] + public class Migration20110604 : Migration + { + public override void Up() + { + MigrationsHelper.ForceSubSonicMigration(Connection.CreateSimpleRepository(Connection.MainConnectionString)); + + var episodesTable = RepositoryProvider.EpisodesSchema; + //Database.AddIndex("idx_episodes_series_season_episode", episodesTable.Name, true, + // episodesTable.GetColumnByPropertyName("SeriesId").Name, + // episodesTable.GetColumnByPropertyName("SeasonNumber").Name, + // episodesTable.GetColumnByPropertyName("EpisodeNumber").Name); + + Database.AddIndex("idx_episodes_series_season", episodesTable.Name, false, + episodesTable.GetColumnByPropertyName("SeriesId").Name, + episodesTable.GetColumnByPropertyName("SeasonNumber").Name); + + Database.AddIndex("idx_episodes_series", episodesTable.Name, false, + episodesTable.GetColumnByPropertyName("SeriesId").Name); + + MigrationsHelper.RemoveDeletedColumns(Database); + MigrationsHelper.AddNewColumns(Database); + } + + public override void Down() + { + throw new NotImplementedException(); + } + } +} diff --git a/NzbDrone.Core/Datastore/Migrations/MigrationExport.cs b/NzbDrone.Core/Datastore/Migrations/MigrationExport.cs new file mode 100644 index 000000000..74fef9048 --- /dev/null +++ b/NzbDrone.Core/Datastore/Migrations/MigrationExport.cs @@ -0,0 +1,32 @@ +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Text; +using MigSharp; + +namespace NzbDrone.Core.Datastore.Migrations +{ + [MigrationExport] + internal class Migration1 : IMigration + { + public void Up(IDatabase db) + { + db.CreateTable("Series") + .WithPrimaryKeyColumn("SeriesId", DbType.Int32).AsIdentity() + .WithNullableColumn("Title", DbType.String) + .WithNullableColumn("CleanTitle", DbType.String) + .WithNullableColumn("Status", 
DbType.String) + .WithNullableColumn("Overview", DbType.String) + .WithNullableColumn("AirsDayOfWeek", DbType.Int16) + .WithNullableColumn("AirTimes", DbType.String) + .WithNullableColumn("Language", DbType.String) + .WithNotNullableColumn("Path", DbType.String) + .WithNotNullableColumn("Monitored", DbType.Boolean) + .WithNotNullableColumn("QualityProfileId", DbType.Int16) + .WithNotNullableColumn("SeasonFolder", DbType.Boolean) + .WithNullableColumn("LastInfoSync", DbType.DateTime) + .WithNullableColumn("LastDiskSync", DbType.DateTime); + } + } +} diff --git a/NzbDrone.Core/Datastore/Migrations.cs b/NzbDrone.Core/Datastore/MigrationsHelper.cs similarity index 63% rename from NzbDrone.Core/Datastore/Migrations.cs rename to NzbDrone.Core/Datastore/MigrationsHelper.cs index b87f8589f..d73acfbc0 100644 --- a/NzbDrone.Core/Datastore/Migrations.cs +++ b/NzbDrone.Core/Datastore/MigrationsHelper.cs @@ -5,6 +5,7 @@ using System.Linq; using System.Reflection; using System.Text; using Migrator.Framework; +using MigSharp; using NLog; using NzbDrone.Core.Repository; using NzbDrone.Core.Repository.Quality; @@ -14,7 +15,7 @@ using SubSonic.Schema; namespace NzbDrone.Core.Datastore { - public class Migrations + public class MigrationsHelper { private static readonly Logger Logger = LogManager.GetCurrentClassLogger(); @@ -27,11 +28,11 @@ namespace NzbDrone.Core.Datastore Migrator.Migrator migrator; if (trace) { - migrator = new Migrator.Migrator("Sqlite", connetionString, Assembly.GetAssembly(typeof(Migrations)), true, new MigrationLogger()); + migrator = new Migrator.Migrator("Sqlite", connetionString, Assembly.GetAssembly(typeof(MigrationsHelper)), true, new MigrationLogger()); } else { - migrator = new Migrator.Migrator("Sqlite", connetionString, Assembly.GetAssembly(typeof(Migrations))); + migrator = new Migrator.Migrator("Sqlite", connetionString, Assembly.GetAssembly(typeof(MigrationsHelper))); } @@ -50,6 +51,13 @@ namespace NzbDrone.Core.Datastore } } + + public static void MigrateDatabase(string connectionString) + { + var migrator = new MigSharp.Migrator(connectionString, ProviderNames.SQLite); + migrator.MigrateAll(typeof(MigrationsHelper).Assembly); + } + public static void ForceSubSonicMigration(IRepository repository) { repository.Single(1); @@ -113,64 +121,5 @@ namespace NzbDrone.Core.Datastore } - [Migration(20110523)] - public class Migration20110523 : Migration - { - public override void Up() - { - Database.RemoveTable(RepositoryProvider.JobsSchema.Name); - } - - public override void Down() - { - throw new NotImplementedException(); - } - } - - [Migration(20110603)] - public class Migration20110603 : Migration - { - public override void Up() - { - Database.RemoveTable("Seasons"); - - Migrations.RemoveDeletedColumns(Database); - Migrations.AddNewColumns(Database); - } - public override void Down() - { - throw new NotImplementedException(); - } - } - - [Migration(20110604)] - public class Migration20110604 : Migration - { - public override void Up() - { - Migrations.ForceSubSonicMigration(Connection.CreateSimpleRepository(Connection.MainConnectionString)); - - var episodesTable = RepositoryProvider.EpisodesSchema; - //Database.AddIndex("idx_episodes_series_season_episode", episodesTable.Name, true, - // episodesTable.GetColumnByPropertyName("SeriesId").Name, - // episodesTable.GetColumnByPropertyName("SeasonNumber").Name, - // episodesTable.GetColumnByPropertyName("EpisodeNumber").Name); - - Database.AddIndex("idx_episodes_series_season", episodesTable.Name, false, - 
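
An illustrative sketch (not part of the patch): with Mig#, each further schema change becomes its own exported migration in this assembly, and MigrationsHelper.MigrateDatabase above picks it up through Migrator.MigrateAll. The class below reuses only the fluent calls already used by Migration1; the Episodes table, its column list and the Migration2 name are assumptions about how a follow-up migration could look, assuming the same naming convention carries the ordering.

    using System.Data;
    using MigSharp;

    namespace NzbDrone.Core.Datastore.Migrations
    {
        [MigrationExport]
        internal class Migration2 : IMigration      // hypothetical follow-up migration
        {
            public void Up(IDatabase db)
            {
                // Column list is illustrative only.
                db.CreateTable("Episodes")
                    .WithPrimaryKeyColumn("EpisodeId", DbType.Int32).AsIdentity()
                    .WithNotNullableColumn("SeriesId", DbType.Int32)
                    .WithNotNullableColumn("SeasonNumber", DbType.Int32)
                    .WithNotNullableColumn("EpisodeNumber", DbType.Int32)
                    .WithNullableColumn("Title", DbType.String)
                    .WithNullableColumn("AirDate", DbType.DateTime);
            }
        }
    }
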
episodesTable.GetColumnByPropertyName("SeriesId").Name, - episodesTable.GetColumnByPropertyName("SeasonNumber").Name); - - Database.AddIndex("idx_episodes_series", episodesTable.Name, false, - episodesTable.GetColumnByPropertyName("SeriesId").Name); - - Migrations.RemoveDeletedColumns(Database); - Migrations.AddNewColumns(Database); - } - - public override void Down() - { - throw new NotImplementedException(); - } - } } \ No newline at end of file diff --git a/NzbDrone.Core/Models/PetaPoco.cs b/NzbDrone.Core/Models/PetaPoco.cs new file mode 100644 index 000000000..24555a31c --- /dev/null +++ b/NzbDrone.Core/Models/PetaPoco.cs @@ -0,0 +1,2447 @@ +/* PetaPoco v4.0.2 - A Tiny ORMish thing for your POCO's. + * Copyright © 2011 Topten Software. All Rights Reserved. + * + * Apache License 2.0 - http://www.toptensoftware.com/petapoco/license + * + * Special thanks to Rob Conery (@robconery) for original inspiration (ie:Massive) and for + * use of Subsonic's T4 templates, Rob Sullivan (@DataChomp) for hard core DBA advice + * and Adam Schroder (@schotime) for lots of suggestions, improvements and Oracle support + */ + +#define PETAPOCO_NO_DYNAMIC //in your project settings on .NET 3.5 + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Configuration; +using System.Data.Common; +using System.Data; +using System.Text.RegularExpressions; +using System.Reflection; +using System.Reflection.Emit; +using System.Linq.Expressions; + + +// ReSharper disable +namespace PetaPoco + +{ + // Poco's marked [Explicit] require all column properties to be marked + [AttributeUsage(AttributeTargets.Class)] + public class ExplicitColumnsAttribute : Attribute + { + } + // For non-explicit pocos, causes a property to be ignored + [AttributeUsage(AttributeTargets.Property)] + public class IgnoreAttribute : Attribute + { + } + + // For explicit pocos, marks property as a column + [AttributeUsage(AttributeTargets.Property)] + public class ColumnAttribute : Attribute + { + public ColumnAttribute() { } + public ColumnAttribute(string name) { Name = name; } + public string Name { get; set; } + } + + // For explicit pocos, marks property as a column + [AttributeUsage(AttributeTargets.Property)] + public class ResultColumnAttribute : ColumnAttribute + { + public ResultColumnAttribute() { } + public ResultColumnAttribute(string name) : base(name) { } + } + + // Specify the table name of a poco + [AttributeUsage(AttributeTargets.Class)] + public class TableNameAttribute : Attribute + { + public TableNameAttribute(string tableName) + { + Value = tableName; + } + public string Value { get; private set; } + } + + // Specific the primary key of a poco class (and optional sequence name for Oracle) + [AttributeUsage(AttributeTargets.Class)] + public class PrimaryKeyAttribute : Attribute + { + public PrimaryKeyAttribute(string primaryKey) + { + Value = primaryKey; + autoIncrement = true; + } + + public string Value { get; private set; } + public string sequenceName { get; set; } + public bool autoIncrement { get; set; } + } + + [AttributeUsage(AttributeTargets.Property)] + public class AutoJoinAttribute : Attribute + { + public AutoJoinAttribute() { } + } + + [AttributeUsage(AttributeTargets.Property)] + public class VersionColumnAttribute : ColumnAttribute + { + public VersionColumnAttribute() { } + public VersionColumnAttribute(string name) : base(name) { } + } + + // Results from paged request + public class Page + { + public long CurrentPage { get; set; 
} + public long TotalPages { get; set; } + public long TotalItems { get; set; } + public long ItemsPerPage { get; set; } + public List Items { get; set; } + public object Context { get; set; } + } + + // Pass as parameter value to force to DBType.AnsiString + public class AnsiString + { + public AnsiString(string str) + { + Value = str; + } + public string Value { get; private set; } + } + + // Used by IMapper to override table bindings for an object + public class TableInfo + { + public string TableName { get; set; } + public string PrimaryKey { get; set; } + public bool AutoIncrement { get; set; } + public string SequenceName { get; set; } + } + + // Optionally provide and implementation of this to Database.Mapper + public interface IMapper + { + void GetTableInfo(Type t, TableInfo ti); + bool MapPropertyToColumn(PropertyInfo pi, ref string columnName, ref bool resultColumn); + Func GetFromDbConverter(DestinationInfo destinationInfo, Type SourceType); + Func GetToDbConverter(Type SourceType); + } + + public class DefaultMapper : IMapper + { + public virtual void GetTableInfo(Type t, TableInfo ti) { } + public virtual bool MapPropertyToColumn(PropertyInfo pi, ref string columnName, ref bool resultColumn) + { + return true; + } + public virtual Func GetFromDbConverter(DestinationInfo destinationInfo, Type SourceType) + { + return null; + } + public virtual Func GetToDbConverter(Type SourceType) + { + return null; + } + } + + public class DestinationInfo + { + public DestinationInfo(Type type) + { + Type = type; + } + + public DestinationInfo(PropertyInfo propertyInfo) + { + PropertyInfo = propertyInfo; + Type = propertyInfo.PropertyType; + } + + public PropertyInfo PropertyInfo { get; private set; } + public Type Type { get; private set; } + } + + public interface IDatabaseQuery + { + void OpenSharedConnection(); + void CloseSharedConnection(); + int Execute(string sql, params object[] args); + int Execute(Sql sql); + T ExecuteScalar(string sql, params object[] args); + T ExecuteScalar(Sql sql); + List Fetch(); + List Fetch(string sql, params object[] args); + List Fetch(Sql sql); + List Fetch(long page, long itemsPerPage, string sql, params object[] args); + List Fetch(long page, long itemsPerPage, Sql sql); + Page Page(long page, long itemsPerPage, string sql, params object[] args); + Page Page(long page, long itemsPerPage, Sql sql); + List Fetch(Func cb, string sql, params object[] args); + List Fetch(Func cb, string sql, params object[] args); + List Fetch(Func cb, string sql, params object[] args); + IEnumerable Query(Func cb, string sql, params object[] args); + IEnumerable Query(Func cb, string sql, params object[] args); + IEnumerable Query(Func cb, string sql, params object[] args); + List Fetch(Func cb, Sql sql); + List Fetch(Func cb, Sql sql); + List Fetch(Func cb, Sql sql); + IEnumerable Query(Func cb, Sql sql); + IEnumerable Query(Func cb, Sql sql); + IEnumerable Query(Func cb, Sql sql); + List Fetch(string sql, params object[] args); + List Fetch(string sql, params object[] args); + List Fetch(string sql, params object[] args); + IEnumerable Query(string sql, params object[] args); + IEnumerable Query(string sql, params object[] args); + IEnumerable Query(string sql, params object[] args); + IEnumerable Query(Type[] types, object cb, string sql, params object[] args); + List Fetch(Sql sql); + List Fetch(Sql sql); + List Fetch(Sql sql); + IEnumerable Query(Sql sql); + IEnumerable Query(Sql sql); + IEnumerable Query(Sql sql); + IEnumerable Query(string sql, params object[] 
args); + IEnumerable Query(Sql sql); + T Single(object primaryKey); + T SingleOrDefault(object primaryKey); + T Single(string sql, params object[] args); + T SingleOrDefault(string sql, params object[] args); + T First(string sql, params object[] args); + T FirstOrDefault(string sql, params object[] args); + T Single(Sql sql); + T SingleOrDefault(Sql sql); + T First(Sql sql); + T FirstOrDefault(Sql sql); + bool Exists(object primaryKey); + int OneTimeCommandTimeout { get; set; } + } + + public interface IDatabase : IDatabaseQuery + { + void Dispose(); + IDbConnection Connection { get; } + Transaction GetTransaction(); + void BeginTransaction(); + void AbortTransaction(); + void CompleteTransaction(); + object Insert(string tableName, string primaryKeyName, bool autoIncrement, object poco); + object Insert(string tableName, string primaryKeyName, object poco); + object Insert(object poco); + int Update(string tableName, string primaryKeyName, object poco, object primaryKeyValue); + int Update(string tableName, string primaryKeyName, object poco); + int Update(object poco); + int Update(object poco, object primaryKeyValue); + int Update(string sql, params object[] args); + int Update(Sql sql); + int Delete(string tableName, string primaryKeyName, object poco); + int Delete(string tableName, string primaryKeyName, object poco, object primaryKeyValue); + int Delete(object poco); + int Delete(string sql, params object[] args); + int Delete(Sql sql); + int Delete(object pocoOrPrimaryKey); + void Save(string tableName, string primaryKeyName, object poco); + void Save(object poco); + } + + // Database class ... this is where most of the action happens + public class Database : IDisposable, IDatabase + { + public const string MsSqlClientProvider = "System.Data.SqlClient"; + + public Database(IDbConnection connection) + { + _sharedConnection = connection; + _connectionString = connection.ConnectionString; + _sharedConnectionDepth = 2; // Prevent closing external connection + CommonConstruct(); + } + + public Database(string connectionString, string providerName) + { + _connectionString = connectionString; + _providerName = providerName; + CommonConstruct(); + } + + public Database(string connectionString, DbProviderFactory provider) + { + _connectionString = connectionString; + _factory = provider; + CommonConstruct(); + } + + public Database(string connectionStringName) + { + // Use first? 
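
An illustrative sketch (not part of the vendored PetaPoco source): CentralDispatch binds this IDatabase to Connection.GetPetaPocoDb(Connection.MainConnectionString), which also installs CustomeMapper, so application code can take the database as a constructor dependency. The provider shape, the SeriesPoco type and the SQL below are assumptions used only to show the Fetch/Update members declared above; with EnableAutoSelect on, PetaPoco prepends the SELECT ... FROM clause itself.

    using System.Collections.Generic;
    using PetaPoco;

    // Hypothetical consumer of the injected PetaPoco database.
    public class SeriesDataProvider
    {
        private readonly IDatabase _database;       // bound in CentralDispatch to Connection.GetPetaPocoDb(...)

        public SeriesDataProvider(IDatabase database)
        {
            _database = database;
        }

        public List<SeriesPoco> GetMonitoredSeries()
        {
            // Auto-select expands this to "SELECT <columns> FROM Series WHERE Monitored = @0".
            return _database.Fetch<SeriesPoco>("WHERE Monitored = @0", true);
        }

        public void Update(SeriesPoco series)
        {
            _database.Update(series);               // keyed on the [PrimaryKey("SeriesId")] mapping
        }
    }
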
+ if (connectionStringName == "") + connectionStringName = ConfigurationManager.ConnectionStrings[0].Name; + + // Work out connection string and provider name + var providerName = "System.Data.SqlClient"; + if (ConfigurationManager.ConnectionStrings[connectionStringName] != null) + { + if (!string.IsNullOrEmpty(ConfigurationManager.ConnectionStrings[connectionStringName].ProviderName)) + providerName = ConfigurationManager.ConnectionStrings[connectionStringName].ProviderName; + } + else + { + throw new InvalidOperationException("Can't find a connection string with the name '" + connectionStringName + "'"); + } + + // Store factory and connection string + _connectionString = ConfigurationManager.ConnectionStrings[connectionStringName].ConnectionString; + _providerName = providerName; + CommonConstruct(); + } + + enum DBType + { + SqlServer, + SqlServerCE, + MySql, + PostgreSQL, + Oracle, + SQLite + } + DBType _dbType = DBType.SqlServer; + + // Common initialization + private void CommonConstruct() + { + _transactionDepth = 0; + ForceDateTimesToUtc = true; + EnableAutoSelect = true; + + if (_providerName != null) + _factory = DbProviderFactories.GetFactory(_providerName); + + string dbtype = (_factory == null ? _sharedConnection.GetType() : _factory.GetType()).Name; + if (dbtype.StartsWith("MySql")) _dbType = DBType.MySql; + else if (dbtype.StartsWith("SqlCe")) _dbType = DBType.SqlServerCE; + else if (dbtype.StartsWith("Npgsql")) _dbType = DBType.PostgreSQL; + else if (dbtype.StartsWith("Oracle")) _dbType = DBType.Oracle; + else if (dbtype.StartsWith("SQLite")) _dbType = DBType.SQLite; + + if (_dbType == DBType.MySql && _connectionString != null && _connectionString.IndexOf("Allow User Variables=true") >= 0) + _paramPrefix = "?"; + if (_dbType == DBType.Oracle) + _paramPrefix = ":"; + } + + // Automatically close one open shared connection + public void Dispose() + { + // Automatically close one open connection reference + // (Works with KeepConnectionAlive and manually opening a shared connection) + CloseSharedConnection(); + } + + // Set to true to keep the first opened connection alive until this object is disposed + public bool KeepConnectionAlive { get; set; } + + // Open a connection (can be nested) + public void OpenSharedConnection() + { + if (_sharedConnectionDepth == 0) + { + _sharedConnection = _factory.CreateConnection(); + _sharedConnection.ConnectionString = _connectionString; + _sharedConnection.Open(); + + if (KeepConnectionAlive) + _sharedConnectionDepth++; // Make sure you call Dispose + } + _sharedConnectionDepth++; + } + + // Close a previously opened connection + public void CloseSharedConnection() + { + if (_sharedConnectionDepth > 0) + { + _sharedConnectionDepth--; + if (_sharedConnectionDepth == 0) + { + _sharedConnection.Dispose(); + _sharedConnection = null; + } + } + } + + // Access to our shared connection + public IDbConnection Connection + { + get { return _sharedConnection; } + } + + // Helper to create a transaction scope + public Transaction GetTransaction() + { + return new Transaction(this); + } + + // Use by derived repo generated by T4 templates + public virtual void OnBeginTransaction() { } + public virtual void OnEndTransaction() { } + + // Start a new transaction, can be nested, every call must be + // matched by a call to AbortTransaction or CompleteTransaction + // Use `using (var scope=db.Transaction) { scope.Complete(); }` to ensure correct semantics + public void BeginTransaction() + { + _transactionDepth++; + + if (_transactionDepth == 1) + { + 
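
An illustrative sketch (not part of the vendored PetaPoco source) of the scoped-transaction pattern the comment above recommends; db, series and episode stand for any database instance and pocos being written together.

    using (var scope = db.GetTransaction())
    {
        db.Insert(series);
        db.Insert(episode);
        scope.Complete();       // without Complete(), disposing the scope aborts the transaction
    }
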
OpenSharedConnection(); + _transaction = _sharedConnection.BeginTransaction(); + _transactionCancelled = false; + OnBeginTransaction(); + } + + } + + // Internal helper to cleanup transaction stuff + void CleanupTransaction() + { + OnEndTransaction(); + + if (_transactionCancelled) + _transaction.Rollback(); + else + _transaction.Commit(); + + _transaction.Dispose(); + _transaction = null; + + CloseSharedConnection(); + } + + // Abort the entire outer most transaction scope + public void AbortTransaction() + { + _transactionCancelled = true; + if ((--_transactionDepth) == 0) + CleanupTransaction(); + } + + // Complete the transaction + public void CompleteTransaction() + { + if ((--_transactionDepth) == 0) + CleanupTransaction(); + } + + // Helper to handle named parameters from object properties + static Regex rxParams = new Regex(@"(? args_dest) + { + return rxParams.Replace(_sql, m => + { + string param = m.Value.Substring(1); + + object arg_val; + + int paramIndex; + if (int.TryParse(param, out paramIndex)) + { + // Numbered parameter + if (paramIndex < 0 || paramIndex >= args_src.Length) + throw new ArgumentOutOfRangeException(string.Format("Parameter '@{0}' specified but only {1} parameters supplied (in `{2}`)", paramIndex, args_src.Length, _sql)); + arg_val = args_src[paramIndex]; + } + else + { + // Look for a property on one of the arguments with this name + bool found = false; + arg_val = null; + foreach (var o in args_src) + { + var pi = o.GetType().GetProperty(param); + if (pi != null) + { + arg_val = pi.GetValue(o, null); + found = true; + break; + } + } + + if (!found) + throw new ArgumentException(string.Format("Parameter '@{0}' specified but none of the passed arguments have a property with this name (in '{1}')", param, _sql)); + } + + // Expand collections to parameter lists + if ((arg_val as System.Collections.IEnumerable) != null && + (arg_val as string) == null && + (arg_val as byte[]) == null) + { + var sb = new StringBuilder(); + foreach (var i in arg_val as System.Collections.IEnumerable) + { + sb.Append((sb.Length == 0 ? 
"@" : ",@") + args_dest.Count.ToString()); + args_dest.Add(i); + } + return sb.ToString(); + } + else + { + args_dest.Add(arg_val); + return "@" + (args_dest.Count - 1).ToString(); + } + } + ); + } + + // Add a parameter to a DB command + void AddParam(IDbCommand cmd, object item, string ParameterPrefix) + { + // Convert value to from poco type to db type + if (Database.Mapper != null && item != null) + { + var fn = Database.Mapper.GetToDbConverter(item.GetType()); + if (fn != null) + item = fn(item); + } + + // Support passed in parameters + var idbParam = item as IDbDataParameter; + if (idbParam != null) + { + idbParam.ParameterName = string.Format("{0}{1}", ParameterPrefix, cmd.Parameters.Count); + cmd.Parameters.Add(idbParam); + return; + } + var p = cmd.CreateParameter(); + p.ParameterName = string.Format("{0}{1}", ParameterPrefix, cmd.Parameters.Count); + + if (item == null) + { + p.Value = DBNull.Value; + } + else + { + var t = item.GetType(); + if (t.IsEnum) // PostgreSQL .NET driver wont cast enum to int + { + p.Value = (int)item; + } + else if (t == typeof(Guid)) + { + p.Value = item.ToString(); + p.DbType = DbType.String; + p.Size = 40; + } + else if (t == typeof(string)) + { + p.Size = Math.Max((item as string).Length + 1, 4000); // Help query plan caching by using common size + p.Value = item; + } + else if (t == typeof(AnsiString)) + { + // Thanks @DataChomp for pointing out the SQL Server indexing performance hit of using wrong string type on varchar + p.Size = Math.Max((item as AnsiString).Value.Length + 1, 4000); + p.Value = (item as AnsiString).Value; + p.DbType = DbType.AnsiString; + } + else if (t == typeof(bool) && _dbType != DBType.PostgreSQL) + { + p.Value = ((bool)item) ? 1 : 0; + } + else if (item.GetType().Name == "SqlGeography") //SqlGeography is a CLR Type + { + p.GetType().GetProperty("UdtTypeName").SetValue(p, "geography", null); //geography is the equivalent SQL Server Type + p.Value = item; + } + + else if (item.GetType().Name == "SqlGeometry") //SqlGeometry is a CLR Type + { + p.GetType().GetProperty("UdtTypeName").SetValue(p, "geometry", null); //geography is the equivalent SQL Server Type + p.Value = item; + } + else + { + p.Value = item; + } + } + + cmd.Parameters.Add(p); + } + + // Create a command + static Regex rxParamsPrefix = new Regex(@"(? _paramPrefix + m.Value.Substring(1)); + sql = sql.Replace("@@", "@"); // <- double @@ escapes a single @ + + // Create the command and add parameters + IDbCommand cmd = _factory == null ? 
connection.CreateCommand() : _factory.CreateCommand(); + cmd.Connection = connection; + cmd.CommandText = sql; + cmd.Transaction = _transaction; + + foreach (var item in args) + { + AddParam(cmd, item, _paramPrefix); + } + + if (_dbType == DBType.Oracle) + { + cmd.GetType().GetProperty("BindByName").SetValue(cmd, true, null); + } + + if (!String.IsNullOrEmpty(sql)) + DoPreExecute(cmd); + + return cmd; + } + + // Create a command + IDbCommand CreateCommand(IDbConnection connection, string sql, params object[] args) + { + var sqlStatement = new Sql(sql, args); + return CreateCommand(connection, sqlStatement); + } + + // Override this to log/capture exceptions + public virtual void OnException(Exception x) + { + System.Diagnostics.Debug.WriteLine(x.ToString()); + System.Diagnostics.Debug.WriteLine(LastCommand); + } + + // Override this to log commands, or modify command before execution + public virtual void OnExecutingCommand(IDbCommand cmd) { } + public virtual void OnExecutedCommand(IDbCommand cmd) { } + + // Execute a non-query command + public int Execute(string sql, params object[] args) + { + return Execute(new Sql(sql, args)); + } + + public int Execute(Sql sql) + { + try + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, sql)) + { + var result = cmd.ExecuteNonQuery(); + OnExecutedCommand(cmd); + return result; + } + } + finally + { + CloseSharedConnection(); + } + } + catch (Exception x) + { + OnException(x); + throw; + } + } + + // Execute and cast a scalar property + public T ExecuteScalar(string sql, params object[] args) + { + return ExecuteScalar(new Sql(sql, args)); + } + + public T ExecuteScalar(Sql sql) + { + try + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, sql)) + { + object val = cmd.ExecuteScalar(); + OnExecutedCommand(cmd); + return (T)Convert.ChangeType(val, typeof(T)); + } + } + finally + { + CloseSharedConnection(); + } + } + catch (Exception x) + { + OnException(x); + throw; + } + } + + Regex rxSelect = new Regex(@"\A\s*(SELECT|EXECUTE|CALL)\s", RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.IgnoreCase | RegexOptions.Multiline); + Regex rxFrom = new Regex(@"\A\s*FROM\s", RegexOptions.Compiled | RegexOptions.Singleline | RegexOptions.IgnoreCase | RegexOptions.Multiline); + string AddSelectClause(string sql) + { + if (sql.StartsWith(";")) + return sql.Substring(1); + + if (!rxSelect.IsMatch(sql)) + { + var pd = PocoData.ForType(typeof(T)); + var tableName = EscapeTableName(pd.TableInfo.TableName); + string cols = string.Join(", ", (from c in pd.QueryColumns select EscapeSqlIdentifier(c)).ToArray()); + if (!rxFrom.IsMatch(sql)) + sql = string.Format("SELECT {0} FROM {1} {2}", cols, tableName, sql); + else + sql = string.Format("SELECT {0} {1}", cols, sql); + } + return sql; + } + + public bool ForceDateTimesToUtc { get; set; } + public bool EnableAutoSelect { get; set; } + + // Return a typed list of pocos + public List Fetch(string sql, params object[] args) + { + if (EnableAutoSelect) + sql = AddSelectClause(sql); + + return Fetch(new Sql(sql, args)); + } + + public List Fetch(Sql sql) + { + return Query(sql).ToList(); + } + + public List Fetch(long page, long itemsPerPage, string sql, params object[] args) + { + string sqlCount, sqlPage; + BuildPageQueries(page, itemsPerPage, sql, ref args, out sqlCount, out sqlPage); + return Fetch(sqlPage, args); + } + + public List Fetch(long page, long itemsPerPage, Sql sql) + { + return Fetch(page, itemsPerPage, sql.SQL, 
sql.Arguments); + } + + public List Fetch() + { + return Fetch(AddSelectClause("")); + } + + static Regex rxColumns = new Regex(@"\A\s*SELECT\s+((?:\((?>\((?)|\)(?<-depth>)|.?)*(?(depth)(?!))\)|.)*?)(?\((?)|\)(?<-depth>)|.?)*(?(depth)(?!))\)|[\w\(\)\.])+(?:\s+(?:ASC|DESC))?(?:\s*,\s*(?:\((?>\((?)|\)(?<-depth>)|.?)*(?(depth)(?!))\)|[\w\(\)\.])+(?:\s+(?:ASC|DESC))?)*", RegexOptions.IgnoreCase | RegexOptions.Multiline | RegexOptions.Singleline | RegexOptions.Compiled); + public static bool SplitSqlForPaging(string sql, out string sqlCount, out string sqlSelectRemoved, out string sqlOrderBy) + { + sqlSelectRemoved = null; + sqlCount = null; + sqlOrderBy = null; + + // Extract the columns from "SELECT FROM" + var m = rxColumns.Match(sql); + if (!m.Success) + return false; + + // Save column list and replace with COUNT(*) + Group g = m.Groups[1]; + sqlCount = sql.Substring(0, g.Index) + "COUNT(*) " + sql.Substring(g.Index + g.Length); + sqlSelectRemoved = sql.Substring(g.Index); + + // Look for an "ORDER BY " clause + m = rxOrderBy.Match(sqlCount); + if (!m.Success) + return false; + + g = m.Groups[0]; + sqlOrderBy = g.ToString(); + sqlCount = sqlCount.Substring(0, g.Index) + sqlCount.Substring(g.Index + g.Length); + + return true; + } + + public void BuildPageQueries(long page, long itemsPerPage, string sql, ref object[] args, out string sqlCount, out string sqlPage) + { + // Add auto select clause + sql = AddSelectClause(sql); + + // Split the SQL into the bits we need + string sqlSelectRemoved, sqlOrderBy; + if (!SplitSqlForPaging(sql, out sqlCount, out sqlSelectRemoved, out sqlOrderBy)) + throw new Exception("Unable to parse SQL statement for paged query"); + if (_dbType == DBType.Oracle && sqlSelectRemoved.StartsWith("*")) + throw new Exception("Query must alias '*' when performing a paged query.\neg. 
select t.* from table t order by t.id"); + + // Build the SQL for the actual final result + if (_dbType == DBType.SqlServer || _dbType == DBType.Oracle) + { + sqlSelectRemoved = rxOrderBy.Replace(sqlSelectRemoved, ""); + sqlPage = string.Format("SELECT * FROM (SELECT ROW_NUMBER() OVER ({0}) peta_rn, {1}) peta_paged WHERE peta_rn>@{2} AND peta_rn<=@{3}", + sqlOrderBy, sqlSelectRemoved, args.Length, args.Length + 1); + args = args.Concat(new object[] { (page - 1) * itemsPerPage, page * itemsPerPage }).ToArray(); + } + else if (_dbType == DBType.SqlServerCE) + { + sqlPage = string.Format("{0}\nOFFSET @{1} ROWS FETCH NEXT @{2} ROWS ONLY", sql, args.Length, args.Length + 1); + args = args.Concat(new object[] { (page - 1) * itemsPerPage, itemsPerPage }).ToArray(); + } + else + { + sqlPage = string.Format("{0}\nLIMIT @{1} OFFSET @{2}", sql, args.Length, args.Length + 1); + args = args.Concat(new object[] { itemsPerPage, (page - 1) * itemsPerPage }).ToArray(); + } + + } + + // Fetch a page + public Page Page(long page, long itemsPerPage, string sql, params object[] args) + { + string sqlCount, sqlPage; + BuildPageQueries(page, itemsPerPage, sql, ref args, out sqlCount, out sqlPage); + + // Save the one-time command time out and use it for both queries + int saveTimeout = OneTimeCommandTimeout; + + // Setup the paged result + var result = new Page(); + result.CurrentPage = page; + result.ItemsPerPage = itemsPerPage; + result.TotalItems = ExecuteScalar(sqlCount, args); + result.TotalPages = result.TotalItems / itemsPerPage; + if ((result.TotalItems % itemsPerPage) != 0) + result.TotalPages++; + + OneTimeCommandTimeout = saveTimeout; + + // Get the records + result.Items = Fetch(sqlPage, args); + + // Done + return result; + } + + public Page Page(long page, long itemsPerPage, Sql sql) + { + return Page(page, itemsPerPage, sql.SQL, sql.Arguments); + } + + // Return an enumerable collection of pocos + public IEnumerable Query(string sql, params object[] args) + { + if (EnableAutoSelect) + sql = AddSelectClause(sql); + + return Query(new Sql(sql, args)); + } + + public IEnumerable Query(Sql sql) + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, sql)) + { + IDataReader r; + var pd = PocoData.ForType(typeof(T)); + try + { + if (_sharedConnection.State != ConnectionState.Open) + _sharedConnection.Open(); + + r = cmd.ExecuteReader(); + OnExecutedCommand(cmd); + } + catch (Exception x) + { + OnException(x); + throw; + } + + using (r) + { + var factory = pd.GetFactory(cmd.CommandText, _sharedConnection.ConnectionString, ForceDateTimesToUtc, 0, r.FieldCount, r) as Func; + while (true) + { + T poco; + try + { + if (!r.Read()) + yield break; + poco = factory(r); + } + catch (Exception x) + { + OnException(x); + throw; + } + + yield return poco; + } + } + } + } + finally + { + CloseSharedConnection(); + } + } + + // Multi Fetch + public List Fetch(Func cb, string sql, params object[] args) { return Query(cb, sql, args).ToList(); } + public List Fetch(Func cb, string sql, params object[] args) { return Query(cb, sql, args).ToList(); } + public List Fetch(Func cb, string sql, params object[] args) { return Query(cb, sql, args).ToList(); } + + // Multi Query + public IEnumerable Query(Func cb, string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2) }, cb, sql, args); } + public IEnumerable Query(Func cb, string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3) }, cb, sql, args); } + public IEnumerable 
Query(Func cb, string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3), typeof(T4) }, cb, sql, args); } + + // Multi Fetch (SQL builder) + public List Fetch(Func cb, Sql sql) { return Query(cb, sql.SQL, sql.Arguments).ToList(); } + public List Fetch(Func cb, Sql sql) { return Query(cb, sql.SQL, sql.Arguments).ToList(); } + public List Fetch(Func cb, Sql sql) { return Query(cb, sql.SQL, sql.Arguments).ToList(); } + + // Multi Query (SQL builder) + public IEnumerable Query(Func cb, Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2) }, cb, sql.SQL, sql.Arguments); } + public IEnumerable Query(Func cb, Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3) }, cb, sql.SQL, sql.Arguments); } + public IEnumerable Query(Func cb, Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3), typeof(T4) }, cb, sql.SQL, sql.Arguments); } + + // Multi Fetch (Simple) + public List Fetch(string sql, params object[] args) { return Query(sql, args).ToList(); } + public List Fetch(string sql, params object[] args) { return Query(sql, args).ToList(); } + public List Fetch(string sql, params object[] args) { return Query(sql, args).ToList(); } + + // Multi Query (Simple) + public IEnumerable Query(string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2) }, null, sql, args); } + public IEnumerable Query(string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3) }, null, sql, args); } + public IEnumerable Query(string sql, params object[] args) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3), typeof(T4) }, null, sql, args); } + + // Multi Fetch (Simple) (SQL builder) + public List Fetch(Sql sql) { return Query(sql.SQL, sql.Arguments).ToList(); } + public List Fetch(Sql sql) { return Query(sql.SQL, sql.Arguments).ToList(); } + public List Fetch(Sql sql) { return Query(sql.SQL, sql.Arguments).ToList(); } + + // Multi Query (Simple) (SQL builder) + public IEnumerable Query(Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2) }, null, sql.SQL, sql.Arguments); } + public IEnumerable Query(Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3) }, null, sql.SQL, sql.Arguments); } + public IEnumerable Query(Sql sql) { return Query(new Type[] { typeof(T1), typeof(T2), typeof(T3), typeof(T4) }, null, sql.SQL, sql.Arguments); } + + // Automagically guess the property relationships between various POCOs and create a delegate that will set them up + object GetAutoMapper(Type[] types) + { + // Build a key + var kb = new StringBuilder(); + foreach (var t in types) + { + kb.Append(t.ToString()); + kb.Append(":"); + } + var key = kb.ToString(); + + // Check cache + RWLock.EnterReadLock(); + try + { + object mapper; + if (AutoMappers.TryGetValue(key, out mapper)) + return mapper; + } + finally + { + RWLock.ExitReadLock(); + } + + // Create it + RWLock.EnterWriteLock(); + try + { + // Try again + object mapper; + if (AutoMappers.TryGetValue(key, out mapper)) + return mapper; + + // Create a method + var m = new DynamicMethod("petapoco_automapper", types[0], types, true); + var il = m.GetILGenerator(); + + for (int i = 1; i < types.Length; i++) + { + bool handled = false; + for (int j = i - 1; j >= 0; j--) + { + // Find the property + var candidates = from p in types[j].GetProperties() where p.PropertyType == types[i] select p; + if (candidates.Count() == 0) + continue; + if (candidates.Count() > 1) + throw new 
InvalidOperationException(string.Format("Can't auto join {0} as {1} has more than one property of type {0}", types[i], types[j])); + + // Generate code + il.Emit(OpCodes.Ldarg_S, j); + il.Emit(OpCodes.Ldarg_S, i); + il.Emit(OpCodes.Callvirt, candidates.First().GetSetMethod(true)); + handled = true; + } + + if (!handled) + throw new InvalidOperationException(string.Format("Can't auto join {0}", types[i])); + } + + il.Emit(OpCodes.Ldarg_0); + il.Emit(OpCodes.Ret); + + // Cache it + var del = m.CreateDelegate(Expression.GetFuncType(types.Concat(types.Take(1)).ToArray())); + AutoMappers.Add(key, del); + return del; + } + finally + { + RWLock.ExitWriteLock(); + } + } + + // Find the split point in a result set for two different pocos and return the poco factory for the first + Delegate FindSplitPoint(Type typeThis, Type typeNext, string sql, IDataReader r, ref int pos) + { + // Last? + if (typeNext == null) + return PocoData.ForType(typeThis).GetFactory(sql, _sharedConnection.ConnectionString, ForceDateTimesToUtc, pos, r.FieldCount - pos, r); + + // Get PocoData for the two types + PocoData pdThis = PocoData.ForType(typeThis); + PocoData pdNext = PocoData.ForType(typeNext); + + // Find split point + int firstColumn = pos; + var usedColumns = new Dictionary(); + for (; pos < r.FieldCount; pos++) + { + // Split if field name has already been used, or if the field doesn't exist in current poco but does in the next + string fieldName = r.GetName(pos); + if (usedColumns.ContainsKey(fieldName) || (!pdThis.Columns.ContainsKey(fieldName) && pdNext.Columns.ContainsKey(fieldName))) + { + return pdThis.GetFactory(sql, _sharedConnection.ConnectionString, ForceDateTimesToUtc, firstColumn, pos - firstColumn, r); + } + usedColumns.Add(fieldName, true); + } + + throw new InvalidOperationException(string.Format("Couldn't find split point between {0} and {1}", typeThis, typeNext)); + } + + // Instance data used by the Multipoco factory delegate - essentially a list of the nested poco factories to call + class MultiPocoFactory + { + public List m_Delegates; + public Delegate GetItem(int index) { return m_Delegates[index]; } + } + + // Create a multi-poco factory + Func CreateMultiPocoFactory(Type[] types, string sql, IDataReader r) + { + var m = new DynamicMethod("petapoco_multipoco_factory", typeof(TRet), new Type[] { typeof(MultiPocoFactory), typeof(IDataReader), typeof(object) }, typeof(MultiPocoFactory)); + var il = m.GetILGenerator(); + + // Load the callback + il.Emit(OpCodes.Ldarg_2); + + // Call each delegate + var dels = new List(); + int pos = 0; + for (int i = 0; i < types.Length; i++) + { + // Add to list of delegates to call + var del = FindSplitPoint(types[i], i + 1 < types.Length ? types[i + 1] : null, sql, r, ref pos); + dels.Add(del); + + // Get the delegate + il.Emit(OpCodes.Ldarg_0); // callback,this + il.Emit(OpCodes.Ldc_I4, i); // callback,this,Index + il.Emit(OpCodes.Callvirt, typeof(MultiPocoFactory).GetMethod("GetItem")); // callback,Delegate + il.Emit(OpCodes.Ldarg_1); // callback,delegate, datareader + + // Call Invoke + var tDelInvoke = del.GetType().GetMethod("Invoke"); + il.Emit(OpCodes.Callvirt, tDelInvoke); // Poco left on stack + } + + // By now we should have the callback and the N pocos all on the stack. 
Call the callback and we're done + il.Emit(OpCodes.Callvirt, Expression.GetFuncType(types.Concat(new Type[] { typeof(TRet) }).ToArray()).GetMethod("Invoke")); + il.Emit(OpCodes.Ret); + + // Finish up + return (Func)m.CreateDelegate(typeof(Func), new MultiPocoFactory() { m_Delegates = dels }); + } + + // Various cached stuff + static Dictionary MultiPocoFactories = new Dictionary(); + static Dictionary AutoMappers = new Dictionary(); + static System.Threading.ReaderWriterLockSlim RWLock = new System.Threading.ReaderWriterLockSlim(); + + // Get (or create) the multi-poco factory for a query + Func GetMultiPocoFactory(Type[] types, string sql, IDataReader r) + { + // Build a key string (this is crap, should address this at some point) + var kb = new StringBuilder(); + kb.Append(typeof(TRet).ToString()); + kb.Append(":"); + foreach (var t in types) + { + kb.Append(":"); + kb.Append(t.ToString()); + } + kb.Append(":"); kb.Append(_sharedConnection.ConnectionString); + kb.Append(":"); kb.Append(ForceDateTimesToUtc); + kb.Append(":"); kb.Append(sql); + string key = kb.ToString(); + + // Check cache + RWLock.EnterReadLock(); + try + { + object oFactory; + if (MultiPocoFactories.TryGetValue(key, out oFactory)) + return (Func)oFactory; + } + finally + { + RWLock.ExitReadLock(); + } + + // Cache it + RWLock.EnterWriteLock(); + try + { + // Check again + object oFactory; + if (MultiPocoFactories.TryGetValue(key, out oFactory)) + return (Func)oFactory; + + // Create the factory + var Factory = CreateMultiPocoFactory(types, sql, r); + + MultiPocoFactories.Add(key, Factory); + return Factory; + } + finally + { + RWLock.ExitWriteLock(); + } + + } + + // Actual implementation of the multi-poco query + public IEnumerable Query(Type[] types, object cb, string sql, params object[] args) + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, sql, args)) + { + IDataReader r; + try + { + r = cmd.ExecuteReader(); + OnExecutedCommand(cmd); + } + catch (Exception x) + { + OnException(x); + throw; + } + var factory = GetMultiPocoFactory(types, sql, r); + if (cb == null) + cb = GetAutoMapper(types.ToArray()); + bool bNeedTerminator = false; + using (r) + { + while (true) + { + TRet poco; + try + { + if (!r.Read()) + break; + poco = factory(r, cb); + } + catch (Exception x) + { + OnException(x); + throw; + } + + if (poco != null) + yield return poco; + else + bNeedTerminator = true; + } + if (bNeedTerminator) + { + var poco = (TRet)(cb as Delegate).DynamicInvoke(new object[types.Length]); + if (poco != null) + yield return poco; + else + yield break; + } + } + } + } + finally + { + CloseSharedConnection(); + } + } + + public bool Exists(object primaryKey) + { + var index = 0; + var primaryKeyValuePairs = GetPrimaryKeyValues(PocoData.ForType(typeof(T)).TableInfo.PrimaryKey, primaryKey); + return FirstOrDefault(string.Format("WHERE {0}", BuildPrimaryKeySql(primaryKeyValuePairs, ref index)), primaryKeyValuePairs.Select(x => x.Value).ToArray()) != null; + } + public T Single(object primaryKey) + { + var index = 0; + var primaryKeyValuePairs = GetPrimaryKeyValues(PocoData.ForType(typeof(T)).TableInfo.PrimaryKey, primaryKey); + return Single(string.Format("WHERE {0}", BuildPrimaryKeySql(primaryKeyValuePairs, ref index)), primaryKeyValuePairs.Select(x => x.Value).ToArray()); + } + public T SingleOrDefault(object primaryKey) + { + var index = 0; + var primaryKeyValuePairs = GetPrimaryKeyValues(PocoData.ForType(typeof(T)).TableInfo.PrimaryKey, primaryKey); + return 
SingleOrDefault(string.Format("WHERE {0}", BuildPrimaryKeySql(primaryKeyValuePairs, ref index)), primaryKeyValuePairs.Select(x => x.Value).ToArray()); + } + public T Single(string sql, params object[] args) + { + return Query(sql, args).Single(); + } + public T SingleOrDefault(string sql, params object[] args) + { + return Query(sql, args).SingleOrDefault(); + } + public T First(string sql, params object[] args) + { + return Query(sql, args).First(); + } + public T FirstOrDefault(string sql, params object[] args) + { + return Query(sql, args).FirstOrDefault(); + } + public T Single(Sql sql) + { + return Query(sql).Single(); + } + public T SingleOrDefault(Sql sql) + { + return Query(sql).SingleOrDefault(); + } + public T First(Sql sql) + { + return Query(sql).First(); + } + public T FirstOrDefault(Sql sql) + { + return Query(sql).FirstOrDefault(); + } + + public string EscapeTableName(string str) + { + // Assume table names with "dot", or opening sq is already escaped + return str.IndexOf('.') >= 0 ? str : EscapeSqlIdentifier(str); + } + + public string EscapeSqlIdentifier(string str) + { + switch (_dbType) + { + case DBType.MySql: + return string.Format("`{0}`", str); + + case DBType.PostgreSQL: + case DBType.Oracle: + return string.Format("\"{0}\"", str); + + default: + return string.Format("[{0}]", str); + } + } + + public object Insert(string tableName, string primaryKeyName, object poco) + { + return Insert(tableName, primaryKeyName, true, poco); + } + + // Insert a poco into a table. If the poco has a property with the same name + // as the primary key the id of the new record is assigned to it. Either way, + // the new id is returned. + public object Insert(string tableName, string primaryKeyName, bool autoIncrement, object poco) + { + try + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, "")) + { + if(_sharedConnection.State != ConnectionState.Open) + _sharedConnection.Open(); + + var pd = PocoData.ForObject(poco, primaryKeyName); + var names = new List(); + var values = new List(); + var index = 0; + var versionName = ""; + + foreach (var i in pd.Columns) + { + // Don't insert result columns + if (i.Value.ResultColumn) + continue; + + // Don't insert the primary key (except under oracle where we need bring in the next sequence value) + if (autoIncrement && primaryKeyName != null && string.Compare(i.Key, primaryKeyName, true) == 0) + { + if (_dbType == DBType.Oracle && !string.IsNullOrEmpty(pd.TableInfo.SequenceName)) + { + names.Add(i.Key); + values.Add(string.Format("{0}.nextval", pd.TableInfo.SequenceName)); + } + continue; + } + + names.Add(EscapeSqlIdentifier(i.Key)); + values.Add(string.Format("{0}{1}", _paramPrefix, index++)); + + object val = i.Value.GetValue(poco); + if (i.Value.VersionColumn) + { + val = 1; + versionName = i.Key; + } + + AddParam(cmd, val, _paramPrefix); + } + + cmd.CommandText = string.Format("INSERT INTO {0} ({1}) VALUES ({2})", + EscapeTableName(tableName), + string.Join(",", names.ToArray()), + string.Join(",", values.ToArray()) + ); + + if (!autoIncrement) + { + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + OnExecutedCommand(cmd); + return true; + } + + object id; + + switch (_dbType) + { + case DBType.SqlServerCE: + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + OnExecutedCommand(cmd); + id = ExecuteScalar("SELECT @@@IDENTITY AS NewID;"); + break; + case DBType.SqlServer: + cmd.CommandText += ";\nSELECT SCOPE_IDENTITY() AS NewID;"; + DoPreExecute(cmd); + id = cmd.ExecuteScalar(); + OnExecutedCommand(cmd); + 
break; + case DBType.PostgreSQL: + if (primaryKeyName != null) + { + cmd.CommandText += string.Format("returning {0} as NewID", EscapeSqlIdentifier(primaryKeyName)); + DoPreExecute(cmd); + id = cmd.ExecuteScalar(); + } + else + { + id = -1; + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + } + OnExecutedCommand(cmd); + break; + case DBType.Oracle: + if (primaryKeyName != null) + { + cmd.CommandText += string.Format(" returning {0} into :newid", EscapeSqlIdentifier(primaryKeyName)); + var param = cmd.CreateParameter(); + param.ParameterName = ":newid"; + param.Value = DBNull.Value; + param.Direction = ParameterDirection.ReturnValue; + param.DbType = DbType.Int64; + cmd.Parameters.Add(param); + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + id = param.Value; + } + else + { + id = -1; + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + } + OnExecutedCommand(cmd); + break; + case DBType.SQLite: + if (primaryKeyName != null) + { + cmd.CommandText += ";\nSELECT last_insert_rowid();"; + DoPreExecute(cmd); + id = cmd.ExecuteScalar(); + } + else + { + id = -1; + DoPreExecute(cmd); + cmd.ExecuteNonQuery(); + } + OnExecutedCommand(cmd); + break; + default: + cmd.CommandText += ";\nSELECT @@IDENTITY AS NewID;"; + DoPreExecute(cmd); + id = cmd.ExecuteScalar(); + OnExecutedCommand(cmd); + break; + } + + // Assign the ID back to the primary key property + if (primaryKeyName != null) + { + PocoColumn pc; + if (pd.Columns.TryGetValue(primaryKeyName, out pc)) + { + pc.SetValue(poco, pc.ChangeType(id)); + } + } + + // Assign the Version column + if (!string.IsNullOrEmpty(versionName)) + { + PocoColumn pc; + if (pd.Columns.TryGetValue(versionName, out pc)) + { + pc.SetValue(poco, pc.ChangeType(1)); + } + } + + return id; + } + } + finally + { + CloseSharedConnection(); + } + } + catch (Exception x) + { + OnException(x); + throw; + } + } + + // Insert an annotated poco object + public object Insert(object poco) + { + var pd = PocoData.ForType(poco.GetType()); + return Insert(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, pd.TableInfo.AutoIncrement, poco); + } + + // Update a record with values from a poco. 
primary key value can be either supplied or read from the poco + public int Update(string tableName, string primaryKeyName, object poco, object primaryKeyValue) + { + try + { + OpenSharedConnection(); + try + { + using (var cmd = CreateCommand(_sharedConnection, "")) + { + var sb = new StringBuilder(); + var index = 0; + var pd = PocoData.ForObject(poco, primaryKeyName); + string versionName = null; + object versionValue = null; + + var primaryKeyValuePairs = GetPrimaryKeyValues(primaryKeyName, primaryKeyValue); + + foreach (var i in pd.Columns) + { + // Don't update the primary key, but grab the value if we don't have it + if (primaryKeyValue == null && primaryKeyValuePairs.ContainsKey(i.Key)) + { + primaryKeyValuePairs[i.Key] = i.Value.PropertyInfo.GetValue(poco, null); + continue; + } + + // Dont update result only columns + if (i.Value.ResultColumn) + continue; + + object value = i.Value.PropertyInfo.GetValue(poco, null); + + if (i.Value.VersionColumn) + { + versionName = i.Key; + versionValue = value; + value = Convert.ToInt64(value) + 1; + } + + // Build the sql + if (index > 0) + sb.Append(", "); + sb.AppendFormat("{0} = {1}{2}", EscapeSqlIdentifier(i.Key), _paramPrefix, index++); + + // Store the parameter in the command + AddParam(cmd, value, _paramPrefix); + } + + cmd.CommandText = string.Format("UPDATE {0} SET {1} WHERE {2}", + EscapeSqlIdentifier(tableName), sb.ToString(), BuildPrimaryKeySql(primaryKeyValuePairs, ref index)); + + foreach (var keyValue in primaryKeyValuePairs) + { + AddParam(cmd, keyValue.Value, _paramPrefix); + } + + if (!string.IsNullOrEmpty(versionName)) + { + cmd.CommandText += string.Format(" AND {0} = {1}{2}", EscapeSqlIdentifier(versionName), _paramPrefix, index++); + AddParam(cmd, versionValue, _paramPrefix); + } + + DoPreExecute(cmd); + + // Do it + var result = cmd.ExecuteNonQuery(); + OnExecutedCommand(cmd); + + // Set Version + if (!string.IsNullOrEmpty(versionName)) + { + PocoColumn pc; + if (pd.Columns.TryGetValue(versionName, out pc)) + { + pc.PropertyInfo.SetValue(poco, Convert.ChangeType(Convert.ToInt64(versionValue) + 1, pc.PropertyInfo.PropertyType), null); + } + } + + return result; + } + } + finally + { + CloseSharedConnection(); + } + } + catch (Exception x) + { + OnException(x); + throw; + } + } + + private string BuildPrimaryKeySql(Dictionary primaryKeyValuePair, ref int index) + { + var tempIndex = index; + index += primaryKeyValuePair.Count; + return string.Join(" AND ", primaryKeyValuePair.Select((x, i) => string.Format("{0} = {1}{2}", EscapeSqlIdentifier(x.Key), _paramPrefix, tempIndex + i)).ToArray()); + } + + private Dictionary GetPrimaryKeyValues(string primaryKeyName, object primaryKeyValue) + { + Dictionary primaryKeyValues; + + var multiplePrimaryKeysNames = primaryKeyName.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries).Select(x => x.Trim()).ToArray(); + if (primaryKeyValue != null) + { + if (multiplePrimaryKeysNames.Length == 1) + primaryKeyValues = new Dictionary(StringComparer.OrdinalIgnoreCase) { { primaryKeyName, primaryKeyValue } }; + else + primaryKeyValues = multiplePrimaryKeysNames.ToDictionary(x => x, + x => primaryKeyValue.GetType().GetProperties() + .Where(y => string.Equals(x, y.Name, StringComparison.OrdinalIgnoreCase)) + .Single().GetValue(primaryKeyValue, null), StringComparer.OrdinalIgnoreCase); + } + else + { + primaryKeyValues = multiplePrimaryKeysNames.ToDictionary(x => x, x => (object)null, StringComparer.OrdinalIgnoreCase); + } + return primaryKeyValues; + } + + public int Update(string 
tableName, string primaryKeyName, object poco) + { + return Update(tableName, primaryKeyName, poco, null); + } + + public int Update(object poco) + { + return Update(poco, null); + } + + public int Update(object poco, object primaryKeyValue) + { + var pd = PocoData.ForType(poco.GetType()); + return Update(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, poco, primaryKeyValue); + } + + public int Update(string sql, params object[] args) + { + var pd = PocoData.ForType(typeof(T)); + return Execute(string.Format("UPDATE {0} {1}", EscapeTableName(pd.TableInfo.TableName), sql), args); + } + + public int Update(Sql sql) + { + var pd = PocoData.ForType(typeof(T)); + return Execute(new Sql(string.Format("UPDATE {0}", EscapeTableName(pd.TableInfo.TableName))).Append(sql)); + } + + public int Delete(string tableName, string primaryKeyName, object poco) + { + return Delete(tableName, primaryKeyName, poco, null); + } + + public int Delete(string tableName, string primaryKeyName, object poco, object primaryKeyValue) + { + var primaryKeyValuePairs = GetPrimaryKeyValues(primaryKeyName, primaryKeyValue); + // If primary key value not specified, pick it up from the object + if (primaryKeyValue == null) + { + var pd = PocoData.ForObject(poco, primaryKeyName); + foreach (var i in pd.Columns) + { + if (primaryKeyValuePairs.ContainsKey(i.Key)) + { + primaryKeyValuePairs[i.Key] = i.Value.PropertyInfo.GetValue(poco, null); + } + } + } + + // Do it + var index = 0; + var sql = string.Format("DELETE FROM {0} WHERE {1}", tableName, BuildPrimaryKeySql(primaryKeyValuePairs, ref index)); + return Execute(sql, primaryKeyValuePairs.Select(x => x.Value).ToArray()); + } + + public int Delete(object poco) + { + var pd = PocoData.ForType(poco.GetType()); + return Delete(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, poco); + } + + public int Delete(object pocoOrPrimaryKey) + { + if (pocoOrPrimaryKey.GetType() == typeof(T)) + return Delete(pocoOrPrimaryKey); + var pd = PocoData.ForType(typeof(T)); + return Delete(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, null, pocoOrPrimaryKey); + } + + public int Delete(string sql, params object[] args) + { + var pd = PocoData.ForType(typeof(T)); + return Execute(string.Format("DELETE FROM {0} {1}", EscapeTableName(pd.TableInfo.TableName), sql), args); + } + + public int Delete(Sql sql) + { + var pd = PocoData.ForType(typeof(T)); + return Execute(new Sql(string.Format("DELETE FROM {0}", EscapeTableName(pd.TableInfo.TableName))).Append(sql)); + } + + // Check if a poco represents a new record + public bool IsNew(string primaryKeyName, object poco) + { + var pd = PocoData.ForObject(poco, primaryKeyName); + object pk; + PocoColumn pc; + if (pd.Columns.TryGetValue(primaryKeyName, out pc)) + { + pk = pc.GetValue(poco); + } +#if !PETAPOCO_NO_DYNAMIC + else if (poco.GetType() == typeof(System.Dynamic.ExpandoObject)) + { + return true; + } +#endif + else + { + var pi = poco.GetType().GetProperty(primaryKeyName); + if (pi == null) + throw new ArgumentException(string.Format("The object doesn't have a property matching the primary key column name '{0}'", primaryKeyName)); + pk = pi.GetValue(poco, null); + } + + if (pk == null) + return true; + + var type = pk.GetType(); + + if (type.IsValueType) + { + // Common primary key types + if (type == typeof(long)) + return (long)pk == 0; + else if (type == typeof(ulong)) + return (ulong)pk == 0; + else if (type == typeof(int)) + return (int)pk == 0; + else if (type == typeof(uint)) + return (uint)pk == 0; + + // Create a default instance and compare 
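+ // (covers other value-type keys such as Guid, short or DateTime by creating a default instance of the key type and comparing the key against it)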
+ return pk == Activator.CreateInstance(pk.GetType()); + } + else + { + return pk == null; + } + } + + public bool IsNew(object poco) + { + var pd = PocoData.ForType(poco.GetType()); + if (!pd.TableInfo.AutoIncrement) + throw new InvalidOperationException("IsNew() and Save() are only supported on tables with auto-increment/identity primary key columns"); + return IsNew(pd.TableInfo.PrimaryKey, poco); + } + + // Insert new record or Update existing record + public void Save(string tableName, string primaryKeyName, object poco) + { + if (IsNew(primaryKeyName, poco)) + { + Insert(tableName, primaryKeyName, true, poco); + } + else + { + Update(tableName, primaryKeyName, poco); + } + } + + public void Save(object poco) + { + var pd = PocoData.ForType(poco.GetType()); + Save(pd.TableInfo.TableName, pd.TableInfo.PrimaryKey, poco); + } + + public int CommandTimeout { get; set; } + public int OneTimeCommandTimeout { get; set; } + + void DoPreExecute(IDbCommand cmd) + { + // Setup command timeout + if (OneTimeCommandTimeout != 0) + { + cmd.CommandTimeout = OneTimeCommandTimeout; + OneTimeCommandTimeout = 0; + } + else if (CommandTimeout != 0) + { + cmd.CommandTimeout = CommandTimeout; + } + + // Call hook + OnExecutingCommand(cmd); + + // Save it + _lastSql = cmd.CommandText; + _lastArgs = (from IDataParameter parameter in cmd.Parameters select parameter.Value).ToArray(); + } + + public string LastSQL { get { return _lastSql; } } + public object[] LastArgs { get { return _lastArgs; } } + public string LastCommand + { + get { return FormatCommand(_lastSql, _lastArgs); } + } + + public string FormatCommand(IDbCommand cmd) + { + return FormatCommand(cmd.CommandText, (from IDataParameter parameter in cmd.Parameters select parameter.Value).ToArray()); + } + + public string FormatCommand(string sql, object[] args) + { + var sb = new StringBuilder(); + if (sql == null) + return ""; + sb.Append(sql); + if (args != null && args.Length > 0) + { + sb.Append("\n"); + for (int i = 0; i < args.Length; i++) + { + sb.AppendFormat("\t -> {0}{1} [{2}] = \"{3}\"\n", _paramPrefix, i, args[i].GetType().Name, args[i]); + } + sb.Remove(sb.Length - 1, 1); + } + return sb.ToString(); + } + + + public static IMapper Mapper + { + get; + set; + } + + public class PocoColumn + { + public string ColumnName; + public PropertyInfo PropertyInfo; + public bool ResultColumn; + public bool VersionColumn; + public virtual void SetValue(object target, object val) { PropertyInfo.SetValue(target, val, null); } + public virtual object GetValue(object target) { return PropertyInfo.GetValue(target, null); } + public virtual object ChangeType(object val) { return Convert.ChangeType(val, PropertyInfo.PropertyType); } + } + internal class ExpandoColumn : PocoColumn + { + public override void SetValue(object target, object val) { (target as IDictionary)[ColumnName] = val; } + public override object GetValue(object target) + { + object val = null; + (target as IDictionary).TryGetValue(ColumnName, out val); + return val; + } + public override object ChangeType(object val) { return val; } + } + internal class PocoData + { + public static PocoData ForObject(object o, string primaryKeyName) + { + var t = o.GetType(); +#if !PETAPOCO_NO_DYNAMIC + if (t == typeof(System.Dynamic.ExpandoObject)) + { + var pd = new PocoData(); + pd.TableInfo = new TableInfo(); + pd.Columns = new Dictionary(StringComparer.OrdinalIgnoreCase); + pd.Columns.Add(primaryKeyName, new ExpandoColumn() { ColumnName = primaryKeyName }); + pd.TableInfo.PrimaryKey = primaryKeyName; + 
pd.TableInfo.AutoIncrement = true; + foreach (var col in (o as IDictionary).Keys) + { + if (col!=primaryKeyName) + pd.Columns.Add(col, new ExpandoColumn() { ColumnName = col }); + } + return pd; + } + else +#endif + return ForType(t); + } + static System.Threading.ReaderWriterLockSlim RWLock = new System.Threading.ReaderWriterLockSlim(); + public static PocoData ForType(Type t) + { +#if !PETAPOCO_NO_DYNAMIC + if (t == typeof(System.Dynamic.ExpandoObject)) + throw new InvalidOperationException("Can't use dynamic types with this method"); +#endif + // Check cache + RWLock.EnterReadLock(); + PocoData pd; + try + { + if (m_PocoDatas.TryGetValue(t, out pd)) + return pd; + } + finally + { + RWLock.ExitReadLock(); + } + + + // Cache it + RWLock.EnterWriteLock(); + try + { + // Check again + if (m_PocoDatas.TryGetValue(t, out pd)) + return pd; + + // Create it + pd = new PocoData(t); + m_PocoDatas.Add(t, pd); + } + finally + { + RWLock.ExitWriteLock(); + } + + return pd; + } + + public PocoData() + { + } + + public PocoData(Type t) + { + type = t; + TableInfo = new TableInfo(); + + // Get the table name + var a = t.GetCustomAttributes(typeof(TableNameAttribute), true); + TableInfo.TableName = a.Length == 0 ? t.Name : (a[0] as TableNameAttribute).Value; + + // Get the primary key + a = t.GetCustomAttributes(typeof(PrimaryKeyAttribute), true); + TableInfo.PrimaryKey = a.Length == 0 ? "ID" : (a[0] as PrimaryKeyAttribute).Value; + TableInfo.SequenceName = a.Length == 0 ? null : (a[0] as PrimaryKeyAttribute).sequenceName; + TableInfo.AutoIncrement = a.Length == 0 ? false : (a[0] as PrimaryKeyAttribute).autoIncrement; + + // Set autoincrement false if primary key has multiple columns + TableInfo.AutoIncrement = TableInfo.AutoIncrement ? !TableInfo.PrimaryKey.Contains(',') : TableInfo.AutoIncrement; + + // Call column mapper + if (Database.Mapper != null) + Database.Mapper.GetTableInfo(t, TableInfo); + + // Work out bound properties + bool ExplicitColumns = t.GetCustomAttributes(typeof(ExplicitColumnsAttribute), true).Length > 0; + Columns = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var pi in t.GetProperties()) + { + // Work out if properties is to be included + var ColAttrs = pi.GetCustomAttributes(typeof(ColumnAttribute), true); + if (ExplicitColumns) + { + if (ColAttrs.Length == 0) + continue; + } + else + { + if (pi.GetCustomAttributes(typeof(IgnoreAttribute), true).Length != 0) + continue; + } + + var pc = new PocoColumn(); + pc.PropertyInfo = pi; + + // Work out the DB column name + if (ColAttrs.Length > 0) + { + var colattr = (ColumnAttribute)ColAttrs[0]; + pc.ColumnName = colattr.Name; + if ((colattr as ResultColumnAttribute) != null) + pc.ResultColumn = true; + if ((colattr as VersionColumnAttribute) != null) + pc.VersionColumn = true; + } + if (pc.ColumnName == null) + { + pc.ColumnName = pi.Name; + if (Database.Mapper != null && !Database.Mapper.MapPropertyToColumn(pi, ref pc.ColumnName, ref pc.ResultColumn)) + continue; + } + + // Store it + Columns.Add(pc.ColumnName, pc); + } + + // Build column list for automatic select + QueryColumns = (from c in Columns where !c.Value.ResultColumn select c.Key).ToArray(); + + } + + static bool IsIntegralType(Type t) + { + var tc = Type.GetTypeCode(t); + return tc >= TypeCode.SByte && tc <= TypeCode.UInt64; + } + + // Create factory function that can convert a IDataReader record into a POCO + public Delegate GetFactory(string sql, string connString, bool ForceDateTimesToUtc, int firstColumn, int countColumns, IDataReader r) + { + // 
Check cache + var key = string.Format("{0}:{1}:{2}:{3}:{4}", sql, connString, ForceDateTimesToUtc, firstColumn, countColumns); + RWLock.EnterReadLock(); + try + { + // Have we already created it? + Delegate factory; + if (PocoFactories.TryGetValue(key, out factory)) + return factory; + } + finally + { + RWLock.ExitReadLock(); + } + + // Take the writer lock + RWLock.EnterWriteLock(); + + try + { + + // Check again, just in case + Delegate factory; + if (PocoFactories.TryGetValue(key, out factory)) + return factory; + + // Create the method + var m = new DynamicMethod("petapoco_factory_" + PocoFactories.Count.ToString(), type, new Type[] { typeof(IDataReader) }, true); + var il = m.GetILGenerator(); + +#if !PETAPOCO_NO_DYNAMIC + if (type == typeof(object)) + { + // var poco=new T() + il.Emit(OpCodes.Newobj, typeof(System.Dynamic.ExpandoObject).GetConstructor(Type.EmptyTypes)); // obj + + MethodInfo fnAdd = typeof(IDictionary).GetMethod("Add"); + + // Enumerate all fields generating a set assignment for the column + for (int i = firstColumn; i < firstColumn + countColumns; i++) + { + var srcType = r.GetFieldType(i); + + il.Emit(OpCodes.Dup); // obj, obj + il.Emit(OpCodes.Ldstr, r.GetName(i)); // obj, obj, fieldname + + // Get the converter + Func converter = null; + if (Database.Mapper != null) + converter = Database.Mapper.GetFromDbConverter(null, srcType); + if (ForceDateTimesToUtc && converter == null && srcType == typeof(DateTime)) + converter = delegate(object src) { return new DateTime(((DateTime)src).Ticks, DateTimeKind.Utc); }; + + // Setup stack for call to converter + AddConverterToStack(il, converter); + + // r[i] + il.Emit(OpCodes.Ldarg_0); // obj, obj, fieldname, converter?, rdr + il.Emit(OpCodes.Ldc_I4, i); // obj, obj, fieldname, converter?, rdr,i + il.Emit(OpCodes.Callvirt, fnGetValue); // obj, obj, fieldname, converter?, value + + // Convert DBNull to null + il.Emit(OpCodes.Dup); // obj, obj, fieldname, converter?, value, value + il.Emit(OpCodes.Isinst, typeof(DBNull)); // obj, obj, fieldname, converter?, value, (value or null) + var lblNotNull = il.DefineLabel(); + il.Emit(OpCodes.Brfalse_S, lblNotNull); // obj, obj, fieldname, converter?, value + il.Emit(OpCodes.Pop); // obj, obj, fieldname, converter? + if (converter!=null) + il.Emit(OpCodes.Pop); // obj, obj, fieldname, + il.Emit(OpCodes.Ldnull); // obj, obj, fieldname, null + if (converter != null) + { + var lblReady = il.DefineLabel(); + il.Emit(OpCodes.Br_S, lblReady); + il.MarkLabel(lblNotNull); + il.Emit(OpCodes.Callvirt, fnInvoke); + il.MarkLabel(lblReady); + } + else + { + il.MarkLabel(lblNotNull); + } + + il.Emit(OpCodes.Callvirt, fnAdd); + } + } + else +#endif + if (type.IsValueType || type == typeof(string) || type == typeof(byte[])) + { + // Do we need to install a converter? 
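+ // (scalar result path: the poco type itself is the column type, so read column 0, yield null for DBNull, apply any registered converter, then unbox/cast to the requested type)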
+ var srcType = r.GetFieldType(0); + var converter = GetConverter(ForceDateTimesToUtc, null, srcType, type); + + // "if (!rdr.IsDBNull(i))" + il.Emit(OpCodes.Ldarg_0); // rdr + il.Emit(OpCodes.Ldc_I4_0); // rdr,0 + il.Emit(OpCodes.Callvirt, fnIsDBNull); // bool + var lblCont = il.DefineLabel(); + il.Emit(OpCodes.Brfalse_S, lblCont); + il.Emit(OpCodes.Ldnull); // null + var lblFin = il.DefineLabel(); + il.Emit(OpCodes.Br_S, lblFin); + + il.MarkLabel(lblCont); + + // Setup stack for call to converter + AddConverterToStack(il, converter); + + il.Emit(OpCodes.Ldarg_0); // rdr + il.Emit(OpCodes.Ldc_I4_0); // rdr,0 + il.Emit(OpCodes.Callvirt, fnGetValue); // value + + // Call the converter + if (converter != null) + il.Emit(OpCodes.Callvirt, fnInvoke); + + il.MarkLabel(lblFin); + il.Emit(OpCodes.Unbox_Any, type); // value converted + } + else + { + // var poco=new T() + il.Emit(OpCodes.Newobj, type.GetConstructor(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, null, new Type[0], null)); + + // Enumerate all fields generating a set assignment for the column + for (int i = firstColumn; i < firstColumn + countColumns; i++) + { + // Get the PocoColumn for this db column, ignore if not known + PocoColumn pc; + if (!Columns.TryGetValue(r.GetName(i), out pc)) + continue; + + // Get the source type for this column + var srcType = r.GetFieldType(i); + var dstType = pc.PropertyInfo.PropertyType; + + // "if (!rdr.IsDBNull(i))" + il.Emit(OpCodes.Ldarg_0); // poco,rdr + il.Emit(OpCodes.Ldc_I4, i); // poco,rdr,i + il.Emit(OpCodes.Callvirt, fnIsDBNull); // poco,bool + var lblNext = il.DefineLabel(); + il.Emit(OpCodes.Brtrue_S, lblNext); // poco + + il.Emit(OpCodes.Dup); // poco,poco + + // Do we need to install a converter? + var converter = GetConverter(ForceDateTimesToUtc, pc, srcType, dstType); + + // Fast + bool Handled = false; + if (converter == null) + { + var valuegetter = typeof(IDataRecord).GetMethod("Get" + srcType.Name, new Type[] { typeof(int) }); + if (valuegetter != null + && valuegetter.ReturnType == srcType + && (valuegetter.ReturnType == dstType || valuegetter.ReturnType == Nullable.GetUnderlyingType(dstType))) + { + il.Emit(OpCodes.Ldarg_0); // *,rdr + il.Emit(OpCodes.Ldc_I4, i); // *,rdr,i + il.Emit(OpCodes.Callvirt, valuegetter); // *,value + + // Convert to Nullable + if (Nullable.GetUnderlyingType(dstType) != null) + { + il.Emit(OpCodes.Newobj, dstType.GetConstructor(new Type[] { Nullable.GetUnderlyingType(dstType) })); + } + + il.Emit(OpCodes.Callvirt, pc.PropertyInfo.GetSetMethod(true)); // poco + Handled = true; + } + } + + // Not so fast + if (!Handled) + { + // Setup stack for call to converter + AddConverterToStack(il, converter); + + // "value = rdr.GetValue(i)" + il.Emit(OpCodes.Ldarg_0); // *,rdr + il.Emit(OpCodes.Ldc_I4, i); // *,rdr,i + il.Emit(OpCodes.Callvirt, fnGetValue); // *,value + + // Call the converter + if (converter != null) + il.Emit(OpCodes.Callvirt, fnInvoke); + + // Assign it + il.Emit(OpCodes.Unbox_Any, pc.PropertyInfo.PropertyType); // poco,poco,value + il.Emit(OpCodes.Callvirt, pc.PropertyInfo.GetSetMethod(true)); // poco + } + + il.MarkLabel(lblNext); + } + } + + il.Emit(OpCodes.Ret); + + // Cache it, return it + var del = m.CreateDelegate(Expression.GetFuncType(typeof(IDataReader), type)); + PocoFactories.Add(key, del); + return del; + } + finally + { + RWLock.ExitWriteLock(); + } + } + + private static void AddConverterToStack(ILGenerator il, Func converter) + { + if (converter != null) + { + // Add the converter + int converterIndex 
= m_Converters.Count; + m_Converters.Add(converter); + + // Generate IL to push the converter onto the stack + il.Emit(OpCodes.Ldsfld, fldConverters); + il.Emit(OpCodes.Ldc_I4, converterIndex); + il.Emit(OpCodes.Callvirt, fnListGetItem); // Converter + } + } + + private static Func GetConverter(bool forceDateTimesToUtc, PocoColumn pc, Type srcType, Type dstType) + { + Func converter = null; + + // Get converter from the mapper + if (Database.Mapper != null) + { + DestinationInfo destinationInfo = pc != null + ? new DestinationInfo(pc.PropertyInfo) + : new DestinationInfo(dstType); + converter = Database.Mapper.GetFromDbConverter(destinationInfo, srcType); + } + + // Standard DateTime->Utc mapper + if (forceDateTimesToUtc && converter == null && srcType == typeof(DateTime) && (dstType == typeof(DateTime) || dstType == typeof(DateTime?))) + { + converter = delegate(object src) { return new DateTime(((DateTime)src).Ticks, DateTimeKind.Utc); }; + } + + // Forced type conversion including integral types -> enum + if (converter == null) + { + if (dstType.IsEnum && IsIntegralType(srcType)) + { + if (srcType != typeof(int)) + { + converter = delegate(object src) { return Convert.ChangeType(src, typeof(int), null); }; + } + } + else if (!dstType.IsAssignableFrom(srcType)) + { + converter = delegate(object src) { return Convert.ChangeType(src, dstType, null); }; + } + } + return converter; + } + + + static Dictionary m_PocoDatas = new Dictionary(); + static List> m_Converters = new List>(); + static MethodInfo fnGetValue = typeof(IDataRecord).GetMethod("GetValue", new Type[] { typeof(int) }); + static MethodInfo fnIsDBNull = typeof(IDataRecord).GetMethod("IsDBNull"); + static FieldInfo fldConverters = typeof(PocoData).GetField("m_Converters", BindingFlags.Static | BindingFlags.GetField | BindingFlags.NonPublic); + static MethodInfo fnListGetItem = typeof(List>).GetProperty("Item").GetGetMethod(); + static MethodInfo fnInvoke = typeof(Func).GetMethod("Invoke"); + public Type type; + public string[] QueryColumns { get; private set; } + public TableInfo TableInfo { get; private set; } + public Dictionary Columns { get; private set; } + Dictionary PocoFactories = new Dictionary(); + } + + // Member variables + string _connectionString; + string _providerName; + DbProviderFactory _factory; + IDbConnection _sharedConnection; + IDbTransaction _transaction; + int _sharedConnectionDepth; + int _transactionDepth; + bool _transactionCancelled; + string _lastSql; + object[] _lastArgs; + string _paramPrefix = "@"; + } + + // Transaction object helps maintain transaction depth counts + public class Transaction : IDisposable + { + public Transaction(Database db) + { + _db = db; + _db.BeginTransaction(); + } + + public void Complete() + { + _db.CompleteTransaction(); + _db = null; + } + + public void Dispose() + { + if (_db != null) + _db.AbortTransaction(); + } + + Database _db; + } + + // Simple helper class for building SQL statments + public class Sql + { + public Sql() + { + } + + public Sql(string sql, params object[] args) + { + _sql = sql; + _args = args; + } + + public static Sql Builder + { + get { return new Sql(); } + } + + string _sql; + object[] _args; + Sql _rhs; + string _sqlFinal; + object[] _argsFinal; + + private void Build() + { + // already built? 
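+ // (the flattened SQL text and argument array are cached in _sqlFinal/_argsFinal, so repeated reads of SQL/Arguments don't rebuild the statement)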
+ if (_sqlFinal != null) + return; + + // Build it + var sb = new StringBuilder(); + var args = new List(); + Build(sb, args, null); + _sqlFinal = sb.ToString(); + _argsFinal = args.ToArray(); + } + + public string SQL + { + get + { + Build(); + return _sqlFinal; + } + } + + public object[] Arguments + { + get + { + Build(); + return _argsFinal; + } + } + + public Sql Append(Sql sql) + { + if (_rhs != null) + _rhs.Append(sql); + else + _rhs = sql; + + return this; + } + + public Sql Append(string sql, params object[] args) + { + return Append(new Sql(sql, args)); + } + + static bool Is(Sql sql, string sqltype) + { + return sql != null && sql._sql != null && sql._sql.StartsWith(sqltype, StringComparison.InvariantCultureIgnoreCase); + } + + private void Build(StringBuilder sb, List args, Sql lhs) + { + if (!String.IsNullOrEmpty(_sql)) + { + // Add SQL to the string + if (sb.Length > 0) + { + sb.Append("\n"); + } + + var sql = Database.ProcessParams(_sql, _args, args); + + if (Is(lhs, "WHERE ") && Is(this, "WHERE ")) + sql = "AND " + sql.Substring(6); + if (Is(lhs, "ORDER BY ") && Is(this, "ORDER BY ")) + sql = ", " + sql.Substring(9); + + sb.Append(sql); + } + + // Now do rhs + if (_rhs != null) + _rhs.Build(sb, args, this); + } + + public Sql Where(string sql, params object[] args) + { + return Append(new Sql("WHERE (" + sql + ")", args)); + } + + public Sql OrderBy(params object[] columns) + { + return Append(new Sql("ORDER BY " + String.Join(", ", (from x in columns select x.ToString()).ToArray()))); + } + + public Sql Select(params object[] columns) + { + return Append(new Sql("SELECT " + String.Join(", ", (from x in columns select x.ToString()).ToArray()))); + } + + public Sql From(params object[] tables) + { + return Append(new Sql("FROM " + String.Join(", ", (from x in tables select x.ToString()).ToArray()))); + } + + public Sql GroupBy(params object[] columns) + { + return Append(new Sql("GROUP BY " + String.Join(", ", (from x in columns select x.ToString()).ToArray()))); + } + + private SqlJoinClause Join(string JoinType, string table) + { + return new SqlJoinClause(Append(new Sql(JoinType + table))); + } + + public SqlJoinClause InnerJoin(string table) { return Join("INNER JOIN ", table); } + public SqlJoinClause LeftJoin(string table) { return Join("LEFT JOIN ", table); } + + public class SqlJoinClause + { + private readonly Sql _sql; + + public SqlJoinClause(Sql sql) + { + _sql = sql; + } + + public Sql On(string onClause, params object[] args) + { + return _sql.Append("ON " + onClause, args); + } + } + } + +} \ No newline at end of file diff --git a/NzbDrone.Core/NzbDrone.Core.csproj b/NzbDrone.Core/NzbDrone.Core.csproj index 598fdcc7d..7a2918f5d 100644 --- a/NzbDrone.Core/NzbDrone.Core.csproj +++ b/NzbDrone.Core/NzbDrone.Core.csproj @@ -129,6 +129,10 @@ False ..\Libraries\Exceptioneer.WindowsFormsClient.dll + + + ..\Libraries\MigSharp.dll + ..\packages\MiniProfiler.1.2\lib\MvcMiniProfiler.dll @@ -139,10 +143,12 @@ False ..\Libraries\NLog.dll - + + False ..\Libraries\SubSonic.Core.dll + @@ -165,7 +171,10 @@ - + + + + @@ -176,6 +185,7 @@ + @@ -290,6 +300,12 @@ Migrator + + + + + +
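For reference, a minimal usage sketch of the PetaPoco-style data-access helper added above, assuming its standard constructors and generic Page/Insert/Update signatures; the Episode POCO, table name and connection settings below are hypothetical and for illustration only:

    // Hypothetical POCO mapped via the attributes the helper looks for;
    // "Episodes"/"EpisodeId" are illustrative names, not NzbDrone's actual schema.
    [TableName("Episodes")]
    [PrimaryKey("EpisodeId")]
    public class Episode
    {
        public int EpisodeId { get; set; }
        public string Title { get; set; }
    }

    // Assumed constructor: connection string plus ADO.NET provider invariant name.
    var db = new Database("Data Source=nzbdrone.db", "System.Data.SQLite");

    // Abbreviated queries get the SELECT/FROM clause added automatically;
    // parameters use the numbered @0, @1, ... syntax.
    var page = db.Page<Episode>(1, 20, "WHERE Title LIKE @0 ORDER BY EpisodeId", "%pilot%");

    // With an auto-increment key, Insert writes the new id back to EpisodeId
    // and returns it; Update/Delete then locate the row by that key.
    var episode = new Episode { Title = "Pilot" };
    db.Insert(episode);

    using (var tx = new Transaction(db))   // nestable transaction scope
    {
        episode.Title = "Pilot (1080p)";
        db.Update(episode);
        tx.Complete();                     // omit Complete() to roll back on Dispose
    }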