From 39e5a5a4ee519728c193f41875455f653a8ab43a Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 26 Jul 2024 03:31:48 -0700 Subject: [PATCH 01/78] feat: create `sqlx.toml` format --- Cargo.toml | 12 +- sqlx-core/Cargo.toml | 12 +- sqlx-core/src/config/common.rs | 21 +-- sqlx-core/src/config/macros.rs | 218 ++++++---------------------- sqlx-core/src/config/migrate.rs | 98 +++---------- sqlx-core/src/config/mod.rs | 151 ++++++++++--------- sqlx-core/src/config/reference.toml | 57 +++----- sqlx-core/src/config/tests.rs | 23 ++- sqlx-core/src/lib.rs | 1 + sqlx-macros-core/Cargo.toml | 4 +- sqlx-macros/Cargo.toml | 3 +- src/lib.rs | 33 +---- 12 files changed, 198 insertions(+), 435 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b72ec9afbd..37f719b770 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,18 +54,20 @@ repository.workspace = true rust-version.workspace = true [package.metadata.docs.rs] -features = ["all-databases", "_unstable-all-types", "_unstable-doc", "sqlite-preupdate-hook"] +features = ["all-databases", "_unstable-all-types", "_unstable-doc"] rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["any", "macros", "migrate", "json"] +default = ["any", "macros", "migrate", "json", "config-all"] derive = ["sqlx-macros/derive"] macros = ["derive", "sqlx-macros/macros"] migrate = ["sqlx-core/migrate", "sqlx-macros?/migrate", "sqlx-mysql?/migrate", "sqlx-postgres?/migrate", "sqlx-sqlite?/migrate"] -# Enable parsing of `sqlx.toml` for configuring macros and migrations. -sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml"] +# Enable parsing of `sqlx.toml` for configuring macros, migrations, or both. +config-macros = ["sqlx-macros?/config-macros"] +config-migrate = ["sqlx-macros?/config-migrate"] +config-all = ["config-macros", "config-migrate"] # intended mainly for CI and docs all-databases = ["mysql", "sqlite", "postgres", "any"] @@ -83,7 +85,7 @@ _unstable-all-types = [ "bstr" ] # Render documentation that wouldn't otherwise be shown (e.g. `sqlx_core::config`). -_unstable-doc = [] +_unstable-doc = ["config-all", "sqlx-core/_unstable-doc"] # Base runtime features without TLS runtime-async-std = ["_rt-async-std", "sqlx-core/_rt-async-std", "sqlx-macros?/_rt-async-std"] diff --git a/sqlx-core/Cargo.toml b/sqlx-core/Cargo.toml index bf57849733..c12de18889 100644 --- a/sqlx-core/Cargo.toml +++ b/sqlx-core/Cargo.toml @@ -13,7 +13,7 @@ features = ["offline"] [features] default = [] -migrate = ["sha2", "crc"] +migrate = ["sha2", "crc", "config-migrate"] any = [] @@ -32,13 +32,11 @@ _tls-none = [] # support offline/decoupled building (enables serialization of `Describe`) offline = ["serde", "either/serde"] -# Enable parsing of `sqlx.toml`. -# For simplicity, the `config` module is always enabled, -# but disabling this disables the `serde` derives and the `toml` crate, -# which is a good bit less code to compile if the feature isn't being used. -sqlx-toml = ["serde", "toml/parse"] +config = ["serde", "toml/parse"] +config-macros = ["config"] +config-migrate = ["config"] -_unstable-doc = ["sqlx-toml"] +_unstable-doc = ["config-macros", "config-migrate"] [dependencies] # Runtimes diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 2d5342d5b8..8c774fc60f 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,10 +1,5 @@ /// Configuration shared by multiple components. 
-#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case", deny_unknown_fields) -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct Config { /// Override the database URL environment variable. /// @@ -21,14 +16,14 @@ pub struct Config { /// /// #### `foo/sqlx.toml` /// ```toml - /// [common] - /// database-url-var = "FOO_DATABASE_URL" + /// [macros] + /// database_url_var = "FOO_DATABASE_URL" /// ``` /// /// #### `bar/sqlx.toml` /// ```toml - /// [common] - /// database-url-var = "BAR_DATABASE_URL" + /// [macros] + /// database_url_var = "BAR_DATABASE_URL" /// ``` /// /// #### `.env` @@ -41,9 +36,3 @@ pub struct Config { /// and the ones used in `bar` will use `BAR_DATABASE_URL`. pub database_url_var: Option, } - -impl Config { - pub fn database_url_var(&self) -> &str { - self.database_url_var.as_deref().unwrap_or("DATABASE_URL") - } -} diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 6d08aa3ec2..5edd30dc15 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -1,31 +1,40 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. -/// -/// See also [`common::Config`][crate::config::common::Config] for renaming `DATABASE_URL`. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case", deny_unknown_fields) -)] +#[derive(Debug, Default, serde::Deserialize)] +#[serde(default)] pub struct Config { - /// Specify which crates' types to use when types from multiple crates apply. + /// Specify the crate to use for mapping date/time types to Rust. + /// + /// The default behavior is to use whatever crate is enabled, + /// [`chrono`] or [`time`] (the latter takes precedent). + /// + /// [`chrono`]: crate::types::chrono + /// [`time`]: crate::types::time + /// + /// Example: Always Use Chrono + /// ------- + /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable + /// the `time` feature of SQLx which will force it on for all crates using SQLx, + /// which will result in problems if your crate wants to use types from [`chrono`]. + /// + /// You can use the type override syntax (see `sqlx::query!` for details), + /// or you can force an override globally by setting this option. /// - /// See [`PreferredCrates`] for details. - pub preferred_crates: PreferredCrates, + /// #### `sqlx.toml` + /// ```toml + /// [macros] + /// datetime_crate = "chrono" + /// ``` + /// + /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification + pub datetime_crate: DateTimeCrate, /// Specify global overrides for mapping SQL type names to Rust type names. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. /// - /// ## Note: Case-Sensitive - /// Currently, the case of the type name MUST match the name SQLx knows it by. - /// Built-in types are spelled in all-uppercase to match SQL convention. - /// - /// However, user-created types in Postgres are all-lowercase unless quoted. - /// /// ## Note: Orthogonal to Nullability /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` /// or not. They only override the inner type used. 
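> Editor's note: the `database_url_var` override documented in the hunk above boils down to an ordinary
> environment-variable lookup at macro-expansion time. The sketch below is illustrative only (the function
> name `database_url_for` is not part of SQLx); it shows how two crates in one workspace can point at
> different databases, mirroring the `FOO_DATABASE_URL` / `BAR_DATABASE_URL` example in the doc comment.

```rust
use std::env;

// Illustrative only: each crate's macros read whichever variable its own
// `sqlx.toml` names, so `foo` and `bar` can check against different databases.
fn database_url_for(configured_var: &str) -> Option<String> {
    env::var(configured_var).ok()
}

fn main() {
    let foo_url = database_url_for("FOO_DATABASE_URL");
    let bar_url = database_url_for("BAR_DATABASE_URL");
    println!("foo: {foo_url:?}, bar: {bar_url:?}");
}
```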
@@ -69,9 +78,9 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Override a built-in type - /// 'UUID' = "crate::types::MyUuid" + /// 'uuid' = "crate::types::MyUuid" /// /// # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) /// # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -106,7 +115,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Map SQL type `foo` to `crate::types::Foo` /// 'foo' = "crate::types::Foo" /// ``` @@ -116,7 +125,7 @@ pub struct Config { /// (See `Note` section above for details.) /// /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Map SQL type `foo.foo` to `crate::types::Foo` /// 'foo.foo' = "crate::types::Foo" /// ``` @@ -127,7 +136,7 @@ pub struct Config { /// it must be wrapped in quotes _twice_ for SQLx to know the difference: /// /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # `"Foo"` in SQLx /// '"Foo"' = "crate::types::Foo" /// # **NOT** `"Foo"` in SQLx (parses as just `Foo`) @@ -140,11 +149,9 @@ pub struct Config { /// ``` /// /// (See `Note` section above for details.) - // TODO: allow specifying different types for input vs output - // e.g. to accept `&[T]` on input but output `Vec` pub type_overrides: BTreeMap, - /// Specify per-table and per-column overrides for mapping SQL types to Rust types. + /// Specify per-column overrides for mapping SQL types to Rust types. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. @@ -199,7 +206,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.table-overrides.'foo'] + /// [macros.column_overrides.'foo'] /// # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: /// 'bar' = "crate::types::Bar" /// @@ -211,89 +218,25 @@ pub struct Config { /// # "Bar" = "crate::types::Bar" /// /// # Table name may be quoted (note the wrapping single-quotes) - /// [macros.table-overrides.'"Foo"'] + /// [macros.column_overrides.'"Foo"'] /// 'bar' = "crate::types::Bar" /// '"Bar"' = "crate::types::Bar" /// /// # Table name may also be schema-qualified. /// # Note how the dot is inside the quotes. - /// [macros.table-overrides.'my_schema.my_table'] + /// [macros.column_overrides.'my_schema.my_table'] /// 'my_column' = "crate::types::MyType" /// /// # Quoted schema, table, and column names - /// [macros.table-overrides.'"My Schema"."My Table"'] + /// [macros.column_overrides.'"My Schema"."My Table"'] /// '"My Column"' = "crate::types::MyType" /// ``` - pub table_overrides: BTreeMap>, -} - -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] -pub struct PreferredCrates { - /// Specify the crate to use for mapping date/time types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`chrono`] or [`time`] (the latter takes precedent). - /// - /// [`chrono`]: crate::types::chrono - /// [`time`]: crate::types::time - /// - /// Example: Always Use Chrono - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `time` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`chrono`]. 
- /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. - /// - /// #### `sqlx.toml` - /// ```toml - /// [macros.preferred-crates] - /// date-time = "chrono" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub date_time: DateTimeCrate, - - /// Specify the crate to use for mapping `NUMERIC` types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`bigdecimal`] or [`rust_decimal`] (the latter takes precedent). - /// - /// [`bigdecimal`]: crate::types::bigdecimal - /// [`rust_decimal`]: crate::types::rust_decimal - /// - /// Example: Always Use `bigdecimal` - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `rust_decimal` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`bigdecimal`]. - /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. - /// - /// #### `sqlx.toml` - /// ```toml - /// [macros.preferred-crates] - /// numeric = "bigdecimal" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub numeric: NumericCrate, + pub column_overrides: BTreeMap>, } -/// The preferred crate to use for mapping date/time types to Rust. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The crate to use for mapping date/time types to Rust. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DateTimeCrate { /// Use whichever crate is enabled (`time` then `chrono`). #[default] @@ -302,63 +245,33 @@ pub enum DateTimeCrate { /// Always use types from [`chrono`][crate::types::chrono]. /// /// ```toml - /// [macros.preferred-crates] - /// date-time = "chrono" + /// [macros] + /// datetime_crate = "chrono" /// ``` Chrono, /// Always use types from [`time`][crate::types::time]. /// /// ```toml - /// [macros.preferred-crates] - /// date-time = "time" + /// [macros] + /// datetime_crate = "time" /// ``` Time, } -/// The preferred crate to use for mapping `NUMERIC` types to Rust. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] -pub enum NumericCrate { - /// Use whichever crate is enabled (`rust_decimal` then `bigdecimal`). - #[default] - Inferred, - - /// Always use types from [`bigdecimal`][crate::types::bigdecimal]. - /// - /// ```toml - /// [macros.preferred-crates] - /// numeric = "bigdecimal" - /// ``` - #[cfg_attr(feature = "sqlx-toml", serde(rename = "bigdecimal"))] - BigDecimal, - - /// Always use types from [`rust_decimal`][crate::types::rust_decimal]. - /// - /// ```toml - /// [macros.preferred-crates] - /// numeric = "rust_decimal" - /// ``` - RustDecimal, -} - /// A SQL type name; may optionally be schema-qualified. /// -/// See [`macros.type-overrides`][Config::type_overrides] for usages. +/// See [`macros.type_overrides`][Config::type_overrides] for usages. pub type SqlType = Box; /// A SQL table name; may optionally be schema-qualified. /// -/// See [`macros.table-overrides`][Config::table_overrides] for usages. 
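> Editor's note: because the override tables documented above are keyed by the SQL name exactly as written
> (quoting and schema qualification included), here is a small self-contained illustration of the lookup
> behaviour. Plain `BTreeMap<String, String>` stands in for the real `Config` maps; the helper name is
> hypothetical.

```rust
use std::collections::BTreeMap;

// Overrides are keyed by the SQL name exactly as written in `sqlx.toml`,
// including quoting and any schema qualification.
fn type_override<'a>(overrides: &'a BTreeMap<String, String>, sql_type: &str) -> Option<&'a str> {
    overrides.get(sql_type).map(String::as_str)
}

fn main() {
    let mut overrides = BTreeMap::new();
    overrides.insert("foo.foo".to_string(), "crate::types::Foo".to_string());
    overrides.insert(r#""Foo""#.to_string(), "crate::types::Foo".to_string());

    // A schema-qualified name matches only the qualified key.
    assert_eq!(type_override(&overrides, "foo.foo"), Some("crate::types::Foo"));
    // A quoted `"Foo"` and an unquoted `Foo` are distinct keys.
    assert_eq!(type_override(&overrides, r#""Foo""#), Some("crate::types::Foo"));
    assert_eq!(type_override(&overrides, "Foo"), None);
}
```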
+/// See [`macros.column_overrides`][Config::column_overrides] for usages. pub type TableName = Box; /// A column in a SQL table. /// -/// See [`macros.table-overrides`][Config::table_overrides] for usages. +/// See [`macros.column_overrides`][Config::column_overrides] for usages. pub type ColumnName = Box; /// A Rust type name or path. @@ -370,49 +283,14 @@ pub type RustType = Box; impl Config { /// Get the override for a given type name (optionally schema-qualified). pub fn type_override(&self, type_name: &str) -> Option<&str> { - // TODO: make this case-insensitive self.type_overrides.get(type_name).map(|s| &**s) } /// Get the override for a given column and table name (optionally schema-qualified). pub fn column_override(&self, table: &str, column: &str) -> Option<&str> { - self.table_overrides + self.column_overrides .get(table) .and_then(|by_column| by_column.get(column)) .map(|s| &**s) } } - -impl DateTimeCrate { - /// Returns `self == Self::Inferred` - #[inline(always)] - pub fn is_inferred(&self) -> bool { - *self == Self::Inferred - } - - #[inline(always)] - pub fn crate_name(&self) -> Option<&str> { - match self { - Self::Inferred => None, - Self::Chrono => Some("chrono"), - Self::Time => Some("time"), - } - } -} - -impl NumericCrate { - /// Returns `self == Self::Inferred` - #[inline(always)] - pub fn is_inferred(&self) -> bool { - *self == Self::Inferred - } - - #[inline(always)] - pub fn crate_name(&self) -> Option<&str> { - match self { - Self::Inferred => None, - Self::BigDecimal => Some("bigdecimal"), - Self::RustDecimal => Some("rust_decimal"), - } - } -} diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 0dd6cc2257..5878f9a24f 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -12,27 +12,9 @@ use std::collections::BTreeSet; /// if the proper precautions are not taken. /// /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case", deny_unknown_fields) -)] +#[derive(Debug, Default, serde::Deserialize)] +#[serde(default)] pub struct Config { - /// Specify the names of schemas to create if they don't already exist. - /// - /// This is done before checking the existence of the migrations table - /// (`_sqlx_migrations` or overridden `table_name` below) so that it may be placed in - /// one of these schemas. - /// - /// ### Example - /// `sqlx.toml`: - /// ```toml - /// [migrate] - /// create-schemas = ["foo"] - /// ``` - pub create_schemas: BTreeSet>, - /// Override the name of the table used to track executed migrations. /// /// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`. @@ -53,7 +35,7 @@ pub struct Config { /// ```toml /// [migrate] /// # Put `_sqlx_migrations` in schema `foo` - /// table-name = "foo._sqlx_migrations" + /// table_name = "foo._sqlx_migrations" /// ``` pub table_name: Option>, @@ -81,7 +63,7 @@ pub struct Config { /// `sqlx.toml`: /// ```toml /// [migrate] - /// ignored-chars = ["\r"] + /// ignored_chars = ["\r"] /// ``` /// /// For projects using Git, this can also be addressed using [`.gitattributes`]: @@ -99,7 +81,7 @@ pub struct Config { /// To make your migrations amenable to reformatting, you may wish to tell SQLx to ignore /// _all_ whitespace characters in migrations. /// - /// ##### Warning: Beware Syntactically Significant Whitespace! 
+ /// ##### Warning: Beware Syntatically Significant Whitespace! /// If your migrations use string literals or quoted identifiers which contain whitespace, /// this configuration will cause the migration machinery to ignore some changes to these. /// This may result in a mismatch between the development and production versions of @@ -109,70 +91,51 @@ pub struct Config { /// ```toml /// [migrate] /// # Ignore common whitespace characters when hashing - /// ignored-chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF + /// ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF /// ``` // Likely lower overhead for small sets than `HashSet`. pub ignored_chars: BTreeSet, - /// Specify default options for new migrations created with `sqlx migrate add`. - pub defaults: MigrationDefaults, -} - -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] -pub struct MigrationDefaults { - /// Specify the default type of migration that `sqlx migrate add` should create by default. + /// Specify the default type of migration that `sqlx migrate create` should create by default. /// /// ### Example: Use Reversible Migrations by Default /// `sqlx.toml`: /// ```toml - /// [migrate.defaults] - /// migration-type = "reversible" + /// [migrate] + /// default_type = "reversible" /// ``` - pub migration_type: DefaultMigrationType, + pub default_type: DefaultMigrationType, - /// Specify the default scheme that `sqlx migrate add` should use for version integers. + /// Specify the default scheme that `sqlx migrate create` should use for version integers. /// /// ### Example: Use Sequential Versioning by Default /// `sqlx.toml`: /// ```toml - /// [migrate.defaults] - /// migration-versioning = "sequential" + /// [migrate] + /// default_versioning = "sequential" /// ``` - pub migration_versioning: DefaultVersioning, + pub default_versioning: DefaultVersioning, } -/// The default type of migration that `sqlx migrate add` should create by default. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The default type of migration that `sqlx migrate create` should create by default. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DefaultMigrationType { /// Create the same migration type as that of the latest existing migration, /// or `Simple` otherwise. #[default] Inferred, - /// Create non-reversible migrations (`_.sql`) by default. + /// Create a non-reversible migration (`_.sql`). Simple, - /// Create reversible migrations (`_.up.sql` and `[...].down.sql`) by default. + /// Create a reversible migration (`_.up.sql` and `[...].down.sql`). Reversible, } -/// The default scheme that `sqlx migrate add` should use for version integers. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The default scheme that `sqlx migrate create` should use for version integers. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DefaultVersioning { /// Infer the versioning scheme from existing migrations: /// @@ -193,20 +156,3 @@ pub enum DefaultVersioning { /// Use sequential integers for migration versions. 
Sequential, } - -#[cfg(feature = "migrate")] -impl Config { - pub fn migrations_dir(&self) -> &str { - self.migrations_dir.as_deref().unwrap_or("migrations") - } - - pub fn table_name(&self) -> &str { - self.table_name.as_deref().unwrap_or("_sqlx_migrations") - } - - pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { - let mut config = crate::migrate::ResolveConfig::new(); - config.ignore_chars(self.ignored_chars.iter().copied()); - config - } -} diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 40feb007fd..979477241f 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -3,19 +3,18 @@ //! To use, create a `sqlx.toml` file in your crate root (the same directory as your `Cargo.toml`). //! The configuration in a `sqlx.toml` configures SQLx *only* for the current crate. //! -//! Requires the `sqlx-toml` feature (not enabled by default). -//! -//! `sqlx-cli` will also read `sqlx.toml` when running migrations. -//! //! See the [`Config`] type and its fields for individual configuration options. //! //! See the [reference][`_reference`] for the full `sqlx.toml` file. -use std::error::Error; use std::fmt::Debug; use std::io; use std::path::{Path, PathBuf}; +// `std::sync::OnceLock` doesn't have a stable `.get_or_try_init()` +// because it's blocked on a stable `Try` trait. +use once_cell::sync::OnceCell; + /// Configuration shared by multiple components. /// /// See [`common::Config`] for details. @@ -24,11 +23,13 @@ pub mod common; /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. +#[cfg(feature = "config-macros")] pub mod macros; /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. +#[cfg(feature = "config-migrate")] pub mod migrate; /// Reference for `sqlx.toml` files @@ -40,16 +41,11 @@ pub mod migrate; /// ``` pub mod _reference {} -#[cfg(all(test, feature = "sqlx-toml"))] +#[cfg(test)] mod tests; /// The parsed structure of a `sqlx.toml` file. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case", deny_unknown_fields) -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct Config { /// Configuration shared by multiple components. /// @@ -59,11 +55,21 @@ pub struct Config { /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. + #[cfg_attr( + docsrs, + doc(cfg(any(feature = "config-all", feature = "config-macros"))) + )] + #[cfg(feature = "config-macros")] pub macros: macros::Config, /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. + #[cfg_attr( + docsrs, + doc(cfg(any(feature = "config-all", feature = "config-migrate"))) + )] + #[cfg(feature = "config-migrate")] pub migrate: migrate::Config, } @@ -84,15 +90,13 @@ pub enum ConfigError { std::env::VarError, ), - /// No configuration file was found. Not necessarily fatal. - #[error("config file {path:?} not found")] - NotFound { path: PathBuf }, - /// An I/O error occurred while attempting to read the config file at `path`. /// - /// If the error is [`io::ErrorKind::NotFound`], [`Self::NotFound`] is returned instead. + /// This includes [`io::ErrorKind::NotFound`]. + /// + /// [`Self::not_found_path()`] will return the path if the file was not found. 
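> Editor's note: to make the deserialization of the `[migrate]` options above concrete, here is a hedged,
> standalone sketch using `serde` and `toml` directly (both assumed as dependencies with the `derive` and
> `parse` features). `MigrateConfig` and `DefaultMigrationType` are stand-ins for the real types, using the
> snake_case keys this patch introduces.

```rust
use serde::Deserialize;
use std::collections::BTreeSet;

#[derive(Debug, Default, Deserialize)]
#[serde(default)]
struct MigrateConfig {
    table_name: Option<String>,
    ignored_chars: BTreeSet<char>,
    default_type: DefaultMigrationType,
}

#[derive(Debug, Default, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
enum DefaultMigrationType {
    #[default]
    Inferred,
    Simple,
    Reversible,
}

fn main() -> Result<(), toml::de::Error> {
    let cfg: MigrateConfig = toml::from_str(
        r#"
            table_name = "foo._sqlx_migrations"
            ignored_chars = ["\r"]
            default_type = "reversible"
        "#,
    )?;
    assert_eq!(cfg.table_name.as_deref(), Some("foo._sqlx_migrations"));
    assert_eq!(cfg.ignored_chars, BTreeSet::from(['\r']));
    assert_eq!(cfg.default_type, DefaultMigrationType::Reversible);
    Ok(())
}
```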
#[error("error reading config file {path:?}")] - Io { + Read { path: PathBuf, #[source] error: io::Error, @@ -101,82 +105,95 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. - /// - /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { path: PathBuf, - /// Type-erased [`toml::de::Error`]. #[source] - error: Box, + error: toml::de::Error, }, - - /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. - #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] - ParseDisabled { path: PathBuf }, } impl ConfigError { - /// Create a [`ConfigError`] from a [`std::io::Error`]. - /// - /// Maps to either `NotFound` or `Io`. - pub fn from_io(path: impl Into, error: io::Error) -> Self { - if error.kind() == io::ErrorKind::NotFound { - Self::NotFound { path: path.into() } - } else { - Self::Io { - path: path.into(), - error, - } - } - } - /// If this error means the file was not found, return the path that was attempted. pub fn not_found_path(&self) -> Option<&Path> { - if let Self::NotFound { path } = self { - Some(path) - } else { - None + match self { + ConfigError::Read { path, error } if error.kind() == io::ErrorKind::NotFound => { + Some(path) + } + _ => None, } } } +static CACHE: OnceCell = OnceCell::new(); + /// Internal methods for loading a `Config`. #[allow(clippy::result_large_err)] impl Config { - /// Get the cached config, or read `$CARGO_MANIFEST_DIR/sqlx.toml`. + /// Get the cached config, or attempt to read `$CARGO_MANIFEST_DIR/sqlx.toml`. /// /// On success, the config is cached in a `static` and returned by future calls. /// - /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. + /// Returns `Config::default()` if the file does not exist. /// - /// If the file does not exist, the cache is populated with `Config::default()`. - pub fn try_from_crate_or_default() -> Result { - Self::read_from(get_crate_path()?).or_else(|e| { - if let ConfigError::NotFound { .. } = e { - Ok(Config::default()) + /// ### Panics + /// If the file exists but an unrecoverable error was encountered while parsing it. + pub fn from_crate() -> &'static Self { + Self::try_from_crate().unwrap_or_else(|e| { + if let Some(path) = e.not_found_path() { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found (error: {e})"); + CACHE.get_or_init(Config::default) } else { - Err(e) + // In the case of migrations, + // we can't proceed with defaults as they may be completely wrong. + panic!("failed to read sqlx config: {e}") } }) } - /// Get the cached config, or attempt to read it from the path given. + /// Get the cached config, or to read `$CARGO_MANIFEST_DIR/sqlx.toml`. + /// + /// On success, the config is cached in a `static` and returned by future calls. + /// + /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. + pub fn try_from_crate() -> Result<&'static Self, ConfigError> { + Self::try_get_with(|| { + let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); + path.push("sqlx.toml"); + Ok(path) + }) + } + + /// Get the cached config, or attempt to read `sqlx.toml` from the current working directory. /// /// On success, the config is cached in a `static` and returned by future calls. 
/// /// Errors if the config file does not exist, or could not be read. - pub fn try_from_path(path: PathBuf) -> Result { - Self::read_from(path) + pub fn try_from_current_dir() -> Result<&'static Self, ConfigError> { + Self::try_get_with(|| Ok("sqlx.toml".into())) + } + + /// Get the cached config, or attempt to read it from the path returned by the closure. + /// + /// On success, the config is cached in a `static` and returned by future calls. + /// + /// Errors if the config file does not exist, or could not be read. + pub fn try_get_with( + make_path: impl FnOnce() -> Result, + ) -> Result<&'static Self, ConfigError> { + CACHE.get_or_try_init(|| { + let path = make_path()?; + Self::read_from(path) + }) } - #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. let toml_s = match std::fs::read_to_string(&path) { Ok(toml) => toml, Err(error) => { - return Err(ConfigError::from_io(path, error)); + return Err(ConfigError::Read { path, error }); } }; @@ -184,24 +201,6 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { - path, - error: Box::new(error), - }) - } - - #[cfg(not(feature = "sqlx-toml"))] - fn read_from(path: PathBuf) -> Result { - match path.try_exists() { - Ok(true) => Err(ConfigError::ParseDisabled { path }), - Ok(false) => Err(ConfigError::NotFound { path }), - Err(e) => Err(ConfigError::from_io(path, e)), - } + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error }) } } - -fn get_crate_path() -> Result { - let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); - path.push("sqlx.toml"); - Ok(path) -} diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 77833fb5a8..fae92f3422 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -13,31 +13,20 @@ # This is used by both the macros and `sqlx-cli`. # # If not specified, defaults to `DATABASE_URL` -database-url-var = "FOO_DATABASE_URL" +database_url_var = "FOO_DATABASE_URL" ############################################################################################### # Configuration for the `query!()` family of macros. [macros] - -[macros.preferred-crates] # Force the macros to use the `chrono` crate for date/time types, even if `time` is enabled. # # Defaults to "inferred": use whichever crate is enabled (`time` takes precedence over `chrono`). -date-time = "chrono" +datetime_crate = "chrono" # Or, ensure the macros always prefer `time` # in case new date/time crates are added in the future: -# date-time = "time" - -# Force the macros to use the `rust_decimal` crate for `NUMERIC`, even if `bigdecimal` is enabled. -# -# Defaults to "inferred": use whichever crate is enabled (`bigdecimal` takes precedence over `rust_decimal`). -numeric = "rust_decimal" - -# Or, ensure the macros always prefer `bigdecimal` -# in case new decimal crates are added in the future: -# numeric = "bigdecimal" +# datetime_crate = "time" # Set global overrides for mapping SQL types to Rust types. # @@ -49,11 +38,9 @@ numeric = "rust_decimal" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. 
-[macros.type-overrides] +[macros.type_overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) -# Note: currently, the case of the type name MUST match. -# Built-in types are spelled in all-uppercase to match SQL convention. -'UUID' = "crate::types::MyUuid" +'uuid' = "crate::types::MyUuid" # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -80,7 +67,7 @@ numeric = "rust_decimal" # Quoted schema and type name '"Foo"."Bar"' = "crate::schema::foo::Bar" -# Set per-table and per-column overrides for mapping SQL types to Rust types. +# Set per-column overrides for mapping SQL types to Rust types. # # Note: table name is required in the header. # @@ -89,7 +76,7 @@ numeric = "rust_decimal" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.table-overrides.'foo'] +[macros.column_overrides.'foo'] # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: 'bar' = "crate::types::Bar" @@ -101,17 +88,17 @@ numeric = "rust_decimal" # "Bar" = "crate::types::Bar" # Table name may be quoted (note the wrapping single-quotes) -[macros.table-overrides.'"Foo"'] +[macros.column_overrides.'"Foo"'] 'bar' = "crate::types::Bar" '"Bar"' = "crate::types::Bar" # Table name may also be schema-qualified. # Note how the dot is inside the quotes. -[macros.table-overrides.'my_schema.my_table'] +[macros.column_overrides.'my_schema.my_table'] 'my_column' = "crate::types::MyType" # Quoted schema, table, and column names -[macros.table-overrides.'"My Schema"."My Table"'] +[macros.column_overrides.'"My Schema"."My Table"'] '"My Column"' = "crate::types::MyType" ############################################################################################### @@ -143,12 +130,12 @@ numeric = "rust_decimal" # You should create the new table as a copy of the existing migrations table (with contents!), # and be sure all instances of your application have been migrated to the new # table before deleting the old one. -table-name = "foo._sqlx_migrations" +table_name = "foo._sqlx_migrations" # Override the directory used for migrations files. # # Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`. -migrations-dir = "foo/migrations" +migrations_dir = "foo/migrations" # Specify characters that should be ignored when hashing migrations. # @@ -161,34 +148,28 @@ migrations-dir = "foo/migrations" # change the output of the hash. # # This may require manual rectification for deployed databases. -# ignored-chars = [] +# ignored_chars = [] # Ignore Carriage Returns (`` | `\r`) # Note that the TOML format requires double-quoted strings to process escapes. -# ignored-chars = ["\r"] +# ignored_chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) -# Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) -# -# U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, -# where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark -ignored-chars = [" ", "\t", "\r", "\n", "\uFEFF"] +ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF -# Set default options for new migrations. -[migrate.defaults] # Specify reversible migrations by default (for `sqlx migrate create`). 
# # Defaults to "inferred": uses the type of the last migration, or "simple" otherwise. -migration-type = "reversible" +default_type = "reversible" # Specify simple (non-reversible) migrations by default. -# migration-type = "simple" +# default_type = "simple" # Specify sequential versioning by default (for `sqlx migrate create`). # # Defaults to "inferred": guesses the versioning scheme from the latest migrations, # or "timestamp" otherwise. -migration-versioning = "sequential" +default_versioning = "sequential" # Specify timestamp versioning by default. -# migration-versioning = "timestamp" +# default_versioning = "timestamp" diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 0b0b590919..bf042069a2 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -8,7 +8,11 @@ fn reference_parses_as_config() { .unwrap_or_else(|e| panic!("expected reference.toml to parse as Config: {e}")); assert_common_config(&config.common); + + #[cfg(feature = "config-macros")] assert_macros_config(&config.macros); + + #[cfg(feature = "config-migrate")] assert_migrate_config(&config.migrate); } @@ -16,16 +20,14 @@ fn assert_common_config(config: &config::common::Config) { assert_eq!(config.database_url_var.as_deref(), Some("FOO_DATABASE_URL")); } +#[cfg(feature = "config-macros")] fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; - assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); - assert_eq!(config.preferred_crates.numeric, NumericCrate::RustDecimal); + assert_eq!(config.datetime_crate, DateTimeCrate::Chrono); // Type overrides // Don't need to cover everything, just some important canaries. - assert_eq!(config.type_override("UUID"), Some("crate::types::MyUuid")); - assert_eq!(config.type_override("foo"), Some("crate::types::Foo")); assert_eq!(config.type_override(r#""Bar""#), Some("crate::types::Bar"),); @@ -72,22 +74,17 @@ fn assert_macros_config(config: &config::macros::Config) { ); } +#[cfg(feature = "config-migrate")] fn assert_migrate_config(config: &config::migrate::Config) { use config::migrate::*; assert_eq!(config.table_name.as_deref(), Some("foo._sqlx_migrations")); assert_eq!(config.migrations_dir.as_deref(), Some("foo/migrations")); - let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n', '\u{FEFF}']); + let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n']); assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!( - config.defaults.migration_type, - DefaultMigrationType::Reversible - ); - assert_eq!( - config.defaults.migration_versioning, - DefaultVersioning::Sequential - ); + assert_eq!(config.default_type, DefaultMigrationType::Reversible); + assert_eq!(config.default_versioning, DefaultVersioning::Sequential); } diff --git a/sqlx-core/src/lib.rs b/sqlx-core/src/lib.rs index 09f2900ba8..8b831ecaff 100644 --- a/sqlx-core/src/lib.rs +++ b/sqlx-core/src/lib.rs @@ -91,6 +91,7 @@ pub mod any; #[cfg(feature = "migrate")] pub mod testing; +#[cfg(feature = "config")] pub mod config; pub use error::{Error, Result}; diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 02b773af07..3ed1ae4072 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -27,7 +27,9 @@ derive = [] macros = [] migrate = ["sqlx-core/migrate"] -sqlx-toml = ["sqlx-core/sqlx-toml"] +config = ["sqlx-core/config"] +config-macros = ["config", "sqlx-core/config-macros"] +config-migrate = ["config", "sqlx-core/config-migrate"] # database mysql = ["sqlx-mysql"] diff --git 
a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 23079a3810..49117afac7 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -28,7 +28,8 @@ derive = ["sqlx-macros-core/derive"] macros = ["sqlx-macros-core/macros"] migrate = ["sqlx-macros-core/migrate"] -sqlx-toml = ["sqlx-macros-core/sqlx-toml"] +config-macros = ["sqlx-macros-core/config-macros"] +config-migrate = ["sqlx-macros-core/config-migrate"] # database mysql = ["sqlx-macros-core/mysql"] diff --git a/src/lib.rs b/src/lib.rs index c608e02aea..2e801540dd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -175,35 +175,4 @@ pub mod prelude { } #[cfg(feature = "_unstable-doc")] -#[cfg_attr(docsrs, doc(cfg(feature = "_unstable-doc")))] -pub use sqlx_core::config as _config; - -// NOTE: APIs exported in this module are SemVer-exempt. -#[doc(hidden)] -pub mod _unstable { - pub use sqlx_core::config; -} - -#[doc(hidden)] -#[cfg_attr( - all(feature = "chrono", feature = "time"), - deprecated = "SQLx has both `chrono` and `time` features enabled, \ - which presents an ambiguity when the `query!()` macros are mapping date/time types. \ - The `query!()` macros prefer types from `time` by default, \ - but this behavior should not be relied upon; \ - to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ - https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.date_time" -)] -pub fn warn_on_ambiguous_inferred_date_time_crate() {} - -#[doc(hidden)] -#[cfg_attr( - all(feature = "bigdecimal", feature = "rust_decimal"), - deprecated = "SQLx has both `bigdecimal` and `rust_decimal` features enabled, \ - which presents an ambiguity when the `query!()` macros are mapping `NUMERIC`. \ - The `query!()` macros prefer `bigdecimal::BigDecimal` by default, \ - but this behavior should not be relied upon; \ - to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ - https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.numeric" -)] -pub fn warn_on_ambiguous_inferred_numeric_crate() {} +pub use sqlx_core::config; From fdce6b89a7ada787b72f5415fb116bb04f71e212 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 9 Sep 2024 00:24:01 -0700 Subject: [PATCH 02/78] feat: add support for ignored_chars config to sqlx_core::migrate --- sqlx-core/src/migrate/migration.rs | 2 +- sqlx-core/src/migrate/migrator.rs | 15 +------------ sqlx-core/src/migrate/source.rs | 34 ++++++++++++------------------ 3 files changed, 16 insertions(+), 35 deletions(-) diff --git a/sqlx-core/src/migrate/migration.rs b/sqlx-core/src/migrate/migration.rs index 1f1175ce58..df7a11d78b 100644 --- a/sqlx-core/src/migrate/migration.rs +++ b/sqlx-core/src/migrate/migration.rs @@ -76,7 +76,7 @@ pub fn checksum_fragments<'a>(fragments: impl Iterator) -> Vec, +} #[doc(hidden)] pub create_schemas: Cow<'static, [Cow<'static, str>]>, @@ -38,7 +39,6 @@ impl Migrator { no_tx: false, locking: true, table_name: Cow::Borrowed("_sqlx_migrations"), - create_schemas: Cow::Borrowed(&[]), }; /// Creates a new instance with the given source. @@ -88,19 +88,6 @@ impl Migrator { self } - /// Add a schema name to be created if it does not already exist. - /// - /// May be used with [`Self::dangerous_set_table_name()`] to place the migrations table - /// in a new schema without requiring it to exist first. 
- /// - /// ### Note: Support Depends on Database - /// SQLite cannot create new schemas without attaching them to a database file, - /// the path of which must be specified separately in an [`ATTACH DATABASE`](https://www.sqlite.org/lang_attach.html) command. - pub fn create_schema(&mut self, schema_name: impl Into>) -> &Self { - self.create_schemas.to_mut().push(schema_name.into()); - self - } - /// Specify whether applied migrations that are missing from the resolved migrations should be ignored. pub fn set_ignore_missing(&mut self, ignore_missing: bool) -> &mut Self { self.ignore_missing = ignore_missing; diff --git a/sqlx-core/src/migrate/source.rs b/sqlx-core/src/migrate/source.rs index 9c2ef7719b..6c3d780bb3 100644 --- a/sqlx-core/src/migrate/source.rs +++ b/sqlx-core/src/migrate/source.rs @@ -52,9 +52,9 @@ impl MigrationSource<'static> for PathBuf { } /// A [`MigrationSource`] implementation with configurable resolution. -/// +/// /// `S` may be `PathBuf`, `&Path` or any type that implements `Into`. -/// +/// /// See [`ResolveConfig`] for details. #[derive(Debug)] pub struct ResolveWith(pub S, pub ResolveConfig); @@ -97,20 +97,20 @@ impl ResolveConfig { } /// Ignore a character when hashing migrations. - /// + /// /// The migration SQL string itself will still contain the character, /// but it will not be included when calculating the checksum. - /// + /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. - /// + /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_char(&mut self, c: char) -> &mut Self { self.ignored_chars.insert(c); self @@ -123,21 +123,21 @@ impl ResolveConfig { /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_chars(&mut self, chars: impl IntoIterator) -> &mut Self { self.ignored_chars.extend(chars); self } /// Iterate over the set of ignored characters. - /// + /// /// Duplicate `char`s are not included. pub fn ignored_chars(&self) -> impl Iterator + '_ { self.ignored_chars.iter().copied() @@ -266,17 +266,11 @@ fn checksum_with(sql: &str, ignored_chars: &BTreeSet) -> Vec { fn checksum_with_ignored_chars() { // Ensure that `checksum_with` returns the same digest for a given set of ignored chars // as the equivalent string with the characters removed. 
- let ignored_chars = [ - ' ', '\t', '\r', '\n', - // Zero-width non-breaking space (ZWNBSP), often added as a magic-number at the beginning - // of UTF-8 encoded files as a byte-order mark (BOM): - // https://en.wikipedia.org/wiki/Byte_order_mark - '\u{FEFF}', - ]; + let ignored_chars = [' ', '\t', '\r', '\n']; // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql` let sql = "\ - \u{FEFF}create table comment (\r\n\ + create table comment (\r\n\ \tcomment_id uuid primary key default gen_random_uuid(),\r\n\ \tpost_id uuid not null references post(post_id),\r\n\ \tuser_id uuid not null references \"user\"(user_id),\r\n\ From 44db4701d1d3d9bd291ee9922870a1a3055b9508 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 9 Sep 2024 00:49:20 -0700 Subject: [PATCH 03/78] chore: test ignored_chars with `U+FEFF` (ZWNBSP/BOM) https://en.wikipedia.org/wiki/Byte_order_mark --- sqlx-core/src/config/reference.toml | 6 ++++- sqlx-core/src/config/tests.rs | 2 +- sqlx-core/src/migrate/migration.rs | 2 +- sqlx-core/src/migrate/source.rs | 34 +++++++++++++++++------------ 4 files changed, 27 insertions(+), 17 deletions(-) diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index fae92f3422..6d52f615eb 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -155,7 +155,11 @@ migrations_dir = "foo/migrations" # ignored_chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) -ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF +# Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) +# +# U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, +# where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark +ignored_chars = [" ", "\t", "\r", "\n", "\uFEFF"] # Specify reversible migrations by default (for `sqlx migrate create`). # diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index bf042069a2..521e7074b3 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -81,7 +81,7 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.table_name.as_deref(), Some("foo._sqlx_migrations")); assert_eq!(config.migrations_dir.as_deref(), Some("foo/migrations")); - let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n']); + let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n', '\u{FEFF}']); assert_eq!(config.ignored_chars, ignored_chars); diff --git a/sqlx-core/src/migrate/migration.rs b/sqlx-core/src/migrate/migration.rs index df7a11d78b..1f1175ce58 100644 --- a/sqlx-core/src/migrate/migration.rs +++ b/sqlx-core/src/migrate/migration.rs @@ -76,7 +76,7 @@ pub fn checksum_fragments<'a>(fragments: impl Iterator) -> Vec for PathBuf { } /// A [`MigrationSource`] implementation with configurable resolution. -/// +/// /// `S` may be `PathBuf`, `&Path` or any type that implements `Into`. -/// +/// /// See [`ResolveConfig`] for details. #[derive(Debug)] pub struct ResolveWith(pub S, pub ResolveConfig); @@ -97,20 +97,20 @@ impl ResolveConfig { } /// Ignore a character when hashing migrations. - /// + /// /// The migration SQL string itself will still contain the character, /// but it will not be included when calculating the checksum. - /// + /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. 
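> Editor's note: for intuition on how ignoring characters interacts with the checksum, here is a hedged
> sketch: hash the SQL with the ignored characters filtered out, which yields the same digest as the
> fragment-based hashing used by this series. It assumes the `sha2` crate (already a `migrate` dependency of
> `sqlx-core`); SHA-384 is an illustrative choice here, and `checksum_ignoring` is not a real SQLx function.

```rust
use sha2::{Digest, Sha384};
use std::collections::BTreeSet;

// Hash the SQL with the ignored characters stripped out, so a pure
// reformatting (e.g. CRLF -> LF) produces the same digest.
fn checksum_ignoring(sql: &str, ignored: &BTreeSet<char>) -> Vec<u8> {
    let filtered: String = sql.chars().filter(|c| !ignored.contains(c)).collect();
    Sha384::digest(filtered.as_bytes()).to_vec()
}

fn main() {
    let ignored = BTreeSet::from([' ', '\t', '\r', '\n']);
    let unix = checksum_ignoring("create table foo (bar text);\n", &ignored);
    let windows = checksum_ignoring("create  table foo (bar text);\r\n", &ignored);
    // Whitespace-only differences no longer change the checksum, but note the
    // warning above: whitespace inside string literals is ignored too.
    assert_eq!(unix, windows);
}
```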
- /// + /// /// Adding the same `char` more than once is a no-op. - /// + /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_char(&mut self, c: char) -> &mut Self { self.ignored_chars.insert(c); self @@ -123,21 +123,21 @@ impl ResolveConfig { /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_chars(&mut self, chars: impl IntoIterator) -> &mut Self { self.ignored_chars.extend(chars); self } /// Iterate over the set of ignored characters. - /// + /// /// Duplicate `char`s are not included. pub fn ignored_chars(&self) -> impl Iterator + '_ { self.ignored_chars.iter().copied() @@ -266,11 +266,17 @@ fn checksum_with(sql: &str, ignored_chars: &BTreeSet) -> Vec { fn checksum_with_ignored_chars() { // Ensure that `checksum_with` returns the same digest for a given set of ignored chars // as the equivalent string with the characters removed. - let ignored_chars = [' ', '\t', '\r', '\n']; + let ignored_chars = [ + ' ', '\t', '\r', '\n', + // Zero-width non-breaking space (ZWNBSP), often added as a magic-number at the beginning + // of UTF-8 encoded files as a byte-order mark (BOM): + // https://en.wikipedia.org/wiki/Byte_order_mark + '\u{FEFF}', + ]; // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql` let sql = "\ - create table comment (\r\n\ + \u{FEFF}create table comment (\r\n\ \tcomment_id uuid primary key default gen_random_uuid(),\r\n\ \tpost_id uuid not null references post(post_id),\r\n\ \tuser_id uuid not null references \"user\"(user_id),\r\n\ From 62c0c6f3a4c0a61f97c9741d5b751f9d322d44e9 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 18 Sep 2024 01:54:22 -0700 Subject: [PATCH 04/78] refactor: make `Config` always compiled simplifies usage while still making parsing optional for less generated code --- Cargo.toml | 10 ++-- sqlx-cli/Cargo.toml | 7 +-- sqlx-core/Cargo.toml | 12 ++-- sqlx-core/src/config/common.rs | 9 ++- sqlx-core/src/config/macros.rs | 12 ++-- sqlx-core/src/config/migrate.rs | 20 +++++-- sqlx-core/src/config/mod.rs | 95 ++++++++++++++++++++----------- sqlx-core/src/config/tests.rs | 2 - sqlx-core/src/lib.rs | 1 - sqlx-macros-core/Cargo.toml | 4 +- sqlx-macros-core/src/query/mod.rs | 9 ++- sqlx-macros/Cargo.toml | 3 +- 12 files changed, 115 insertions(+), 69 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 37f719b770..b9226b43ef 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,16 +58,14 @@ features = ["all-databases", "_unstable-all-types", "_unstable-doc"] rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["any", "macros", "migrate", "json", "config-all"] +default = ["any", "macros", "migrate", "json", "sqlx-toml"] derive = ["sqlx-macros/derive"] macros = ["derive", "sqlx-macros/macros"] migrate = ["sqlx-core/migrate", "sqlx-macros?/migrate", "sqlx-mysql?/migrate", "sqlx-postgres?/migrate", "sqlx-sqlite?/migrate"] -# Enable parsing of `sqlx.toml` 
for configuring macros, migrations, or both. -config-macros = ["sqlx-macros?/config-macros"] -config-migrate = ["sqlx-macros?/config-migrate"] -config-all = ["config-macros", "config-migrate"] +# Enable parsing of `sqlx.toml` for configuring macros and migrations. +sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml"] # intended mainly for CI and docs all-databases = ["mysql", "sqlite", "postgres", "any"] @@ -85,7 +83,7 @@ _unstable-all-types = [ "bstr" ] # Render documentation that wouldn't otherwise be shown (e.g. `sqlx_core::config`). -_unstable-doc = ["config-all", "sqlx-core/_unstable-doc"] +_unstable-doc = [] # Base runtime features without TLS runtime-async-std = ["_rt-async-std", "sqlx-core/_rt-async-std", "sqlx-macros?/_rt-async-std"] diff --git a/sqlx-cli/Cargo.toml b/sqlx-cli/Cargo.toml index 5a57e46720..0cb428e6aa 100644 --- a/sqlx-cli/Cargo.toml +++ b/sqlx-cli/Cargo.toml @@ -55,8 +55,8 @@ features = [ [features] default = ["postgres", "sqlite", "mysql", "native-tls", "completions", "sqlx-toml"] -rustls = ["sqlx/tls-rustls"] -native-tls = ["sqlx/tls-native-tls"] +rustls = ["sqlx/runtime-tokio-rustls"] +native-tls = ["sqlx/runtime-tokio-native-tls"] # databases mysql = ["sqlx/mysql"] @@ -71,9 +71,6 @@ completions = ["dep:clap_complete"] sqlx-toml = ["sqlx/sqlx-toml"] -# Conditional compilation only -_sqlite = [] - [dev-dependencies] assert_cmd = "2.0.11" tempfile = "3.10.1" diff --git a/sqlx-core/Cargo.toml b/sqlx-core/Cargo.toml index c12de18889..bf57849733 100644 --- a/sqlx-core/Cargo.toml +++ b/sqlx-core/Cargo.toml @@ -13,7 +13,7 @@ features = ["offline"] [features] default = [] -migrate = ["sha2", "crc", "config-migrate"] +migrate = ["sha2", "crc"] any = [] @@ -32,11 +32,13 @@ _tls-none = [] # support offline/decoupled building (enables serialization of `Describe`) offline = ["serde", "either/serde"] -config = ["serde", "toml/parse"] -config-macros = ["config"] -config-migrate = ["config"] +# Enable parsing of `sqlx.toml`. +# For simplicity, the `config` module is always enabled, +# but disabling this disables the `serde` derives and the `toml` crate, +# which is a good bit less code to compile if the feature isn't being used. +sqlx-toml = ["serde", "toml/parse"] -_unstable-doc = ["config-macros", "config-migrate"] +_unstable-doc = ["sqlx-toml"] [dependencies] # Runtimes diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 8c774fc60f..1468f24abd 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,5 +1,6 @@ /// Configuration shared by multiple components. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] pub struct Config { /// Override the database URL environment variable. /// @@ -36,3 +37,9 @@ pub struct Config { /// and the ones used in `bar` will use `BAR_DATABASE_URL`. pub database_url_var: Option, } + +impl Config { + pub fn database_url_var(&self) -> &str { + self.database_url_var.as_deref().unwrap_or("DATABASE_URL") + } +} \ No newline at end of file diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 5edd30dc15..142f059da4 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. 
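> Editor's note: the central trick of this refactor, visible in the `common.rs` hunk above, is to keep the
> config types unconditionally compiled while gating the `serde` derive (and therefore the `toml` dependency)
> behind the `sqlx-toml` feature. A compressed sketch of the pattern; `CommonConfig` is a stand-in struct,
> not the real type.

```rust
// Always compiled; only deserializable when the `sqlx-toml` feature is enabled,
// so builds that never parse a config file skip the serde/toml codegen entirely.
#[derive(Debug, Default)]
#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))]
pub struct CommonConfig {
    pub database_url_var: Option<String>,
}

impl CommonConfig {
    // Same fallback as the helper added to `common.rs` above.
    pub fn database_url_var(&self) -> &str {
        self.database_url_var.as_deref().unwrap_or("DATABASE_URL")
    }
}

fn main() {
    assert_eq!(CommonConfig::default().database_url_var(), "DATABASE_URL");
}
```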
-#[derive(Debug, Default, serde::Deserialize)] -#[serde(default)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] pub struct Config { /// Specify the crate to use for mapping date/time types to Rust. /// @@ -235,8 +235,12 @@ pub struct Config { } /// The crate to use for mapping date/time types to Rust. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DateTimeCrate { /// Use whichever crate is enabled (`time` then `chrono`). #[default] diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 5878f9a24f..efc03a0155 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -12,8 +12,8 @@ use std::collections::BTreeSet; /// if the proper precautions are not taken. /// /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. -#[derive(Debug, Default, serde::Deserialize)] -#[serde(default)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] pub struct Config { /// Override the name of the table used to track executed migrations. /// @@ -118,8 +118,12 @@ pub struct Config { } /// The default type of migration that `sqlx migrate create` should create by default. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DefaultMigrationType { /// Create the same migration type as that of the latest existing migration, /// or `Simple` otherwise. @@ -134,8 +138,12 @@ pub enum DefaultMigrationType { } /// The default scheme that `sqlx migrate create` should use for version integers. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DefaultVersioning { /// Infer the versioning scheme from existing migrations: /// diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 979477241f..3bbde5c2f1 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -7,6 +7,7 @@ //! //! See the [reference][`_reference`] for the full `sqlx.toml` file. +use std::error::Error; use std::fmt::Debug; use std::io; use std::path::{Path, PathBuf}; @@ -23,13 +24,11 @@ pub mod common; /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. -#[cfg(feature = "config-macros")] pub mod macros; /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. -#[cfg(feature = "config-migrate")] pub mod migrate; /// Reference for `sqlx.toml` files @@ -41,11 +40,12 @@ pub mod migrate; /// ``` pub mod _reference {} -#[cfg(test)] +#[cfg(all(test, feature = "sqlx-toml"))] mod tests; /// The parsed structure of a `sqlx.toml` file. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] pub struct Config { /// Configuration shared by multiple components. 
/// @@ -55,21 +55,11 @@ pub struct Config { /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. - #[cfg_attr( - docsrs, - doc(cfg(any(feature = "config-all", feature = "config-macros"))) - )] - #[cfg(feature = "config-macros")] pub macros: macros::Config, /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. - #[cfg_attr( - docsrs, - doc(cfg(any(feature = "config-all", feature = "config-migrate"))) - )] - #[cfg(feature = "config-migrate")] pub migrate: migrate::Config, } @@ -90,13 +80,17 @@ pub enum ConfigError { std::env::VarError, ), + /// No configuration file was found. Not necessarily fatal. + #[error("config file {path:?} not found")] + NotFound { + path: PathBuf, + }, + /// An I/O error occurred while attempting to read the config file at `path`. /// - /// This includes [`io::ErrorKind::NotFound`]. - /// - /// [`Self::not_found_path()`] will return the path if the file was not found. + /// If the error is [`io::ErrorKind::NotFound`], [`Self::NotFound`] is returned instead. #[error("error reading config file {path:?}")] - Read { + Io { path: PathBuf, #[source] error: io::Error, @@ -105,22 +99,41 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. + /// + /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { path: PathBuf, + /// Type-erased [`toml::de::Error`]. #[source] - error: toml::de::Error, + error: Box, + }, + + /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. + #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] + ParseDisabled { + path: PathBuf }, } impl ConfigError { + /// Create a [`ConfigError`] from a [`std::io::Error`]. + /// + /// Maps to either `NotFound` or `Io`. + pub fn from_io(path: PathBuf, error: io::Error) -> Self { + if error.kind() == io::ErrorKind::NotFound { + Self::NotFound { path } + } else { + Self::Io { path, error } + } + } + /// If this error means the file was not found, return the path that was attempted. pub fn not_found_path(&self) -> Option<&Path> { - match self { - ConfigError::Read { path, error } if error.kind() == io::ErrorKind::NotFound => { - Some(path) - } - _ => None, + if let Self::NotFound { path } = self { + Some(path) + } else { + None } } } @@ -140,14 +153,22 @@ impl Config { /// If the file exists but an unrecoverable error was encountered while parsing it. pub fn from_crate() -> &'static Self { Self::try_from_crate().unwrap_or_else(|e| { - if let Some(path) = e.not_found_path() { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found (error: {e})"); - CACHE.get_or_init(Config::default) - } else { + match e { + ConfigError::NotFound { path } => { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found"); + CACHE.get_or_init(Config::default) + } + // FATAL ERRORS BELOW: // In the case of migrations, // we can't proceed with defaults as they may be completely wrong. - panic!("failed to read sqlx config: {e}") + e @ ConfigError::ParseDisabled { .. } => { + // Only returned if the file exists but the feature is not enabled. 
+ panic!("{e}") + } + e => { + panic!("failed to read sqlx config: {e}") + } } }) } @@ -188,12 +209,13 @@ impl Config { }) } + #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. let toml_s = match std::fs::read_to_string(&path) { Ok(toml) => toml, Err(error) => { - return Err(ConfigError::Read { path, error }); + return Err(ConfigError::from_io(path, error)); } }; @@ -201,6 +223,15 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error }) + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error: Box::new(error) }) + } + + #[cfg(not(feature = "sqlx-toml"))] + fn read_from(path: PathBuf) -> Result { + match path.try_exists() { + Ok(true) => Err(ConfigError::ParseDisabled { path }), + Ok(false) => Err(ConfigError::NotFound { path }), + Err(e) => Err(ConfigError::from_io(path, e)) + } } } diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 521e7074b3..e5033bb459 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -20,7 +20,6 @@ fn assert_common_config(config: &config::common::Config) { assert_eq!(config.database_url_var.as_deref(), Some("FOO_DATABASE_URL")); } -#[cfg(feature = "config-macros")] fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; @@ -74,7 +73,6 @@ fn assert_macros_config(config: &config::macros::Config) { ); } -#[cfg(feature = "config-migrate")] fn assert_migrate_config(config: &config::migrate::Config) { use config::migrate::*; diff --git a/sqlx-core/src/lib.rs b/sqlx-core/src/lib.rs index 8b831ecaff..09f2900ba8 100644 --- a/sqlx-core/src/lib.rs +++ b/sqlx-core/src/lib.rs @@ -91,7 +91,6 @@ pub mod any; #[cfg(feature = "migrate")] pub mod testing; -#[cfg(feature = "config")] pub mod config; pub use error::{Error, Result}; diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 3ed1ae4072..02b773af07 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -27,9 +27,7 @@ derive = [] macros = [] migrate = ["sqlx-core/migrate"] -config = ["sqlx-core/config"] -config-macros = ["config", "sqlx-core/config-macros"] -config-migrate = ["config", "sqlx-core/config-migrate"] +sqlx-toml = ["sqlx-core/sqlx-toml"] # database mysql = ["sqlx-mysql"] diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index f6c0cae6db..4f1d37ac52 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -17,6 +17,7 @@ use crate::query::input::RecordType; use either::Either; use sqlx_core::config::Config; use url::Url; +use sqlx_core::config::Config; mod args; mod data; @@ -123,8 +124,12 @@ fn init_metadata(manifest_dir: &String) -> crate::Result { .or(offline) .map(|s| s.eq_ignore_ascii_case("true") || s == "1") .unwrap_or(false); - - let config = Config::try_from_crate_or_default()?; + + let var_name = Config::from_crate() + .common + .database_url_var(); + + let database_url = env(var_name).ok(); let database_url = env(config.common.database_url_var()).ok().or(database_url); diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 49117afac7..23079a3810 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -28,8 +28,7 @@ derive = ["sqlx-macros-core/derive"] macros = ["sqlx-macros-core/macros"] migrate = ["sqlx-macros-core/migrate"] -config-macros 
= ["sqlx-macros-core/config-macros"] -config-migrate = ["sqlx-macros-core/config-migrate"] +sqlx-toml = ["sqlx-macros-core/sqlx-toml"] # database mysql = ["sqlx-macros-core/mysql"] From d7b6cd2fdea19ab269af05e9a9cac196df5b20fd Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 18 Sep 2024 01:55:59 -0700 Subject: [PATCH 05/78] refactor: add origin information to `Column` --- sqlx-core/src/column.rs | 22 ++++----- sqlx-mysql/src/connection/executor.rs | 8 ++-- sqlx-mysql/src/protocol/text/column.rs | 2 +- sqlx-postgres/src/column.rs | 3 +- sqlx-postgres/src/connection/describe.rs | 59 +++++++++++++++++++++++- sqlx-postgres/src/connection/mod.rs | 1 - sqlx-sqlite/src/column.rs | 2 +- sqlx-sqlite/src/connection/describe.rs | 2 + sqlx-sqlite/src/statement/handle.rs | 33 +++++++------ 9 files changed, 94 insertions(+), 38 deletions(-) diff --git a/sqlx-core/src/column.rs b/sqlx-core/src/column.rs index fddc048c4b..7483375765 100644 --- a/sqlx-core/src/column.rs +++ b/sqlx-core/src/column.rs @@ -23,17 +23,15 @@ pub trait Column: 'static + Send + Sync + Debug { fn type_info(&self) -> &::TypeInfo; /// If this column comes from a table, return the table and original column name. - /// + /// /// Returns [`ColumnOrigin::Expression`] if the column is the result of an expression /// or else the source table could not be determined. - /// + /// /// Returns [`ColumnOrigin::Unknown`] if the database driver does not have that information, /// or has not overridden this method. - // This method returns an owned value instead of a reference, + // This method returns an owned value instead of a reference, // to give the implementor more flexibility. - fn origin(&self) -> ColumnOrigin { - ColumnOrigin::Unknown - } + fn origin(&self) -> ColumnOrigin { ColumnOrigin::Unknown } } /// A [`Column`] that originates from a table. @@ -46,20 +44,20 @@ pub struct TableColumn { pub name: Arc, } -/// The possible statuses for our knowledge of the origin of a [`Column`]. +/// The possible statuses for our knowledge of the origin of a [`Column`]. #[derive(Debug, Clone, Default)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] pub enum ColumnOrigin { - /// The column is known to originate from a table. - /// - /// Included is the table name and original column name. + /// The column is known to originate from a table. + /// + /// Included is the table name and original column name. Table(TableColumn), /// The column originates from an expression, or else its origin could not be determined. Expression, /// The database driver does not know the column origin at this time. - /// + /// /// This may happen if: - /// * The connection is in the middle of executing a query, + /// * The connection is in the middle of executing a query, /// and cannot query the catalog to fetch this information. /// * The connection does not have access to the database catalog. /// * The implementation of [`Column`] did not override [`Column::origin()`]. 
diff --git a/sqlx-mysql/src/connection/executor.rs b/sqlx-mysql/src/connection/executor.rs index a6f8c20129..6ded62c609 100644 --- a/sqlx-mysql/src/connection/executor.rs +++ b/sqlx-mysql/src/connection/executor.rs @@ -21,9 +21,9 @@ use either::Either; use futures_core::future::BoxFuture; use futures_core::stream::BoxStream; use futures_core::Stream; -use futures_util::TryStreamExt; +use futures_util::{pin_mut, TryStreamExt}; +use std::{borrow::Cow, sync::Arc}; use sqlx_core::column::{ColumnOrigin, TableColumn}; -use std::{borrow::Cow, pin::pin, sync::Arc}; impl MySqlConnection { async fn prepare_statement( @@ -399,7 +399,7 @@ fn recv_next_result_column(def: &ColumnDefinition, ordinal: usize) -> Result Result Result<&str, Error> { str::from_utf8(&self.table).map_err(Error::protocol) } - + pub(crate) fn name(&self) -> Result<&str, Error> { str::from_utf8(&self.name).map_err(Error::protocol) } diff --git a/sqlx-postgres/src/column.rs b/sqlx-postgres/src/column.rs index 4dd3a1cbd2..8530267b5d 100644 --- a/sqlx-postgres/src/column.rs +++ b/sqlx-postgres/src/column.rs @@ -3,6 +3,7 @@ use crate::{PgTypeInfo, Postgres}; use sqlx_core::column::ColumnOrigin; pub(crate) use sqlx_core::column::{Column, ColumnIndex}; +use sqlx_core::column::ColumnOrigin; #[derive(Debug, Clone)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] @@ -13,7 +14,7 @@ pub struct PgColumn { #[cfg_attr(feature = "offline", serde(default))] pub(crate) origin: ColumnOrigin, - + #[cfg_attr(feature = "offline", serde(skip))] pub(crate) relation_id: Option, #[cfg_attr(feature = "offline", serde(skip))] diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 0334357a6c..50bda84762 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -1,4 +1,4 @@ -use crate::connection::TableColumns; +use std::collections::btree_map; use crate::error::Error; use crate::ext::ustr::UStr; use crate::io::StatementId; @@ -15,6 +15,9 @@ use smallvec::SmallVec; use sqlx_core::column::{ColumnOrigin, TableColumn}; use sqlx_core::query_builder::QueryBuilder; use std::sync::Arc; +use sqlx_core::column::{ColumnOrigin, TableColumn}; +use sqlx_core::hash_map; +use crate::connection::TableColumns; /// Describes the type of the `pg_type.typtype` column /// @@ -124,6 +127,12 @@ impl PgConnection { let type_info = self .maybe_fetch_type_info_by_oid(field.data_type_id, fetch_type_info) .await?; + + let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) { + self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch).await? 
+ } else { + ColumnOrigin::Expression + }; let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) @@ -202,6 +211,54 @@ impl PgConnection { Ok(PgTypeInfo(PgType::DeclareWithOid(oid))) } } + + async fn maybe_fetch_column_origin( + &mut self, + relation_id: Oid, + attribute_no: i16, + should_fetch: bool, + ) -> Result { + let mut table_columns = match self.cache_table_to_column_names.entry(relation_id) { + hash_map::Entry::Occupied(table_columns) => { + table_columns.into_mut() + }, + hash_map::Entry::Vacant(vacant) => { + if !should_fetch { return Ok(ColumnOrigin::Unknown); } + + let table_name: String = query_scalar("SELECT $1::oid::regclass::text") + .bind(relation_id) + .fetch_one(&mut *self) + .await?; + + vacant.insert(TableColumns { + table_name: table_name.into(), + columns: Default::default(), + }) + } + }; + + let column_name = match table_columns.columns.entry(attribute_no) { + btree_map::Entry::Occupied(occupied) => Arc::clone(occupied.get()), + btree_map::Entry::Vacant(vacant) => { + if !should_fetch { return Ok(ColumnOrigin::Unknown); } + + let column_name: String = query_scalar( + "SELECT attname FROM pg_attribute WHERE attrelid = $1 AND attnum = $2" + ) + .bind(relation_id) + .bind(attribute_no) + .fetch_one(&mut *self) + .await?; + + Arc::clone(vacant.insert(column_name.into())) + } + }; + + Ok(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name + })) + } async fn maybe_fetch_column_origin( &mut self, diff --git a/sqlx-postgres/src/connection/mod.rs b/sqlx-postgres/src/connection/mod.rs index 74398d6a8b..dded00c4a5 100644 --- a/sqlx-postgres/src/connection/mod.rs +++ b/sqlx-postgres/src/connection/mod.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::collections::BTreeMap; use std::fmt::{self, Debug, Formatter}; use std::sync::Arc; diff --git a/sqlx-sqlite/src/column.rs b/sqlx-sqlite/src/column.rs index d319bd46a8..390f3687fb 100644 --- a/sqlx-sqlite/src/column.rs +++ b/sqlx-sqlite/src/column.rs @@ -11,7 +11,7 @@ pub struct SqliteColumn { pub(crate) type_info: SqliteTypeInfo, #[cfg_attr(feature = "offline", serde(default))] - pub(crate) origin: ColumnOrigin, + pub(crate) origin: ColumnOrigin } impl Column for SqliteColumn { diff --git a/sqlx-sqlite/src/connection/describe.rs b/sqlx-sqlite/src/connection/describe.rs index 6db81374aa..b75cf98e56 100644 --- a/sqlx-sqlite/src/connection/describe.rs +++ b/sqlx-sqlite/src/connection/describe.rs @@ -49,6 +49,8 @@ pub(crate) fn describe(conn: &mut ConnectionState, query: &str) -> Result ColumnOrigin { - if let Some((table, name)) = self - .column_table_name(index) - .zip(self.column_origin_name(index)) + if let Some((table, name)) = + self.column_table_name(index).zip(self.column_origin_name(index)) { let table: Arc = self .column_db_name(index) @@ -126,20 +125,20 @@ impl StatementHandle { // TODO: check that SQLite returns the names properly quoted if necessary |db| format!("{db}.{table}").into(), ); - + ColumnOrigin::Table(TableColumn { table, - name: name.into(), + name: name.into() }) } else { ColumnOrigin::Expression } } - + fn column_db_name(&self, index: usize) -> Option<&str> { unsafe { let db_name = sqlite3_column_database_name(self.0.as_ptr(), check_col_idx!(index)); - + if !db_name.is_null() { Some(from_utf8_unchecked(CStr::from_ptr(db_name).to_bytes())) } else { @@ -171,7 +170,7 @@ impl StatementHandle { } } } - + pub(crate) fn column_type_info(&self, index: usize) -> SqliteTypeInfo { 
SqliteTypeInfo(DataType::from_code(self.column_type(index))) } From c0ed389f2fa69bc0621d5d84c7c05216a4101485 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 18 Sep 2024 18:17:43 -0700 Subject: [PATCH 06/78] feat(macros): implement `type_override` and `column_override` from `sqlx.toml` --- sqlx-macros-core/src/query/output.rs | 133 +++++++++------------------ 1 file changed, 41 insertions(+), 92 deletions(-) diff --git a/sqlx-macros-core/src/query/output.rs b/sqlx-macros-core/src/query/output.rs index 987dcaa3cb..09c674ff04 100644 --- a/sqlx-macros-core/src/query/output.rs +++ b/sqlx-macros-core/src/query/output.rs @@ -15,6 +15,8 @@ use sqlx_core::type_info::TypeInfo; use std::fmt::{self, Display, Formatter}; use syn::parse::{Parse, ParseStream}; use syn::Token; +use sqlx_core::config::Config; +use sqlx_core::type_info::TypeInfo; pub struct RustColumn { pub(super) ident: Ident, @@ -242,105 +244,52 @@ pub fn quote_query_scalar( }) } -fn get_column_type( - config: &Config, - warnings: &mut Warnings, - i: usize, - column: &DB::Column, -) -> TokenStream { +fn get_column_type(i: usize, column: &DB::Column) -> TokenStream { if let ColumnOrigin::Table(origin) = column.origin() { - if let Some(column_override) = config.macros.column_override(&origin.table, &origin.name) { + if let Some(column_override) = Config::from_crate() + .macros + .column_override(&origin.table, &origin.name) + { return column_override.parse().unwrap(); } } - + let type_info = column.type_info(); - if let Some(type_override) = config.macros.type_override(type_info.name()) { - return type_override.parse().unwrap(); + if let Some(type_override) = Config::from_crate() + .macros + .type_override(type_info.name()) + { + return type_override.parse().unwrap(); } - - let err = match ::return_type_for_id( - type_info, - &config.macros.preferred_crates, - ) { - Ok(t) => return t.parse().unwrap(), - Err(e) => e, - }; - - let message = match err { - type_checking::Error::NoMappingFound => { - if let Some(feature_gate) = ::get_feature_gate(type_info) { - format!( - "SQLx feature `{feat}` required for type {ty} of {col}", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } else { - format!( - "no built-in mapping found for type {ty} of {col}; \ - a type override may be required, see documentation for details", - ty = type_info, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - } - type_checking::Error::DateTimeCrateFeatureNotEnabled => { - let feature_gate = config - .macros - .preferred_crates - .date_time - .crate_name() - .expect("BUG: got feature-not-enabled error for DateTimeCrate::Inferred"); - - format!( - "SQLx feature `{feat}` required for type {ty} of {col} \ - (configured by `macros.preferred-crates.date-time` in sqlx.toml)", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - type_checking::Error::NumericCrateFeatureNotEnabled => { - let feature_gate = config - .macros - .preferred_crates - .numeric - .crate_name() - .expect("BUG: got feature-not-enabled error for NumericCrate::Inferred"); - - format!( - "SQLx feature `{feat}` required for type {ty} of {col} \ - (configured by `macros.preferred-crates.numeric` in sqlx.toml)", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - type_checking::Error::AmbiguousDateTimeType { fallback } => { - warnings.ambiguous_datetime = true; - return fallback.parse().unwrap(); - } - 
type_checking::Error::AmbiguousNumericType { fallback } => { - warnings.ambiguous_numeric = true; - return fallback.parse().unwrap(); - } - }; - - syn::Error::new(Span::call_site(), message).to_compile_error() + + ::return_type_for_id(type_info).map_or_else( + || { + let message = + if let Some(feature_gate) = ::get_feature_gate(type_info) { + format!( + "optional sqlx feature `{feat}` required for type {ty} of {col}", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } else { + format!( + "unsupported type {ty} of {col}", + ty = type_info, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + }; + syn::Error::new(Span::call_site(), message).to_compile_error() + }, + |t| t.parse().unwrap(), + ) } impl ColumnDecl { From ff3b532b614d3da937019a3261fa39f341c932a6 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 19 Sep 2024 19:23:03 -0700 Subject: [PATCH 07/78] refactor(sqlx.toml): make all keys kebab-case, create `macros.preferred-crates` --- sqlx-core/src/config/common.rs | 14 ++- sqlx-core/src/config/macros.rs | 179 +++++++++++++++++++++------- sqlx-core/src/config/migrate.rs | 47 +++++--- sqlx-core/src/config/mod.rs | 6 +- sqlx-core/src/config/reference.toml | 42 ++++--- sqlx-core/src/config/tests.rs | 10 +- 6 files changed, 206 insertions(+), 92 deletions(-) diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 1468f24abd..c09ed80d7f 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,6 +1,10 @@ /// Configuration shared by multiple components. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { /// Override the database URL environment variable. /// @@ -17,14 +21,14 @@ pub struct Config { /// /// #### `foo/sqlx.toml` /// ```toml - /// [macros] - /// database_url_var = "FOO_DATABASE_URL" + /// [common] + /// database-url-var = "FOO_DATABASE_URL" /// ``` /// /// #### `bar/sqlx.toml` /// ```toml - /// [macros] - /// database_url_var = "BAR_DATABASE_URL" + /// [common] + /// database-url-var = "BAR_DATABASE_URL" /// ``` /// /// #### `.env` diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 142f059da4..9f4cf4524f 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -2,33 +2,16 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { - /// Specify the crate to use for mapping date/time types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`chrono`] or [`time`] (the latter takes precedent). - /// - /// [`chrono`]: crate::types::chrono - /// [`time`]: crate::types::time - /// - /// Example: Always Use Chrono - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `time` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`chrono`]. 
- /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. - /// - /// #### `sqlx.toml` - /// ```toml - /// [macros] - /// datetime_crate = "chrono" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub datetime_crate: DateTimeCrate, + /// Specify which crates' types to use when types from multiple crates apply. + /// + /// See [`PreferredCrates`] for details. + pub preferred_crates: PreferredCrates, /// Specify global overrides for mapping SQL type names to Rust type names. /// @@ -78,7 +61,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Override a built-in type /// 'uuid' = "crate::types::MyUuid" /// @@ -115,7 +98,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Map SQL type `foo` to `crate::types::Foo` /// 'foo' = "crate::types::Foo" /// ``` @@ -125,7 +108,7 @@ pub struct Config { /// (See `Note` section above for details.) /// /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Map SQL type `foo.foo` to `crate::types::Foo` /// 'foo.foo' = "crate::types::Foo" /// ``` @@ -136,7 +119,7 @@ pub struct Config { /// it must be wrapped in quotes _twice_ for SQLx to know the difference: /// /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # `"Foo"` in SQLx /// '"Foo"' = "crate::types::Foo" /// # **NOT** `"Foo"` in SQLx (parses as just `Foo`) @@ -151,7 +134,7 @@ pub struct Config { /// (See `Note` section above for details.) pub type_overrides: BTreeMap, - /// Specify per-column overrides for mapping SQL types to Rust types. + /// Specify per-table and per-column overrides for mapping SQL types to Rust types. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. @@ -206,7 +189,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.column_overrides.'foo'] + /// [macros.table-overrides.'foo'] /// # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: /// 'bar' = "crate::types::Bar" /// @@ -218,23 +201,83 @@ pub struct Config { /// # "Bar" = "crate::types::Bar" /// /// # Table name may be quoted (note the wrapping single-quotes) - /// [macros.column_overrides.'"Foo"'] + /// [macros.table-overrides.'"Foo"'] /// 'bar' = "crate::types::Bar" /// '"Bar"' = "crate::types::Bar" /// /// # Table name may also be schema-qualified. /// # Note how the dot is inside the quotes. - /// [macros.column_overrides.'my_schema.my_table'] + /// [macros.table-overrides.'my_schema.my_table'] /// 'my_column' = "crate::types::MyType" /// /// # Quoted schema, table, and column names - /// [macros.column_overrides.'"My Schema"."My Table"'] + /// [macros.table-overrides.'"My Schema"."My Table"'] /// '"My Column"' = "crate::types::MyType" /// ``` - pub column_overrides: BTreeMap>, + pub table_overrides: BTreeMap>, +} + +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "kebab-case") +)] +pub struct PreferredCrates { + /// Specify the crate to use for mapping date/time types to Rust. + /// + /// The default behavior is to use whatever crate is enabled, + /// [`chrono`] or [`time`] (the latter takes precedent). 
+    ///
+    /// [`chrono`]: crate::types::chrono
+    /// [`time`]: crate::types::time
+    ///
+    /// Example: Always Use Chrono
+    /// -------
+    /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable
+    /// the `time` feature of SQLx which will force it on for all crates using SQLx,
+    /// which will result in problems if your crate wants to use types from [`chrono`].
+    ///
+    /// You can use the type override syntax (see `sqlx::query!` for details),
+    /// or you can force an override globally by setting this option.
+    ///
+    /// #### `sqlx.toml`
+    /// ```toml
+    /// [macros.preferred-crates]
+    /// date-time = "chrono"
+    /// ```
+    ///
+    /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification
+    pub date_time: DateTimeCrate,
+
+    /// Specify the crate to use for mapping `NUMERIC` types to Rust.
+    ///
+    /// The default behavior is to use whatever crate is enabled,
+    /// [`bigdecimal`] or [`rust_decimal`] (the latter takes precedence).
+    ///
+    /// [`bigdecimal`]: crate::types::bigdecimal
+    /// [`rust_decimal`]: crate::types::rust_decimal
+    ///
+    /// Example: Always Use `bigdecimal`
+    /// -------
+    /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable
+    /// the `rust_decimal` feature of SQLx which will force it on for all crates using SQLx,
+    /// which will result in problems if your crate wants to use types from [`bigdecimal`].
+    ///
+    /// You can use the type override syntax (see `sqlx::query!` for details),
+    /// or you can force an override globally by setting this option.
+    ///
+    /// #### `sqlx.toml`
+    /// ```toml
+    /// [macros.preferred-crates]
+    /// numeric = "bigdecimal"
+    /// ```
+    ///
+    /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification
+    pub numeric: NumericCrate,
 }
 
-/// The crate to use for mapping date/time types to Rust.
+/// The preferred crate to use for mapping date/time types to Rust.
 #[derive(Debug, Default, PartialEq, Eq)]
 #[cfg_attr(
     feature = "sqlx-toml",
     derive(serde::Deserialize),
     serde(rename_all = "snake_case")
 )]
 pub enum DateTimeCrate {
     /// Use whichever crate is enabled (`time` then `chrono`).
     #[default]
     Inferred,
 
     /// Always use types from [`chrono`][crate::types::chrono].
     ///
     /// ```toml
-    /// [macros]
-    /// datetime_crate = "chrono"
+    /// [macros.preferred-crates]
+    /// date-time = "chrono"
     /// ```
     Chrono,
 
     /// Always use types from [`time`][crate::types::time].
     ///
     /// ```toml
-    /// [macros]
-    /// datetime_crate = "time"
+    /// [macros.preferred-crates]
+    /// date-time = "time"
     /// ```
     Time,
 }
 
+/// The preferred crate to use for mapping `NUMERIC` types to Rust.
+#[derive(Debug, Default, PartialEq, Eq)]
+#[cfg_attr(
+    feature = "sqlx-toml",
+    derive(serde::Deserialize),
+    serde(rename_all = "snake_case")
+)]
+pub enum NumericCrate {
+    /// Use whichever crate is enabled (`rust_decimal` then `bigdecimal`).
+    #[default]
+    Inferred,
+
+    /// Always use types from [`bigdecimal`][crate::types::bigdecimal].
+    ///
+    /// ```toml
+    /// [macros.preferred-crates]
+    /// numeric = "bigdecimal"
+    /// ```
+    #[cfg_attr(feature = "sqlx-toml", serde(rename = "bigdecimal"))]
+    BigDecimal,
+
+    /// Always use types from [`rust_decimal`][crate::types::rust_decimal].
+    ///
+    /// ```toml
+    /// [macros.preferred-crates]
+    /// numeric = "rust_decimal"
+    /// ```
+    RustDecimal,
+}
+
 /// A SQL type name; may optionally be schema-qualified.
 ///
-/// See [`macros.type_overrides`][Config::type_overrides] for usages.
+/// See [`macros.type-overrides`][Config::type_overrides] for usages.
 pub type SqlType = Box<str>;
 
 /// A SQL table name; may optionally be schema-qualified.
 ///
-/// See [`macros.column_overrides`][Config::column_overrides] for usages.
+/// See [`macros.table-overrides`][Config::table_overrides] for usages.
 pub type TableName = Box<str>;
 
 /// A column in a SQL table.
 ///
-/// See [`macros.column_overrides`][Config::column_overrides] for usages.
+/// See [`macros.table-overrides`][Config::table_overrides] for usages.
 pub type ColumnName = Box<str>;
 
 /// A Rust type name or path.
@@ -292,9 +365,25 @@ impl Config {
 
     /// Get the override for a given column and table name (optionally schema-qualified).
     pub fn column_override(&self, table: &str, column: &str) -> Option<&str> {
-        self.column_overrides
+        self.table_overrides
             .get(table)
             .and_then(|by_column| by_column.get(column))
             .map(|s| &**s)
     }
 }
+
+impl DateTimeCrate {
+    /// Returns `self == Self::Inferred`
+    #[inline(always)]
+    pub fn is_inferred(&self) -> bool {
+        *self == Self::Inferred
+    }
+}
+
+impl NumericCrate {
+    /// Returns `self == Self::Inferred`
+    #[inline(always)]
+    pub fn is_inferred(&self) -> bool {
+        *self == Self::Inferred
+    }
+}
\ No newline at end of file
diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs
index efc03a0155..d0e55b35d8 100644
--- a/sqlx-core/src/config/migrate.rs
+++ b/sqlx-core/src/config/migrate.rs
@@ -13,7 +13,11 @@ use std::collections::BTreeSet;
 ///
 /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_.
 #[derive(Debug, Default)]
-#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))]
+#[cfg_attr(
+    feature = "sqlx-toml",
+    derive(serde::Deserialize),
+    serde(default, rename_all = "kebab-case")
+)]
 pub struct Config {
     /// Override the name of the table used to track executed migrations.
     ///
@@ -35,7 +39,7 @@ pub struct Config {
     /// ```toml
     /// [migrate]
     /// # Put `_sqlx_migrations` in schema `foo`
-    /// table_name = "foo._sqlx_migrations"
+    /// table-name = "foo._sqlx_migrations"
     /// ```
     pub table_name: Option<Box<str>>,
 
@@ -63,7 +67,7 @@ pub struct Config {
     /// `sqlx.toml`:
     /// ```toml
     /// [migrate]
-    /// ignored_chars = ["\r"]
+    /// ignored-chars = ["\r"]
     /// ```
     ///
     /// For projects using Git, this can also be addressed using [`.gitattributes`]:
@@ -91,33 +95,44 @@ pub struct Config {
     /// ```toml
     /// [migrate]
     /// # Ignore common whitespace characters when hashing
-    /// ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF
+    /// ignored-chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF
     /// ```
     // Likely lower overhead for small sets than `HashSet`.
     pub ignored_chars: BTreeSet<char>,
 
-    /// Specify the default type of migration that `sqlx migrate create` should create by default.
+    /// Specify default options for new migrations created with `sqlx migrate add`.
+    pub defaults: MigrationDefaults,
+}
+
+#[derive(Debug, Default)]
+#[cfg_attr(
+    feature = "sqlx-toml",
+    derive(serde::Deserialize),
+    serde(default, rename_all = "kebab-case")
+)]
+pub struct MigrationDefaults {
+    /// Specify the default type of migration that `sqlx migrate add` should create by default.
     ///
     /// ### Example: Use Reversible Migrations by Default
     /// `sqlx.toml`:
     /// ```toml
-    /// [migrate]
-    /// default_type = "reversible"
+    /// [migrate.defaults]
+    /// migration-type = "reversible"
     /// ```
-    pub default_type: DefaultMigrationType,
+    pub migration_type: DefaultMigrationType,
 
-    /// Specify the default scheme that `sqlx migrate create` should use for version integers.
+    /// Specify the default scheme that `sqlx migrate add` should use for version integers.
/// /// ### Example: Use Sequential Versioning by Default /// `sqlx.toml`: /// ```toml - /// [migrate] - /// default_versioning = "sequential" + /// [migrate.defaults] + /// migration-versioning = "sequential" /// ``` - pub default_versioning: DefaultVersioning, + pub migration_versioning: DefaultVersioning, } -/// The default type of migration that `sqlx migrate create` should create by default. +/// The default type of migration that `sqlx migrate add` should create by default. #[derive(Debug, Default, PartialEq, Eq)] #[cfg_attr( feature = "sqlx-toml", @@ -130,14 +145,14 @@ pub enum DefaultMigrationType { #[default] Inferred, - /// Create a non-reversible migration (`_.sql`). + /// Create non-reversible migrations (`_.sql`) by default. Simple, - /// Create a reversible migration (`_.up.sql` and `[...].down.sql`). + /// Create reversible migrations (`_.up.sql` and `[...].down.sql`) by default. Reversible, } -/// The default scheme that `sqlx migrate create` should use for version integers. +/// The default scheme that `sqlx migrate add` should use for version integers. #[derive(Debug, Default, PartialEq, Eq)] #[cfg_attr( feature = "sqlx-toml", diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 3bbde5c2f1..696752a51b 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -45,7 +45,11 @@ mod tests; /// The parsed structure of a `sqlx.toml` file. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { /// Configuration shared by multiple components. /// diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 6d52f615eb..e042824c72 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -13,20 +13,24 @@ # This is used by both the macros and `sqlx-cli`. # # If not specified, defaults to `DATABASE_URL` -database_url_var = "FOO_DATABASE_URL" +database-url-var = "FOO_DATABASE_URL" ############################################################################################### # Configuration for the `query!()` family of macros. [macros] + +[macros.preferred-crates] # Force the macros to use the `chrono` crate for date/time types, even if `time` is enabled. # # Defaults to "inferred": use whichever crate is enabled (`time` takes precedence over `chrono`). -datetime_crate = "chrono" +date-time = "chrono" # Or, ensure the macros always prefer `time` # in case new date/time crates are added in the future: -# datetime_crate = "time" +# date-time = "time" + + # Set global overrides for mapping SQL types to Rust types. # @@ -38,7 +42,7 @@ datetime_crate = "chrono" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.type_overrides] +[macros.type-overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) 'uuid' = "crate::types::MyUuid" @@ -67,7 +71,7 @@ datetime_crate = "chrono" # Quoted schema and type name '"Foo"."Bar"' = "crate::schema::foo::Bar" -# Set per-column overrides for mapping SQL types to Rust types. +# Set per-table and per-column overrides for mapping SQL types to Rust types. # # Note: table name is required in the header. 
# @@ -76,7 +80,7 @@ datetime_crate = "chrono" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.column_overrides.'foo'] +[macros.table-overrides.'foo'] # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: 'bar' = "crate::types::Bar" @@ -88,17 +92,17 @@ datetime_crate = "chrono" # "Bar" = "crate::types::Bar" # Table name may be quoted (note the wrapping single-quotes) -[macros.column_overrides.'"Foo"'] +[macros.table-overrides.'"Foo"'] 'bar' = "crate::types::Bar" '"Bar"' = "crate::types::Bar" # Table name may also be schema-qualified. # Note how the dot is inside the quotes. -[macros.column_overrides.'my_schema.my_table'] +[macros.table-overrides.'my_schema.my_table'] 'my_column' = "crate::types::MyType" # Quoted schema, table, and column names -[macros.column_overrides.'"My Schema"."My Table"'] +[macros.table-overrides.'"My Schema"."My Table"'] '"My Column"' = "crate::types::MyType" ############################################################################################### @@ -130,12 +134,12 @@ datetime_crate = "chrono" # You should create the new table as a copy of the existing migrations table (with contents!), # and be sure all instances of your application have been migrated to the new # table before deleting the old one. -table_name = "foo._sqlx_migrations" +table-name = "foo._sqlx_migrations" # Override the directory used for migrations files. # # Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`. -migrations_dir = "foo/migrations" +migrations-dir = "foo/migrations" # Specify characters that should be ignored when hashing migrations. # @@ -148,32 +152,34 @@ migrations_dir = "foo/migrations" # change the output of the hash. # # This may require manual rectification for deployed databases. -# ignored_chars = [] +# ignored-chars = [] # Ignore Carriage Returns (`` | `\r`) # Note that the TOML format requires double-quoted strings to process escapes. -# ignored_chars = ["\r"] +# ignored-chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) # Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) # # U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, # where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark -ignored_chars = [" ", "\t", "\r", "\n", "\uFEFF"] +ignored-chars = [" ", "\t", "\r", "\n", "\uFEFF"] +# Set default options for new migrations. +[migrate.defaults] # Specify reversible migrations by default (for `sqlx migrate create`). # # Defaults to "inferred": uses the type of the last migration, or "simple" otherwise. -default_type = "reversible" +migration-type = "reversible" # Specify simple (non-reversible) migrations by default. -# default_type = "simple" +# migration-type = "simple" # Specify sequential versioning by default (for `sqlx migrate create`). # # Defaults to "inferred": guesses the versioning scheme from the latest migrations, # or "timestamp" otherwise. -default_versioning = "sequential" +migration-versioning = "sequential" # Specify timestamp versioning by default. 
-# default_versioning = "timestamp" +# migration-versioning = "timestamp" diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index e5033bb459..6c2883d58b 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -8,11 +8,7 @@ fn reference_parses_as_config() { .unwrap_or_else(|e| panic!("expected reference.toml to parse as Config: {e}")); assert_common_config(&config.common); - - #[cfg(feature = "config-macros")] assert_macros_config(&config.macros); - - #[cfg(feature = "config-migrate")] assert_migrate_config(&config.migrate); } @@ -23,7 +19,7 @@ fn assert_common_config(config: &config::common::Config) { fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; - assert_eq!(config.datetime_crate, DateTimeCrate::Chrono); + assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); // Type overrides // Don't need to cover everything, just some important canaries. @@ -83,6 +79,6 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!(config.default_type, DefaultMigrationType::Reversible); - assert_eq!(config.default_versioning, DefaultVersioning::Sequential); + assert_eq!(config.defaults.migration_type, DefaultMigrationType::Reversible); + assert_eq!(config.defaults.migration_versioning, DefaultVersioning::Sequential); } From 0791e060ae2022647967070cf175640975c63942 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 19 Sep 2024 22:54:48 -0700 Subject: [PATCH 08/78] feat: make macros aware of `macros.preferred-crates` --- sqlx-core/src/column.rs | 22 ++-- sqlx-core/src/config/common.rs | 4 +- sqlx-core/src/config/macros.rs | 39 ++++++- sqlx-core/src/config/migrate.rs | 4 +- sqlx-core/src/config/mod.rs | 23 ++-- sqlx-core/src/config/reference.toml | 11 +- sqlx-core/src/config/tests.rs | 13 ++- sqlx-core/src/type_checking.rs | 128 +++++---------------- sqlx-macros-core/src/query/args.rs | 19 +--- sqlx-macros-core/src/query/mod.rs | 41 +++---- sqlx-macros-core/src/query/output.rs | 133 ++++++++++++++-------- sqlx-mysql/src/connection/executor.rs | 6 +- sqlx-mysql/src/protocol/text/column.rs | 2 +- sqlx-postgres/src/column.rs | 3 +- sqlx-postgres/src/connection/describe.rs | 103 +++++++++-------- sqlx-postgres/src/connection/establish.rs | 3 +- sqlx-sqlite/src/column.rs | 2 +- sqlx-sqlite/src/connection/describe.rs | 2 +- sqlx-sqlite/src/statement/handle.rs | 33 +++--- src/lib.rs | 24 ++++ 20 files changed, 320 insertions(+), 295 deletions(-) diff --git a/sqlx-core/src/column.rs b/sqlx-core/src/column.rs index 7483375765..fddc048c4b 100644 --- a/sqlx-core/src/column.rs +++ b/sqlx-core/src/column.rs @@ -23,15 +23,17 @@ pub trait Column: 'static + Send + Sync + Debug { fn type_info(&self) -> &::TypeInfo; /// If this column comes from a table, return the table and original column name. - /// + /// /// Returns [`ColumnOrigin::Expression`] if the column is the result of an expression /// or else the source table could not be determined. - /// + /// /// Returns [`ColumnOrigin::Unknown`] if the database driver does not have that information, /// or has not overridden this method. - // This method returns an owned value instead of a reference, + // This method returns an owned value instead of a reference, // to give the implementor more flexibility. - fn origin(&self) -> ColumnOrigin { ColumnOrigin::Unknown } + fn origin(&self) -> ColumnOrigin { + ColumnOrigin::Unknown + } } /// A [`Column`] that originates from a table. 
@@ -44,20 +46,20 @@ pub struct TableColumn { pub name: Arc, } -/// The possible statuses for our knowledge of the origin of a [`Column`]. +/// The possible statuses for our knowledge of the origin of a [`Column`]. #[derive(Debug, Clone, Default)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] pub enum ColumnOrigin { - /// The column is known to originate from a table. - /// - /// Included is the table name and original column name. + /// The column is known to originate from a table. + /// + /// Included is the table name and original column name. Table(TableColumn), /// The column originates from an expression, or else its origin could not be determined. Expression, /// The database driver does not know the column origin at this time. - /// + /// /// This may happen if: - /// * The connection is in the middle of executing a query, + /// * The connection is in the middle of executing a query, /// and cannot query the catalog to fetch this information. /// * The connection does not have access to the database catalog. /// * The implementation of [`Column`] did not override [`Column::origin()`]. diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index c09ed80d7f..d2bf639e5f 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -44,6 +44,6 @@ pub struct Config { impl Config { pub fn database_url_var(&self) -> &str { - self.database_url_var.as_deref().unwrap_or("DATABASE_URL") + self.database_url_var.as_deref().unwrap_or("DATABASE_URL") } -} \ No newline at end of file +} diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 9f4cf4524f..19e5f42fa0 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -3,13 +3,13 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. #[derive(Debug, Default)] #[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), + feature = "sqlx-toml", + derive(serde::Deserialize), serde(default, rename_all = "kebab-case") )] pub struct Config { /// Specify which crates' types to use when types from multiple crates apply. - /// + /// /// See [`PreferredCrates`] for details. pub preferred_crates: PreferredCrates, @@ -18,6 +18,12 @@ pub struct Config { /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. /// + /// ## Note: Case-Sensitive + /// Currently, the case of the type name MUST match the name SQLx knows it by. + /// Built-in types are spelled in all-uppercase to match SQL convention. + /// + /// However, user-created types in Postgres are all-lowercase unless quoted. + /// /// ## Note: Orthogonal to Nullability /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` /// or not. They only override the inner type used. @@ -63,7 +69,7 @@ pub struct Config { /// ```toml /// [macros.type-overrides] /// # Override a built-in type - /// 'uuid' = "crate::types::MyUuid" + /// 'UUID' = "crate::types::MyUuid" /// /// # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) /// # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -132,6 +138,8 @@ pub struct Config { /// ``` /// /// (See `Note` section above for details.) + // TODO: allow specifying different types for input vs output + // e.g. 
to accept `&[T]` on input but output `Vec` pub type_overrides: BTreeMap, /// Specify per-table and per-column overrides for mapping SQL types to Rust types. @@ -221,7 +229,7 @@ pub struct Config { #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(rename_all = "kebab-case") + serde(default, rename_all = "kebab-case") )] pub struct PreferredCrates { /// Specify the crate to use for mapping date/time types to Rust. @@ -360,6 +368,7 @@ pub type RustType = Box; impl Config { /// Get the override for a given type name (optionally schema-qualified). pub fn type_override(&self, type_name: &str) -> Option<&str> { + // TODO: make this case-insensitive self.type_overrides.get(type_name).map(|s| &**s) } @@ -378,6 +387,15 @@ impl DateTimeCrate { pub fn is_inferred(&self) -> bool { *self == Self::Inferred } + + #[inline(always)] + pub fn crate_name(&self) -> Option<&str> { + match self { + Self::Inferred => None, + Self::Chrono => Some("chrono"), + Self::Time => Some("time"), + } + } } impl NumericCrate { @@ -386,4 +404,13 @@ impl NumericCrate { pub fn is_inferred(&self) -> bool { *self == Self::Inferred } -} \ No newline at end of file + + #[inline(always)] + pub fn crate_name(&self) -> Option<&str> { + match self { + Self::Inferred => None, + Self::BigDecimal => Some("bigdecimal"), + Self::RustDecimal => Some("rust_decimal"), + } + } +} diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index d0e55b35d8..64529f9f02 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -14,8 +14,8 @@ use std::collections::BTreeSet; /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. #[derive(Debug, Default)] #[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), + feature = "sqlx-toml", + derive(serde::Deserialize), serde(default, rename_all = "kebab-case") )] pub struct Config { diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 696752a51b..b3afd9ea1b 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -86,9 +86,7 @@ pub enum ConfigError { /// No configuration file was found. Not necessarily fatal. #[error("config file {path:?} not found")] - NotFound { - path: PathBuf, - }, + NotFound { path: PathBuf }, /// An I/O error occurred while attempting to read the config file at `path`. /// @@ -103,7 +101,7 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. - /// + /// /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { @@ -115,14 +113,12 @@ pub enum ConfigError { /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] - ParseDisabled { - path: PathBuf - }, + ParseDisabled { path: PathBuf }, } impl ConfigError { /// Create a [`ConfigError`] from a [`std::io::Error`]. - /// + /// /// Maps to either `NotFound` or `Io`. pub fn from_io(path: PathBuf, error: io::Error) -> Self { if error.kind() == io::ErrorKind::NotFound { @@ -131,7 +127,7 @@ impl ConfigError { Self::Io { path, error } } } - + /// If this error means the file was not found, return the path that was attempted. 
pub fn not_found_path(&self) -> Option<&Path> { if let Self::NotFound { path } = self { @@ -227,15 +223,18 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error: Box::new(error) }) + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { + path, + error: Box::new(error), + }) } - + #[cfg(not(feature = "sqlx-toml"))] fn read_from(path: PathBuf) -> Result { match path.try_exists() { Ok(true) => Err(ConfigError::ParseDisabled { path }), Ok(false) => Err(ConfigError::NotFound { path }), - Err(e) => Err(ConfigError::from_io(path, e)) + Err(e) => Err(ConfigError::from_io(path, e)), } } } diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index e042824c72..77833fb5a8 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -30,7 +30,14 @@ date-time = "chrono" # in case new date/time crates are added in the future: # date-time = "time" +# Force the macros to use the `rust_decimal` crate for `NUMERIC`, even if `bigdecimal` is enabled. +# +# Defaults to "inferred": use whichever crate is enabled (`bigdecimal` takes precedence over `rust_decimal`). +numeric = "rust_decimal" +# Or, ensure the macros always prefer `bigdecimal` +# in case new decimal crates are added in the future: +# numeric = "bigdecimal" # Set global overrides for mapping SQL types to Rust types. # @@ -44,7 +51,9 @@ date-time = "chrono" # or not. They only override the inner type used. [macros.type-overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) -'uuid' = "crate::types::MyUuid" +# Note: currently, the case of the type name MUST match. +# Built-in types are spelled in all-uppercase to match SQL convention. +'UUID' = "crate::types::MyUuid" # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 6c2883d58b..0b0b590919 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -20,9 +20,12 @@ fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); + assert_eq!(config.preferred_crates.numeric, NumericCrate::RustDecimal); // Type overrides // Don't need to cover everything, just some important canaries. 
+ assert_eq!(config.type_override("UUID"), Some("crate::types::MyUuid")); + assert_eq!(config.type_override("foo"), Some("crate::types::Foo")); assert_eq!(config.type_override(r#""Bar""#), Some("crate::types::Bar"),); @@ -79,6 +82,12 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!(config.defaults.migration_type, DefaultMigrationType::Reversible); - assert_eq!(config.defaults.migration_versioning, DefaultVersioning::Sequential); + assert_eq!( + config.defaults.migration_type, + DefaultMigrationType::Reversible + ); + assert_eq!( + config.defaults.migration_versioning, + DefaultVersioning::Sequential + ); } diff --git a/sqlx-core/src/type_checking.rs b/sqlx-core/src/type_checking.rs index a3ded72abb..c92a3816b2 100644 --- a/sqlx-core/src/type_checking.rs +++ b/sqlx-core/src/type_checking.rs @@ -60,10 +60,6 @@ pub enum Error { DateTimeCrateFeatureNotEnabled, #[error("Cargo feature for configured `macros.preferred-crates.numeric` not enabled")] NumericCrateFeatureNotEnabled, - #[error("multiple date-time types are possible; falling back to `{fallback}`")] - AmbiguousDateTimeType { fallback: &'static str }, - #[error("multiple numeric types are possible; falling back to `{fallback}`")] - AmbiguousNumericType { fallback: &'static str }, } /// An adapter for [`Value`] which attempts to decode the value and format it when printed using [`Debug`]. @@ -190,22 +186,6 @@ macro_rules! impl_type_checking { use $crate::config::macros::{DateTimeCrate, NumericCrate}; use $crate::type_checking::Error; - // Check non-special types - // --------------------- - $( - $(#[$meta])? - if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { - return Ok($crate::select_input_type!($ty $(, $input)?)); - } - )* - - $( - $(#[$meta])? - if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { - return Ok($crate::select_input_type!($ty $(, $input)?)); - } - )* - // Check `macros.preferred-crates.date-time` // // Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled. @@ -215,24 +195,12 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) { $( if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info { - if cfg!(feature = "chrono") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { - fallback: $crate::select_input_type!($time_ty $(, $time_input)?), - }); - } - return Ok($crate::select_input_type!($time_ty $(, $time_input)?)); } )* $( if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) { - if cfg!(feature = "chrono") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { - fallback: $crate::select_input_type!($time_ty $(, $time_input)?), - }); - } - return Ok($crate::select_input_type!($time_ty $(, $time_input)?)); } )* @@ -272,24 +240,12 @@ macro_rules! 
impl_type_checking { if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info { - if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousNumericType { - fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?), - }); - } - return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?)); } )* $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) { - if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousNumericType { - fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?), - }); - } - return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?)); } )* @@ -320,32 +276,32 @@ macro_rules! impl_type_checking { return Err(Error::NumericCrateFeatureNotEnabled); } - Err(Error::NoMappingFound) - } - - fn return_type_for_id( - info: &Self::TypeInfo, - preferred_crates: &$crate::config::macros::PreferredCrates, - ) -> Result<&'static str, $crate::type_checking::Error> { - use $crate::config::macros::{DateTimeCrate, NumericCrate}; - use $crate::type_checking::Error; - - // Check non-special types + // Check all other types // --------------------- $( $(#[$meta])? if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { - return Ok(stringify!($ty)); + return Ok($crate::select_input_type!($ty $(, $input)?)); } )* $( $(#[$meta])? if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { - return Ok(stringify!($ty)); + return Ok($crate::select_input_type!($ty $(, $input)?)); } )* + Err(Error::NoMappingFound) + } + + fn return_type_for_id( + info: &Self::TypeInfo, + preferred_crates: &$crate::config::macros::PreferredCrates, + ) -> Result<&'static str, $crate::type_checking::Error> { + use $crate::config::macros::{DateTimeCrate, NumericCrate}; + use $crate::type_checking::Error; + // Check `macros.preferred-crates.date-time` // // Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled. @@ -355,24 +311,12 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) { $( if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info { - if cfg!(feature = "chrono") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { - fallback: stringify!($time_ty), - }); - } - return Ok(stringify!($time_ty)); } )* $( if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) { - if cfg!(feature = "chrono") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { - fallback: stringify!($time_ty), - }); - } - return Ok(stringify!($time_ty)); } )* @@ -412,24 +356,12 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info { - if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousNumericType { - fallback: stringify!($bigdecimal_ty), - }); - } - return Ok(stringify!($bigdecimal_ty)); } )* $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) { - if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousNumericType { - fallback: stringify!($bigdecimal_ty), - }); - } - return Ok(stringify!($bigdecimal_ty)); } )* @@ -460,6 +392,22 @@ macro_rules! 
impl_type_checking { return Err(Error::NumericCrateFeatureNotEnabled); } + // Check all other types + // --------------------- + $( + $(#[$meta])? + if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { + return Ok(stringify!($ty)); + } + )* + + $( + $(#[$meta])? + if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { + return Ok(stringify!($ty)); + } + )* + Err(Error::NoMappingFound) } @@ -490,24 +438,6 @@ macro_rules! impl_type_checking { )* } - #[cfg(feature = "bigdecimal")] - { - $( - if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) { - return $crate::type_checking::FmtValue::debug::<$bigdecimal_ty>(value); - } - )* - } - - #[cfg(feature = "rust_decimal")] - { - $( - if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) { - return $crate::type_checking::FmtValue::debug::<$rust_decimal_ty>(value); - } - )* - } - $( $(#[$meta])? if <$ty as sqlx_core::types::Type<$database>>::compatible(&info) { diff --git a/sqlx-macros-core/src/query/args.rs b/sqlx-macros-core/src/query/args.rs index 1b338efa3e..e546702e64 100644 --- a/sqlx-macros-core/src/query/args.rs +++ b/sqlx-macros-core/src/query/args.rs @@ -15,7 +15,6 @@ use syn::{Expr, ExprCast, ExprGroup, Type}; pub fn quote_args( input: &QueryMacroInput, config: &Config, - warnings: &mut Warnings, info: &Describe, ) -> crate::Result { let db_path = DB::db_path(); @@ -60,7 +59,7 @@ pub fn quote_args( return Ok(quote!()); } - let param_ty = get_param_type::(param_ty, config, warnings, i)?; + let param_ty = get_param_type::(param_ty, config, i)?; Ok(quote_spanned!(expr.span() => // this shouldn't actually run @@ -108,7 +107,6 @@ pub fn quote_args( fn get_param_type( param_ty: &DB::TypeInfo, config: &Config, - warnings: &mut Warnings, i: usize, ) -> crate::Result { if let Some(type_override) = config.macros.type_override(param_ty.name()) { @@ -129,10 +127,7 @@ fn get_param_type( "optional sqlx feature `{feature_gate}` required for type {param_ty} of param #{param_num}", ) } else { - format!( - "no built-in mapping for type {param_ty} of param #{param_num}; \ - a type override may be required, see documentation for details" - ) + format!("unsupported type {param_ty} for param #{param_num}") } } type_checking::Error::DateTimeCrateFeatureNotEnabled => { @@ -161,16 +156,6 @@ fn get_param_type( (configured by `macros.preferred-crates.numeric` in sqlx.toml)", ) } - - type_checking::Error::AmbiguousDateTimeType { fallback } => { - warnings.ambiguous_datetime = true; - return Ok(fallback.parse()?); - } - - type_checking::Error::AmbiguousNumericType { fallback } => { - warnings.ambiguous_numeric = true; - return Ok(fallback.parse()?); - } }; Err(message.into()) diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index 4f1d37ac52..198a3f236b 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -17,7 +17,6 @@ use crate::query::input::RecordType; use either::Either; use sqlx_core::config::Config; use url::Url; -use sqlx_core::config::Config; mod args; mod data; @@ -124,11 +123,9 @@ fn init_metadata(manifest_dir: &String) -> crate::Result { .or(offline) .map(|s| s.eq_ignore_ascii_case("true") || s == "1") .unwrap_or(false); - - let var_name = Config::from_crate() - .common - .database_url_var(); - + + let var_name = Config::from_crate().common.database_url_var(); + let database_url = env(var_name).ok(); let database_url = env(config.common.database_url_var()).ok().or(database_url); @@ -265,6 +262,8 @@ fn 
expand_with_data( where Describe: DescribeExt, { + let config = Config::from_crate(); + // validate at the minimum that our args match the query's input parameters let num_parameters = match data.describe.parameters() { Some(Either::Left(params)) => Some(params.len()), @@ -281,9 +280,7 @@ where } } - let mut warnings = Warnings::default(); - - let args_tokens = args::quote_args(&input, config, &mut warnings, &data.describe)?; + let args_tokens = args::quote_args(&input, config, &data.describe)?; let query_args = format_ident!("query_args"); @@ -302,7 +299,7 @@ where } else { match input.record_type { RecordType::Generated => { - let columns = output::columns_to_rust::(&data.describe, config, &mut warnings)?; + let columns = output::columns_to_rust::(&data.describe, config)?; let record_name: Type = syn::parse_str("Record").unwrap(); @@ -338,32 +335,28 @@ where record_tokens } RecordType::Given(ref out_ty) => { - let columns = output::columns_to_rust::(&data.describe, config, &mut warnings)?; + let columns = output::columns_to_rust::(&data.describe, config)?; output::quote_query_as::(&input, out_ty, &query_args, &columns) } - RecordType::Scalar => output::quote_query_scalar::( - &input, - config, - &mut warnings, - &query_args, - &data.describe, - )?, + RecordType::Scalar => { + output::quote_query_scalar::(&input, config, &query_args, &data.describe)? + } } }; - let mut warnings_out = TokenStream::new(); + let mut warnings = TokenStream::new(); - if warnings.ambiguous_datetime { + if config.macros.preferred_crates.date_time.is_inferred() { // Warns if the date-time crate is inferred but both `chrono` and `time` are enabled - warnings_out.extend(quote! { + warnings.extend(quote! { ::sqlx::warn_on_ambiguous_inferred_date_time_crate(); }); } - if warnings.ambiguous_numeric { + if config.macros.preferred_crates.numeric.is_inferred() { // Warns if the numeric crate is inferred but both `bigdecimal` and `rust_decimal` are enabled - warnings_out.extend(quote! { + warnings.extend(quote! 
{ ::sqlx::warn_on_ambiguous_inferred_numeric_crate(); }); } @@ -374,7 +367,7 @@ where { use ::sqlx::Arguments as _; - #warnings_out + #warnings #args_tokens diff --git a/sqlx-macros-core/src/query/output.rs b/sqlx-macros-core/src/query/output.rs index 09c674ff04..1a145e3a75 100644 --- a/sqlx-macros-core/src/query/output.rs +++ b/sqlx-macros-core/src/query/output.rs @@ -7,7 +7,7 @@ use sqlx_core::describe::Describe; use crate::database::DatabaseExt; -use crate::query::{QueryMacroInput, Warnings}; +use crate::query::QueryMacroInput; use sqlx_core::config::Config; use sqlx_core::type_checking; use sqlx_core::type_checking::TypeChecking; @@ -15,8 +15,6 @@ use sqlx_core::type_info::TypeInfo; use std::fmt::{self, Display, Formatter}; use syn::parse::{Parse, ParseStream}; use syn::Token; -use sqlx_core::config::Config; -use sqlx_core::type_info::TypeInfo; pub struct RustColumn { pub(super) ident: Ident, @@ -84,17 +82,15 @@ impl Display for DisplayColumn<'_> { pub fn columns_to_rust( describe: &Describe, config: &Config, - warnings: &mut Warnings, ) -> crate::Result> { (0..describe.columns().len()) - .map(|i| column_to_rust(describe, config, warnings, i)) + .map(|i| column_to_rust(describe, config, i)) .collect::>>() } fn column_to_rust( describe: &Describe, config: &Config, - warnings: &mut Warnings, i: usize, ) -> crate::Result { let column = &describe.columns()[i]; @@ -120,7 +116,7 @@ fn column_to_rust( (ColumnTypeOverride::Wildcard, true) => ColumnType::OptWildcard, (ColumnTypeOverride::None, _) => { - let type_ = get_column_type::(config, warnings, i, column); + let type_ = get_column_type::(config, i, column); if !nullable { ColumnType::Exact(type_) } else { @@ -208,7 +204,6 @@ pub fn quote_query_as( pub fn quote_query_scalar( input: &QueryMacroInput, config: &Config, - warnings: &mut Warnings, bind_args: &Ident, describe: &Describe, ) -> crate::Result { @@ -223,10 +218,10 @@ pub fn quote_query_scalar( } // attempt to parse a column override, otherwise fall back to the inferred type of the column - let ty = if let Ok(rust_col) = column_to_rust(describe, config, warnings, 0) { + let ty = if let Ok(rust_col) = column_to_rust(describe, config, 0) { rust_col.type_.to_token_stream() } else if input.checked { - let ty = get_column_type::(config, warnings, 0, &columns[0]); + let ty = get_column_type::(config, 0, &columns[0]); if describe.nullable(0).unwrap_or(true) { quote! 
{ ::std::option::Option<#ty> } } else { @@ -244,52 +239,92 @@ pub fn quote_query_scalar( }) } -fn get_column_type(i: usize, column: &DB::Column) -> TokenStream { +fn get_column_type(config: &Config, i: usize, column: &DB::Column) -> TokenStream { if let ColumnOrigin::Table(origin) = column.origin() { - if let Some(column_override) = Config::from_crate() - .macros - .column_override(&origin.table, &origin.name) - { + if let Some(column_override) = config.macros.column_override(&origin.table, &origin.name) { return column_override.parse().unwrap(); } } - + let type_info = column.type_info(); - if let Some(type_override) = Config::from_crate() - .macros - .type_override(type_info.name()) - { - return type_override.parse().unwrap(); + if let Some(type_override) = config.macros.type_override(type_info.name()) { + return type_override.parse().unwrap(); } - - ::return_type_for_id(type_info).map_or_else( - || { - let message = - if let Some(feature_gate) = ::get_feature_gate(type_info) { - format!( - "optional sqlx feature `{feat}` required for type {ty} of {col}", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } else { - format!( - "unsupported type {ty} of {col}", - ty = type_info, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - }; - syn::Error::new(Span::call_site(), message).to_compile_error() - }, - |t| t.parse().unwrap(), - ) + + let err = match ::return_type_for_id( + type_info, + &config.macros.preferred_crates, + ) { + Ok(t) => return t.parse().unwrap(), + Err(e) => e, + }; + + let message = match err { + type_checking::Error::NoMappingFound => { + if let Some(feature_gate) = ::get_feature_gate(type_info) { + format!( + "SQLx feature `{feat}` required for type {ty} of {col}", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } else { + format!( + "no built-in mapping found for type {ty} of {col}; \ + a type override may be required, see documentation for details", + ty = type_info, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + } + type_checking::Error::DateTimeCrateFeatureNotEnabled => { + let feature_gate = config + .macros + .preferred_crates + .date_time + .crate_name() + .expect("BUG: got feature-not-enabled error for DateTimeCrate::Inferred"); + + format!( + "SQLx feature `{feat}` required for type {ty} of {col} \ + (configured by `macros.preferred-crates.date-time` in sqlx.toml)", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + type_checking::Error::NumericCrateFeatureNotEnabled => { + let feature_gate = config + .macros + .preferred_crates + .numeric + .crate_name() + .expect("BUG: got feature-not-enabled error for NumericCrate::Inferred"); + + format!( + "SQLx feature `{feat}` required for type {ty} of {col} \ + (configured by `macros.preferred-crates.numeric` in sqlx.toml)", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + }; + + syn::Error::new(Span::call_site(), message).to_compile_error() } impl ColumnDecl { diff --git a/sqlx-mysql/src/connection/executor.rs b/sqlx-mysql/src/connection/executor.rs index 6ded62c609..40928d7065 100644 --- a/sqlx-mysql/src/connection/executor.rs +++ b/sqlx-mysql/src/connection/executor.rs @@ -22,8 +22,8 @@ use futures_core::future::BoxFuture; use futures_core::stream::BoxStream; use futures_core::Stream; use futures_util::{pin_mut, TryStreamExt}; -use 
std::{borrow::Cow, sync::Arc}; use sqlx_core::column::{ColumnOrigin, TableColumn}; +use std::{borrow::Cow, sync::Arc}; impl MySqlConnection { async fn prepare_statement( @@ -399,7 +399,7 @@ fn recv_next_result_column(def: &ColumnDefinition, ordinal: usize) -> Result Result Result<&str, Error> { str::from_utf8(&self.table).map_err(Error::protocol) } - + pub(crate) fn name(&self) -> Result<&str, Error> { str::from_utf8(&self.name).map_err(Error::protocol) } diff --git a/sqlx-postgres/src/column.rs b/sqlx-postgres/src/column.rs index 8530267b5d..4dd3a1cbd2 100644 --- a/sqlx-postgres/src/column.rs +++ b/sqlx-postgres/src/column.rs @@ -3,7 +3,6 @@ use crate::{PgTypeInfo, Postgres}; use sqlx_core::column::ColumnOrigin; pub(crate) use sqlx_core::column::{Column, ColumnIndex}; -use sqlx_core::column::ColumnOrigin; #[derive(Debug, Clone)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] @@ -14,7 +13,7 @@ pub struct PgColumn { #[cfg_attr(feature = "offline", serde(default))] pub(crate) origin: ColumnOrigin, - + #[cfg_attr(feature = "offline", serde(skip))] pub(crate) relation_id: Option, #[cfg_attr(feature = "offline", serde(skip))] diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 50bda84762..1b8b18edd8 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -1,4 +1,4 @@ -use std::collections::btree_map; +use crate::connection::TableColumns; use crate::error::Error; use crate::ext::ustr::UStr; use crate::io::StatementId; @@ -15,9 +15,6 @@ use smallvec::SmallVec; use sqlx_core::column::{ColumnOrigin, TableColumn}; use sqlx_core::query_builder::QueryBuilder; use std::sync::Arc; -use sqlx_core::column::{ColumnOrigin, TableColumn}; -use sqlx_core::hash_map; -use crate::connection::TableColumns; /// Describes the type of the `pg_type.typtype` column /// @@ -127,9 +124,12 @@ impl PgConnection { let type_info = self .maybe_fetch_type_info_by_oid(field.data_type_id, fetch_type_info) .await?; - - let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) { - self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch).await? + + let origin = if let (Some(relation_oid), Some(attribute_no)) = + (field.relation_id, field.relation_attribute_no) + { + self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch) + .await? 
} else { ColumnOrigin::Expression }; @@ -211,52 +211,65 @@ impl PgConnection { Ok(PgTypeInfo(PgType::DeclareWithOid(oid))) } } - + async fn maybe_fetch_column_origin( - &mut self, - relation_id: Oid, + &mut self, + relation_id: Oid, attribute_no: i16, should_fetch: bool, ) -> Result { - let mut table_columns = match self.cache_table_to_column_names.entry(relation_id) { - hash_map::Entry::Occupied(table_columns) => { - table_columns.into_mut() - }, - hash_map::Entry::Vacant(vacant) => { - if !should_fetch { return Ok(ColumnOrigin::Unknown); } - - let table_name: String = query_scalar("SELECT $1::oid::regclass::text") - .bind(relation_id) - .fetch_one(&mut *self) - .await?; - - vacant.insert(TableColumns { - table_name: table_name.into(), - columns: Default::default(), + if let Some(origin) = + self.cache_table_to_column_names + .get(&relation_id) + .and_then(|table_columns| { + let column_name = table_columns.columns.get(&attribute_no).cloned()?; + + Some(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name, + })) }) - } - }; - - let column_name = match table_columns.columns.entry(attribute_no) { - btree_map::Entry::Occupied(occupied) => Arc::clone(occupied.get()), - btree_map::Entry::Vacant(vacant) => { - if !should_fetch { return Ok(ColumnOrigin::Unknown); } - - let column_name: String = query_scalar( - "SELECT attname FROM pg_attribute WHERE attrelid = $1 AND attnum = $2" - ) - .bind(relation_id) - .bind(attribute_no) - .fetch_one(&mut *self) - .await?; - - Arc::clone(vacant.insert(column_name.into())) - } + { + return Ok(origin); + } + + if !should_fetch { + return Ok(ColumnOrigin::Unknown); + } + + // Looking up the table name _may_ end up being redundant, + // but the round-trip to the server is by far the most expensive part anyway. + let Some((table_name, column_name)): Option<(String, String)> = query_as( + // language=PostgreSQL + "SELECT $1::oid::regclass::text, attname \ + FROM pg_catalog.pg_attribute \ + WHERE attrelid = $1 AND attnum = $2", + ) + .bind(relation_id) + .bind(attribute_no) + .fetch_optional(&mut *self) + .await? + else { + // The column/table doesn't exist anymore for whatever reason. 
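+ // Returning `Unknown` here instead of an error lets `describe()` proceed;
+ // the query macros simply skip table/column overrides for this column.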
+ return Ok(ColumnOrigin::Unknown); }; - + + let table_columns = self + .cache_table_to_column_names + .entry(relation_id) + .or_insert_with(|| TableColumns { + table_name: table_name.into(), + columns: Default::default(), + }); + + let column_name = table_columns + .columns + .entry(attribute_no) + .or_insert(column_name.into()); + Ok(ColumnOrigin::Table(TableColumn { table: table_columns.table_name.clone(), - name: column_name + name: Arc::clone(column_name), })) } diff --git a/sqlx-postgres/src/connection/establish.rs b/sqlx-postgres/src/connection/establish.rs index 634b71de4b..684bf26599 100644 --- a/sqlx-postgres/src/connection/establish.rs +++ b/sqlx-postgres/src/connection/establish.rs @@ -149,8 +149,7 @@ impl PgConnection { cache_type_info: HashMap::new(), cache_elem_type_to_array: HashMap::new(), cache_table_to_column_names: HashMap::new(), - log_settings: options.log_settings.clone(), - }), + log_settings: options.log_settings.clone(),}), }) } } diff --git a/sqlx-sqlite/src/column.rs b/sqlx-sqlite/src/column.rs index 390f3687fb..d319bd46a8 100644 --- a/sqlx-sqlite/src/column.rs +++ b/sqlx-sqlite/src/column.rs @@ -11,7 +11,7 @@ pub struct SqliteColumn { pub(crate) type_info: SqliteTypeInfo, #[cfg_attr(feature = "offline", serde(default))] - pub(crate) origin: ColumnOrigin + pub(crate) origin: ColumnOrigin, } impl Column for SqliteColumn { diff --git a/sqlx-sqlite/src/connection/describe.rs b/sqlx-sqlite/src/connection/describe.rs index b75cf98e56..5ff7081502 100644 --- a/sqlx-sqlite/src/connection/describe.rs +++ b/sqlx-sqlite/src/connection/describe.rs @@ -49,7 +49,7 @@ pub(crate) fn describe(conn: &mut ConnectionState, query: &str) -> Result ColumnOrigin { - if let Some((table, name)) = - self.column_table_name(index).zip(self.column_origin_name(index)) + if let Some((table, name)) = self + .column_table_name(index) + .zip(self.column_origin_name(index)) { let table: Arc = self .column_db_name(index) @@ -125,20 +126,20 @@ impl StatementHandle { // TODO: check that SQLite returns the names properly quoted if necessary |db| format!("{db}.{table}").into(), ); - + ColumnOrigin::Table(TableColumn { table, - name: name.into() + name: name.into(), }) } else { ColumnOrigin::Expression } } - + fn column_db_name(&self, index: usize) -> Option<&str> { unsafe { let db_name = sqlite3_column_database_name(self.0.as_ptr(), check_col_idx!(index)); - + if !db_name.is_null() { Some(from_utf8_unchecked(CStr::from_ptr(db_name).to_bytes())) } else { @@ -170,7 +171,7 @@ impl StatementHandle { } } } - + pub(crate) fn column_type_info(&self, index: usize) -> SqliteTypeInfo { SqliteTypeInfo(DataType::from_code(self.column_type(index))) } diff --git a/src/lib.rs b/src/lib.rs index 2e801540dd..3d5fb9bb26 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -176,3 +176,27 @@ pub mod prelude { #[cfg(feature = "_unstable-doc")] pub use sqlx_core::config; + +#[doc(hidden)] +#[cfg_attr( + all(feature = "chrono", feature = "time"), + deprecated = "SQLx has both `chrono` and `time` features enabled, \ + which presents an ambiguity when the `query!()` macros are mapping date/time types. 
\ + The `query!()` macros prefer types from `time` by default, \ + but this behavior should not be relied upon; \ + to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ + https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.date_time" +)] +pub fn warn_on_ambiguous_inferred_date_time_crate() {} + +#[doc(hidden)] +#[cfg_attr( + all(feature = "bigdecimal", feature = "rust_decimal"), + deprecated = "SQLx has both `bigdecimal` and `rust_decimal` features enabled, \ + which presents an ambiguity when the `query!()` macros are mapping `NUMERIC`. \ + The `query!()` macros prefer `bigdecimal::BigDecimal` by default, \ + but this behavior should not be relied upon; \ + to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ + https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.numeric" +)] +pub fn warn_on_ambiguous_inferred_numeric_crate() {} From 6b0df6d925af526a80544869a6c8cf9c1d8fc23d Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 20 Sep 2024 00:46:43 -0700 Subject: [PATCH 09/78] feat: make `sqlx-cli` aware of `database-url-var` --- sqlx-cli/src/database.rs | 2 +- sqlx-cli/src/lib.rs | 122 ++++++++++++------------------------ sqlx-cli/src/opt.rs | 52 ++------------- sqlx-core/src/config/mod.rs | 66 +++++++++++-------- src/lib.rs | 6 ++ 5 files changed, 94 insertions(+), 154 deletions(-) diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index eaba46eed9..4847eec2ff 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -26,7 +26,7 @@ pub async fn create(connect_opts: &ConnectOpts) -> anyhow::Result<()> { } pub async fn drop(connect_opts: &ConnectOpts, confirm: bool, force: bool) -> anyhow::Result<()> { - if confirm && !ask_to_continue_drop(connect_opts.expect_db_url()?.to_owned()).await { + if confirm && !ask_to_continue_drop(connect_opts.expect_db_url()?) { return Ok(()); } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index adcc6a1306..4d2815d8af 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,8 +1,10 @@ use std::future::Future; use std::io; +use std::path::{Path, PathBuf}; use std::time::Duration; -use futures_util::TryFutureExt; +use anyhow::{Context, Result}; +use futures::{Future, TryFutureExt}; use sqlx::{AnyConnection, Connection}; use tokio::{select, signal}; @@ -21,38 +23,12 @@ mod prepare; pub use crate::opt::Opt; -pub use sqlx::_unstable::config::{self, Config}; +pub use sqlx::_unstable::config; +use crate::config::Config; -/// Check arguments for `--no-dotenv` _before_ Clap parsing, and apply `.env` if not set. -pub fn maybe_apply_dotenv() { - if std::env::args().any(|arg| arg == "--no-dotenv") { - return; - } +pub async fn run(opt: Opt) -> Result<()> { + let config = config_from_current_dir()?; - dotenvy::dotenv().ok(); -} - -pub async fn run(opt: Opt) -> anyhow::Result<()> { - // This `select!` is here so that when the process receives a `SIGINT` (CTRL + C), - // the futures currently running on this task get dropped before the program exits. - // This is currently necessary for the consumers of the `dialoguer` crate to restore - // the user's terminal if the process is interrupted while a dialog is being displayed. - - let ctrlc_fut = signal::ctrl_c(); - let do_run_fut = do_run(opt); - - select! 
{ - biased; - _ = ctrlc_fut => { - Ok(()) - }, - do_run_outcome = do_run_fut => { - do_run_outcome - } - } -} - -async fn do_run(opt: Opt) -> anyhow::Result<()> { match opt.command { Command::Migrate(migrate) => match migrate.command { MigrateCommand::Add(opts) => migrate::add(opts).await?, @@ -64,9 +40,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { mut connect_opts, target_version, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; + connect_opts.populate_db_url(config)?; migrate::run( &config, @@ -86,9 +60,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { mut connect_opts, target_version, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; + connect_opts.populate_db_url(config)?; migrate::revert( &config, @@ -102,69 +74,44 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } MigrateCommand::Info { source, - config, mut connect_opts, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; - - migrate::info(&config, &source, &connect_opts).await? - } - MigrateCommand::BuildScript { - source, - config, - force, - } => { - let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; - migrate::build_script(&config, &source, force)? - } + migrate::info(&source, &connect_opts).await? + }, + MigrateCommand::BuildScript { source, force } => migrate::build_script(&source, force)?, }, Command::Database(database) => match database.command { - DatabaseCommand::Create { - config, - mut connect_opts, - } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; + DatabaseCommand::Create { mut connect_opts } => { + connect_opts.populate_db_url(config)?; database::create(&connect_opts).await? - } + }, DatabaseCommand::Drop { confirmation, - config, mut connect_opts, force, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; + connect_opts.populate_db_url(config)?; database::drop(&connect_opts, !confirmation.yes, force).await? - } + }, DatabaseCommand::Reset { confirmation, source, - config, mut connect_opts, force, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; - database::reset(&config, &source, &connect_opts, !confirmation.yes, force).await? - } + connect_opts.populate_db_url(config)?; + database::reset(&source, &connect_opts, !confirmation.yes, force).await? + }, DatabaseCommand::Setup { source, - config, mut connect_opts, } => { - let config = config.load_config().await?; - - connect_opts.populate_db_url(&config)?; - database::setup(&config, &source, &connect_opts).await? - } + connect_opts.populate_db_url(config)?; + database::setup(&source, &connect_opts).await? + }, }, Command::Prepare { @@ -173,12 +120,10 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { workspace, mut connect_opts, args, - config, } => { - let config = config.load_config().await?; - connect_opts.populate_db_url(&config)?; + connect_opts.populate_db_url(config)?; prepare::run(check, all, workspace, connect_opts, args).await? 
- } + }, #[cfg(feature = "completions")] Command::Completions { shell } => completions::run(shell), @@ -231,3 +176,18 @@ where ) .await } + +async fn config_from_current_dir() -> anyhow::Result<&'static Config> { + // Tokio does file I/O on a background task anyway + tokio::task::spawn_blocking(|| { + let path = PathBuf::from("sqlx.toml"); + + if path.exists() { + eprintln!("Found `sqlx.toml` in current directory; reading..."); + } + + Config::read_with_or_default(move || Ok(path)) + }) + .await + .context("unexpected error loading config") +} diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index cb09bc2ff5..7ae6733e72 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,23 +1,10 @@ -use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; -use crate::config::Config; +use std::env; +use std::ops::{Deref, Not}; use anyhow::Context; -use chrono::Utc; -use clap::{ - builder::{styling::AnsiColor, Styles}, - Args, Parser, -}; +use clap::{Args, Parser}; #[cfg(feature = "completions")] use clap_complete::Shell; -use sqlx::migrate::{MigrateError, Migrator, ResolveWith}; -use std::env; -use std::ops::{Deref, Not}; -use std::path::PathBuf; - -const HELP_STYLES: Styles = Styles::styled() - .header(AnsiColor::Blue.on_default().bold()) - .usage(AnsiColor::Blue.on_default().bold()) - .literal(AnsiColor::White.on_default()) - .placeholder(AnsiColor::Green.on_default()); +use sqlx::config::Config; #[derive(Parser, Debug)] #[clap(version, about, author, styles = HELP_STYLES)] @@ -415,9 +402,7 @@ impl ConnectOpts { /// Require a database URL to be provided, otherwise /// return an error. pub fn expect_db_url(&self) -> anyhow::Result<&str> { - self.database_url - .as_deref() - .context("BUG: database_url not populated") + self.database_url.as_deref().context("BUG: database_url not populated") } /// Populate `database_url` from the environment, if not set. @@ -441,7 +426,7 @@ impl ConnectOpts { } self.database_url = Some(url) - } + }, Err(env::VarError::NotPresent) => { anyhow::bail!("`--database-url` or `{var}`{context} must be set") } @@ -454,31 +439,6 @@ impl ConnectOpts { } } -impl ConfigOpt { - pub async fn load_config(&self) -> anyhow::Result { - let path = self.config.clone(); - - // Tokio does file I/O on a background task anyway - tokio::task::spawn_blocking(|| { - if let Some(path) = path { - let err_str = format!("error reading config from {path:?}"); - Config::try_from_path(path).context(err_str) - } else { - let path = PathBuf::from("sqlx.toml"); - - if path.exists() { - eprintln!("Found `sqlx.toml` in current directory; reading..."); - Ok(Config::try_from_path(path)?) - } else { - Ok(Config::default()) - } - } - }) - .await - .context("unexpected error loading config")? - } -} - /// Argument for automatic confirmation. #[derive(Args, Copy, Clone, Debug)] pub struct Confirmation { diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index b3afd9ea1b..02bde20f73 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -152,25 +152,7 @@ impl Config { /// ### Panics /// If the file exists but an unrecoverable error was encountered while parsing it. pub fn from_crate() -> &'static Self { - Self::try_from_crate().unwrap_or_else(|e| { - match e { - ConfigError::NotFound { path } => { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found"); - CACHE.get_or_init(Config::default) - } - // FATAL ERRORS BELOW: - // In the case of migrations, - // we can't proceed with defaults as they may be completely wrong. 
- e @ ConfigError::ParseDisabled { .. } => { - // Only returned if the file exists but the feature is not enabled. - panic!("{e}") - } - e => { - panic!("failed to read sqlx config: {e}") - } - } - }) + Self::read_with_or_default(get_crate_path) } /// Get the cached config, or to read `$CARGO_MANIFEST_DIR/sqlx.toml`. @@ -179,11 +161,7 @@ impl Config { /// /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. pub fn try_from_crate() -> Result<&'static Self, ConfigError> { - Self::try_get_with(|| { - let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); - path.push("sqlx.toml"); - Ok(path) - }) + Self::try_read_with(get_crate_path) } /// Get the cached config, or attempt to read `sqlx.toml` from the current working directory. @@ -192,7 +170,7 @@ impl Config { /// /// Errors if the config file does not exist, or could not be read. pub fn try_from_current_dir() -> Result<&'static Self, ConfigError> { - Self::try_get_with(|| Ok("sqlx.toml".into())) + Self::try_read_with(|| Ok("sqlx.toml".into())) } /// Get the cached config, or attempt to read it from the path returned by the closure. @@ -200,7 +178,7 @@ impl Config { /// On success, the config is cached in a `static` and returned by future calls. /// /// Errors if the config file does not exist, or could not be read. - pub fn try_get_with( + pub fn try_read_with( make_path: impl FnOnce() -> Result, ) -> Result<&'static Self, ConfigError> { CACHE.get_or_try_init(|| { @@ -209,6 +187,36 @@ impl Config { }) } + /// Get the cached config, or attempt to read it from the path returned by the closure. + /// + /// On success, the config is cached in a `static` and returned by future calls. + /// + /// Returns `Config::default()` if the file does not exist. + pub fn read_with_or_default( + make_path: impl FnOnce() -> Result, + ) -> &'static Self { + CACHE.get_or_init(|| { + match make_path().and_then(Self::read_from) { + Ok(config) => config, + Err(ConfigError::NotFound { path }) => { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found"); + Config::default() + } + // FATAL ERRORS BELOW: + // In the case of migrations, + // we can't proceed with defaults as they may be completely wrong. + Err(e @ ConfigError::ParseDisabled { .. }) => { + // Only returned if the file exists but the feature is not enabled. + panic!("{e}") + } + Err(e) => { + panic!("failed to read sqlx config: {e}") + } + } + }) + } + #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. @@ -238,3 +246,9 @@ impl Config { } } } + +fn get_crate_path() -> Result { + let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); + path.push("sqlx.toml"); + Ok(path) +} diff --git a/src/lib.rs b/src/lib.rs index 3d5fb9bb26..ce34f0e851 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -177,6 +177,12 @@ pub mod prelude { #[cfg(feature = "_unstable-doc")] pub use sqlx_core::config; +// NOTE: APIs exported in this module are SemVer-exempt. 
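+// They exist so `sqlx-cli` can read the same `sqlx.toml` that the macros use;
+// code outside the SQLx workspace should not depend on this module directly.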
+#[doc(hidden)] +pub mod _unstable { + pub use sqlx_core::config; +} + #[doc(hidden)] #[cfg_attr( all(feature = "chrono", feature = "time"), From c377de7e7a96edd81f8d1fe6bd27ca9eb81d388c Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 23 Sep 2024 02:06:46 -0700 Subject: [PATCH 10/78] feat: teach macros about `migrate.table-name`, `migrations-dir` --- sqlx-macros-core/src/migrate.rs | 31 ++++++++++++++++++++----------- sqlx-macros-core/src/test_attr.rs | 9 +++++---- sqlx-macros/src/lib.rs | 2 +- 3 files changed, 26 insertions(+), 16 deletions(-) diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 4f051d1330..485c5c8f64 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -8,6 +8,9 @@ use quote::{quote, ToTokens, TokenStreamExt}; use sqlx_core::config::Config; use sqlx_core::migrate::{Migration, MigrationType}; use syn::LitStr; +use syn::spanned::Spanned; +use sqlx_core::config::Config; +use sqlx_core::migrate::{Migration, MigrationType}; pub const DEFAULT_PATH: &str = "./migrations"; @@ -84,22 +87,22 @@ impl ToTokens for QuoteMigration { } pub fn default_path(config: &Config) -> &str { - config - .migrate - .migrations_dir + config.migrate.migrations_dir .as_deref() .unwrap_or(DEFAULT_PATH) } pub fn expand(path_arg: Option) -> crate::Result { - let config = Config::try_from_crate_or_default()?; + let config = Config::from_crate(); - let path = match path_arg { - Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, - None => { crate::common::resolve_path(default_path(&config), Span::call_site()) }?, - }; + let path = match path_arg { + Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, + None => { + crate::common::resolve_path(default_path(config), Span::call_site()) + }? + }; - expand_with_path(&config, &path) + expand_with_path(config, &path) } pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result { @@ -135,13 +138,19 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result TokenStream { #[cfg(feature = "migrate")] fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - let config = sqlx_core::config::Config::try_from_crate_or_default()?; + let config = sqlx_core::config::Config::from_crate(); let ret = &input.sig.output; let name = &input.sig.ident; @@ -149,12 +149,13 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - let path = crate::migrate::default_path(&config); + let path = crate::migrate::default_path(config); - let resolved_path = crate::common::resolve_path(path, proc_macro2::Span::call_site())?; + let resolved_path = + crate::common::resolve_path(path, proc_macro2::Span::call_site())?; if resolved_path.is_dir() { - let migrator = crate::migrate::expand_with_path(&config, &resolved_path)?; + let migrator = crate::migrate::expand_with_path(config, &resolved_path)?; quote! { args.migrator(&#migrator); } } else { quote! 
{} diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs index ccffc9bd2a..f527f5d2fd 100644 --- a/sqlx-macros/src/lib.rs +++ b/sqlx-macros/src/lib.rs @@ -68,7 +68,7 @@ pub fn derive_from_row(input: TokenStream) -> TokenStream { pub fn migrate(input: TokenStream) -> TokenStream { use syn::LitStr; - let input = syn::parse_macro_input!(input as Option); + let input = syn::parse_macro_input!(input as LitStr); match migrate::expand(input) { Ok(ts) => ts.into(), Err(e) => { From d41aa072fc204398518edbc2ee935498f2cde414 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 23 Sep 2024 02:15:14 -0700 Subject: [PATCH 11/78] feat: teach macros about `migrate.ignored-chars` --- sqlx-macros-core/src/migrate.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 485c5c8f64..0c36bb93e3 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -10,7 +10,7 @@ use sqlx_core::migrate::{Migration, MigrationType}; use syn::LitStr; use syn::spanned::Spanned; use sqlx_core::config::Config; -use sqlx_core::migrate::{Migration, MigrationType}; +use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; pub const DEFAULT_PATH: &str = "./migrations"; @@ -113,7 +113,8 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result Date: Sat, 5 Oct 2024 15:21:32 -0700 Subject: [PATCH 12/78] feat: teach `sqlx-cli` about `migrate.defaults` --- sqlx-cli/src/lib.rs | 9 ++- sqlx-cli/src/migrate.rs | 33 +++++++--- sqlx-cli/src/opt.rs | 140 ++++++++++++++++++++++++++-------------- 3 files changed, 120 insertions(+), 62 deletions(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 4d2815d8af..683affaead 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,6 +1,6 @@ use std::future::Future; use std::io; -use std::path::{Path, PathBuf}; +use std::path::{PathBuf}; use std::time::Duration; use anyhow::{Context, Result}; @@ -23,15 +23,14 @@ mod prepare; pub use crate::opt::Opt; -pub use sqlx::_unstable::config; -use crate::config::Config; +pub use sqlx::_unstable::config::{self, Config}; pub async fn run(opt: Opt) -> Result<()> { - let config = config_from_current_dir()?; + let config = config_from_current_dir().await?; match opt.command { Command::Migrate(migrate) => match migrate.command { - MigrateCommand::Add(opts) => migrate::add(opts).await?, + MigrateCommand::Add(opts)=> migrate::add(config, opts).await?, MigrateCommand::Run { source, config, diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 6d32c9e846..b92c2ecef9 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,5 +1,4 @@ -use crate::config::Config; -use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; +use crate::opt::{AddMigrationOpts, ConnectOpts}; use anyhow::{bail, Context}; use console::style; use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator}; @@ -10,6 +9,7 @@ use std::fmt::Write; use std::fs::{self, File}; use std::path::Path; use std::time::Duration; +use crate::config::Config; pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { let config = opts.config.load_config().await?; @@ -22,22 +22,37 @@ pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { let version_prefix = opts.version_prefix(&config, &migrator); - if opts.reversible(&config, &migrator) { + std::io::Write::write_all(&mut file, migration_type.file_content().as_bytes())?; + + Ok(()) +} + +pub async fn add( + config: 
&Config, + opts: AddMigrationOpts, +) -> anyhow::Result<()> { + fs::create_dir_all(&opts.source).context("Unable to create migrations directory")?; + + let migrator = Migrator::new(opts.source.as_ref()).await?; + + let version_prefix = opts.version_prefix(config, &migrator); + + if opts.reversible(config, &migrator) { create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::ReversibleUp, )?; create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::ReversibleDown, )?; } else { create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::Simple, @@ -45,13 +60,13 @@ pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { } // if the migrations directory is empty - let has_existing_migrations = fs::read_dir(source) + let has_existing_migrations = fs::read_dir(&opts.source) .map(|mut dir| dir.next().is_some()) .unwrap_or(false); if !has_existing_migrations { - let quoted_source = if opts.source.source.is_some() { - format!("{source:?}") + let quoted_source = if *opts.source != "migrations" { + format!("{:?}", *opts.source) } else { "".to_string() }; diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 7ae6733e72..3b1ae734e8 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,10 +1,14 @@ use std::env; use std::ops::{Deref, Not}; +use std::path::Path; use anyhow::Context; +use chrono::Utc; use clap::{Args, Parser}; #[cfg(feature = "completions")] use clap_complete::Shell; -use sqlx::config::Config; +use crate::config::Config; +use sqlx::migrate::Migrator; +use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; #[derive(Parser, Debug)] #[clap(version, about, author, styles = HELP_STYLES)] @@ -146,7 +150,7 @@ pub enum MigrateCommand { /// Create a new migration with the given description. /// /// -------------------------------- - /// + /// /// Migrations may either be simple, or reversible. /// /// Reversible migrations can be reverted with `sqlx migrate revert`, simple migrations cannot. @@ -169,7 +173,7 @@ pub enum MigrateCommand { /// It is recommended to always back up the database before running migrations. /// /// -------------------------------- - /// + /// /// For convenience, this command attempts to detect if reversible migrations are in-use. /// /// If the latest existing migration is reversible, the new migration will also be reversible. @@ -181,7 +185,7 @@ pub enum MigrateCommand { /// The default type to use can also be set in `sqlx.toml`. /// /// -------------------------------- - /// + /// /// A version number will be automatically assigned to the migration. /// /// Migrations are applied in ascending order by version number. @@ -191,9 +195,9 @@ pub enum MigrateCommand { /// less than _any_ previously applied migration. /// /// Migrations should only be created with increasing version number. - /// + /// /// -------------------------------- - /// + /// /// For convenience, this command will attempt to detect if sequential versioning is in use, /// and if so, continue the sequence. /// @@ -287,6 +291,35 @@ pub enum MigrateCommand { }, } +#[derive(Args, Debug)] +pub struct AddMigrationOpts { + pub description: String, + + #[clap(flatten)] + pub source: Source, + + /// If set, create an up-migration only. Conflicts with `--reversible`. + #[clap(long, conflicts_with = "reversible")] + simple: bool, + + /// If set, create a pair of up and down migration files with same version. + /// + /// Conflicts with `--simple`. 
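+ ///
+ /// If neither `--simple` nor `--reversible` is given, the default comes from
+ /// `migrate.defaults` in `sqlx.toml`; when no default is configured, the type
+ /// of the latest existing migration is used.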
+ #[clap(short, long, conflicts_with = "simple")] + reversible: bool, + + /// If set, use timestamp versioning for the new migration. Conflicts with `--sequential`. + /// + /// Timestamp format: `YYYYMMDDHHMMSS` + #[clap(short, long, conflicts_with = "sequential")] + timestamp: bool, + + /// If set, use sequential versioning for the new migration. Conflicts with `--timestamp`. + #[clap(short, long, conflicts_with = "timestamp")] + sequential: bool, +} + +/// Argument for the migration scripts source. #[derive(Args, Debug)] pub struct AddMigrationOpts { pub description: String, @@ -346,6 +379,12 @@ impl MigrationSourceOpt { } } +impl AsRef for Source { + fn as_ref(&self) -> &Path { + Path::new(&self.source) + } +} + /// Argument for the database URL. #[derive(Args, Debug)] pub struct ConnectOpts { @@ -474,57 +513,38 @@ impl Not for IgnoreMissing { impl AddMigrationOpts { pub fn reversible(&self, config: &Config, migrator: &Migrator) -> bool { - if self.reversible { - return true; - } - if self.simple { - return false; - } + if self.reversible { return true; } + if self.simple { return false; } match config.migrate.defaults.migration_type { - DefaultMigrationType::Inferred => migrator - .iter() - .last() - .is_some_and(|m| m.migration_type.is_reversible()), - DefaultMigrationType::Simple => false, - DefaultMigrationType::Reversible => true, + DefaultMigrationType::Inferred => { + migrator + .iter() + .last() + .is_some_and(|m| m.migration_type.is_reversible()) + } + DefaultMigrationType::Simple => { + false + } + DefaultMigrationType::Reversible => { + true + } } } pub fn version_prefix(&self, config: &Config, migrator: &Migrator) -> String { let default_versioning = &config.migrate.defaults.migration_versioning; - match (self.timestamp, self.sequential, default_versioning) { - (true, false, _) | (false, false, DefaultVersioning::Timestamp) => next_timestamp(), - (false, true, _) | (false, false, DefaultVersioning::Sequential) => fmt_sequential( - migrator - .migrations - .last() - .map_or(1, |migration| migration.version + 1), - ), - (false, false, DefaultVersioning::Inferred) => { - migrator - .migrations - .rchunks(2) - .next() - .and_then(|migrations| { - match migrations { - [previous, latest] => { - // If the latest two versions differ by 1, infer sequential. - (latest.version - previous.version == 1) - .then_some(latest.version + 1) - } - [latest] => { - // If only one migration exists and its version is 0 or 1, infer sequential - matches!(latest.version, 0 | 1).then_some(latest.version + 1) - } - _ => unreachable!(), - } - }) - .map_or_else(next_timestamp, fmt_sequential) - } - (true, true, _) => unreachable!("BUG: Clap should have rejected this case"), + if self.timestamp || matches!(default_versioning, DefaultVersioning::Timestamp) { + return next_timestamp(); } + + if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) { + return next_sequential(migrator) + .unwrap_or_else(|| fmt_sequential(1)); + } + + next_sequential(migrator).unwrap_or_else(next_timestamp) } } @@ -532,6 +552,30 @@ fn next_timestamp() -> String { Utc::now().format("%Y%m%d%H%M%S").to_string() } +fn next_sequential(migrator: &Migrator) -> Option { + let next_version = migrator + .migrations + .windows(2) + .last() + .and_then(|migrations| { + match migrations { + [previous, latest] => { + // If the latest two versions differ by 1, infer sequential. 
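+ // Otherwise this yields `None` and the caller picks the fallback
+ // (a fresh sequential `0001` or a timestamp, depending on the flags).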
+ (latest.version - previous.version == 1) + .then_some(latest.version + 1) + }, + [latest] => { + // If only one migration exists and its version is 0 or 1, infer sequential + matches!(latest.version, 0 | 1) + .then_some(latest.version + 1) + } + _ => unreachable!(), + } + }); + + next_version.map(fmt_sequential) +} + fn fmt_sequential(version: i64) -> String { format!("{version:04}") } From b4efb9e01b7a228bc6e4110238eaef6ad681fdea Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 15 Jan 2025 10:31:03 -0800 Subject: [PATCH 13/78] feat: teach `sqlx-cli` about `migrate.migrations-dir` --- Cargo.lock | 27 +++++++-- sqlx-cli/src/database.rs | 12 ++-- sqlx-cli/src/lib.rs | 12 ++-- sqlx-cli/src/migrate.rs | 68 ++++++++--------------- sqlx-cli/src/opt.rs | 71 +----------------------- sqlx-core/src/config/migrate.rs | 15 ++++- sqlx-macros-core/src/migrate.rs | 3 +- sqlx-macros/src/lib.rs | 2 +- sqlx-postgres/src/connection/describe.rs | 4 +- 9 files changed, 79 insertions(+), 135 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3c0d9eccb5..e86b9d4a27 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2791,7 +2791,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit", + "toml_edit 0.21.1", ] [[package]] @@ -3566,7 +3566,6 @@ dependencies = [ "hashbrown 0.15.2", "hashlink", "indexmap 2.7.0", - "ipnet", "ipnetwork", "log", "mac_address", @@ -4389,7 +4388,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.22.22", ] [[package]] @@ -4401,6 +4400,17 @@ dependencies = [ "serde", ] +[[package]] +name = "toml_edit" +version = "0.22.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +dependencies = [ + "indexmap 2.7.0", + "toml_datetime", + "winnow 0.5.40", +] + [[package]] name = "toml_edit" version = "0.22.22" @@ -4411,7 +4421,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.22", ] [[package]] @@ -5109,6 +5119,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.6.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" +dependencies = [ + "memchr", +] + [[package]] name = "write16" version = "1.0.0" diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index 4847eec2ff..2f5f00b004 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -1,7 +1,7 @@ -use crate::opt::{ConnectOpts, MigrationSourceOpt}; use crate::{migrate, Config}; -use console::{style, Term}; -use dialoguer::Confirm; +use crate::opt::{ConnectOpts, MigrationSourceOpt}; +use console::style; +use promptly::{prompt, ReadlineError}; use sqlx::any::Any; use sqlx::migrate::MigrateDatabase; use std::{io, mem}; @@ -56,11 +56,7 @@ pub async fn reset( setup(config, migration_source, connect_opts).await } -pub async fn setup( - config: &Config, - migration_source: &MigrationSourceOpt, - connect_opts: &ConnectOpts, -) -> anyhow::Result<()> { +pub async fn setup(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { create(connect_opts).await?; migrate::run(config, migration_source, connect_opts, false, false, None).await } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 683affaead..90b5440a9f 100644 --- 
a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -42,7 +42,7 @@ pub async fn run(opt: Opt) -> Result<()> { connect_opts.populate_db_url(config)?; migrate::run( - &config, + config, &source, &connect_opts, dry_run, @@ -62,7 +62,7 @@ pub async fn run(opt: Opt) -> Result<()> { connect_opts.populate_db_url(config)?; migrate::revert( - &config, + config, &source, &connect_opts, dry_run, @@ -77,9 +77,9 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; - migrate::info(&source, &connect_opts).await? + migrate::info(config, &source, &connect_opts).await? }, - MigrateCommand::BuildScript { source, force } => migrate::build_script(&source, force)?, + MigrateCommand::BuildScript { source, force } => migrate::build_script(config, &source, force)?, }, Command::Database(database) => match database.command { @@ -102,14 +102,14 @@ pub async fn run(opt: Opt) -> Result<()> { force, } => { connect_opts.populate_db_url(config)?; - database::reset(&source, &connect_opts, !confirmation.yes, force).await? + database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? }, DatabaseCommand::Setup { source, mut connect_opts, } => { connect_opts.populate_db_url(config)?; - database::setup(&source, &connect_opts).await? + database::setup(config, &source, &connect_opts).await? }, }, diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index b92c2ecef9..c124a0bdb2 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,7 +1,7 @@ -use crate::opt::{AddMigrationOpts, ConnectOpts}; +use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; use anyhow::{bail, Context}; use console::style; -use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator}; +use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith}; use sqlx::Connection; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; @@ -11,48 +11,34 @@ use std::path::Path; use std::time::Duration; use crate::config::Config; -pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { - let config = opts.config.load_config().await?; - - let source = opts.source.resolve_path(&config); - - fs::create_dir_all(source).context("Unable to create migrations directory")?; - - let migrator = opts.source.resolve(&config).await?; - - let version_prefix = opts.version_prefix(&config, &migrator); - - std::io::Write::write_all(&mut file, migration_type.file_content().as_bytes())?; - - Ok(()) -} - pub async fn add( config: &Config, opts: AddMigrationOpts, ) -> anyhow::Result<()> { - fs::create_dir_all(&opts.source).context("Unable to create migrations directory")?; + let source = opts.source.resolve(config); + + fs::create_dir_all(source).context("Unable to create migrations directory")?; - let migrator = Migrator::new(opts.source.as_ref()).await?; + let migrator = Migrator::new(Path::new(source)).await?; let version_prefix = opts.version_prefix(config, &migrator); if opts.reversible(config, &migrator) { create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::ReversibleUp, )?; create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::ReversibleDown, )?; } else { create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::Simple, @@ -60,13 +46,13 @@ pub async fn add( } // if the migrations directory is empty - let has_existing_migrations = fs::read_dir(&opts.source) + let has_existing_migrations = fs::read_dir(source) 
.map(|mut dir| dir.next().is_some()) .unwrap_or(false); if !has_existing_migrations { - let quoted_source = if *opts.source != "migrations" { - format!("{:?}", *opts.source) + let quoted_source = if opts.source.source.is_some() { + format!("{source:?}") } else { "".to_string() }; @@ -138,13 +124,10 @@ fn short_checksum(checksum: &[u8]) -> String { s } -pub async fn info( - config: &Config, - migration_source: &MigrationSourceOpt, - connect_opts: &ConnectOpts, -) -> anyhow::Result<()> { - let migrator = migration_source.resolve(config).await?; - +pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { + let source = migration_source.resolve(config); + + let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; let mut conn = crate::connect(connect_opts).await?; // FIXME: we shouldn't actually be creating anything here @@ -236,8 +219,9 @@ pub async fn run( ignore_missing: bool, target_version: Option, ) -> anyhow::Result<()> { - let migrator = migration_source.resolve(config).await?; - + let source = migration_source.resolve(config); + + let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if !migrator.version_exists(target_version) { bail!(MigrateError::VersionNotPresent(target_version)); @@ -338,8 +322,8 @@ pub async fn revert( ignore_missing: bool, target_version: Option, ) -> anyhow::Result<()> { - let migrator = migration_source.resolve(config).await?; - + let source = migration_source.resolve(config); + let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if target_version != 0 && !migrator.version_exists(target_version) { bail!(MigrateError::VersionNotPresent(target_version)); @@ -434,13 +418,9 @@ pub async fn revert( Ok(()) } -pub fn build_script( - config: &Config, - migration_source: &MigrationSourceOpt, - force: bool, -) -> anyhow::Result<()> { - let source = migration_source.resolve_path(config); - +pub fn build_script(config: &Config, migration_source: &MigrationSourceOpt, force: bool) -> anyhow::Result<()> { + let source = migration_source.resolve(config); + anyhow::ensure!( Path::new("Cargo.toml").exists(), "must be run in a Cargo project root" diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 3b1ae734e8..6ea009b281 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,6 +1,5 @@ use std::env; use std::ops::{Deref, Not}; -use std::path::Path; use anyhow::Context; use chrono::Utc; use clap::{Args, Parser}; @@ -114,9 +113,6 @@ pub enum DatabaseCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - #[clap(flatten)] connect_opts: ConnectOpts, @@ -130,9 +126,6 @@ pub enum DatabaseCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - #[clap(flatten)] connect_opts: ConnectOpts, }, @@ -219,9 +212,6 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - /// List all the migrations to be run without applying #[clap(long)] dry_run: bool, @@ -243,9 +233,6 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - /// List the migration to be reverted without applying #[clap(long)] dry_run: bool, @@ -268,9 +255,6 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - #[clap(flatten)] connect_opts: 
ConnectOpts, }, @@ -282,44 +266,12 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, - #[clap(flatten)] - config: ConfigOpt, - /// Overwrite the build script if it already exists. #[clap(long)] force: bool, }, } -#[derive(Args, Debug)] -pub struct AddMigrationOpts { - pub description: String, - - #[clap(flatten)] - pub source: Source, - - /// If set, create an up-migration only. Conflicts with `--reversible`. - #[clap(long, conflicts_with = "reversible")] - simple: bool, - - /// If set, create a pair of up and down migration files with same version. - /// - /// Conflicts with `--simple`. - #[clap(short, long, conflicts_with = "simple")] - reversible: bool, - - /// If set, use timestamp versioning for the new migration. Conflicts with `--sequential`. - /// - /// Timestamp format: `YYYYMMDDHHMMSS` - #[clap(short, long, conflicts_with = "sequential")] - timestamp: bool, - - /// If set, use sequential versioning for the new migration. Conflicts with `--timestamp`. - #[clap(short, long, conflicts_with = "timestamp")] - sequential: bool, -} - -/// Argument for the migration scripts source. #[derive(Args, Debug)] pub struct AddMigrationOpts { pub description: String, @@ -327,9 +279,6 @@ pub struct AddMigrationOpts { #[clap(flatten)] pub source: MigrationSourceOpt, - #[clap(flatten)] - pub config: ConfigOpt, - /// If set, create an up-migration only. Conflicts with `--reversible`. #[clap(long, conflicts_with = "reversible")] simple: bool, @@ -355,34 +304,20 @@ pub struct AddMigrationOpts { #[derive(Args, Debug)] pub struct MigrationSourceOpt { /// Path to folder containing migrations. - /// + /// /// Defaults to `migrations/` if not specified, but a different default may be set by `sqlx.toml`. #[clap(long)] pub source: Option, } impl MigrationSourceOpt { - pub fn resolve_path<'a>(&'a self, config: &'a Config) -> &'a str { + pub fn resolve<'a>(&'a self, config: &'a Config) -> &'a str { if let Some(source) = &self.source { return source; } - + config.migrate.migrations_dir() } - - pub async fn resolve(&self, config: &Config) -> Result { - Migrator::new(ResolveWith( - self.resolve_path(config), - config.migrate.to_resolve_config(), - )) - .await - } -} - -impl AsRef for Source { - fn as_ref(&self) -> &Path { - Path::new(&self.source) - } } /// Argument for the database URL. diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 64529f9f02..666ed5bf92 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -85,7 +85,7 @@ pub struct Config { /// To make your migrations amenable to reformatting, you may wish to tell SQLx to ignore /// _all_ whitespace characters in migrations. /// - /// ##### Warning: Beware Syntatically Significant Whitespace! + /// ##### Warning: Beware Syntactically Significant Whitespace! /// If your migrations use string literals or quoted identifiers which contain whitespace, /// this configuration will cause the migration machinery to ignore some changes to these. /// This may result in a mismatch between the development and production versions of @@ -179,3 +179,16 @@ pub enum DefaultVersioning { /// Use sequential integers for migration versions. 
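+ /// (Sequential versions generated by the CLI are zero-padded to at least four digits, e.g. `0001`.)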
Sequential, } + +#[cfg(feature = "migrate")] +impl Config { + pub fn migrations_dir(&self) -> &str { + self.migrations_dir.as_deref().unwrap_or("migrations") + } + + pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { + let mut config = crate::migrate::ResolveConfig::new(); + config.ignore_chars(self.ignored_chars.iter().copied()); + config + } +} \ No newline at end of file diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 0c36bb93e3..fc88da0a7b 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -113,8 +113,7 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result TokenStream { pub fn migrate(input: TokenStream) -> TokenStream { use syn::LitStr; - let input = syn::parse_macro_input!(input as LitStr); + let input = syn::parse_macro_input!(input as Option); match migrate::expand(input) { Ok(ts) => ts.into(), Err(e) => { diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 1b8b18edd8..7cfca80de2 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -219,7 +219,8 @@ impl PgConnection { should_fetch: bool, ) -> Result { if let Some(origin) = - self.cache_table_to_column_names + self.inner + .cache_table_to_column_names .get(&relation_id) .and_then(|table_columns| { let column_name = table_columns.columns.get(&attribute_no).cloned()?; @@ -255,6 +256,7 @@ impl PgConnection { }; let table_columns = self + .inner .cache_table_to_column_names .entry(relation_id) .or_insert_with(|| TableColumns { From 528a7d40461cb97776fb46fe5db390e444476c14 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 22 Jan 2025 14:24:18 -0800 Subject: [PATCH 14/78] feat: teach `sqlx-cli` about `migrate.table-name` --- sqlx-cli/src/migrate.rs | 31 ++--------- sqlx-cli/tests/common/mod.rs | 14 +++-- sqlx-core/src/any/migrate.rs | 38 ++------------ sqlx-core/src/config/migrate.rs | 4 ++ sqlx-core/src/migrate/migrate.rs | 15 ++---- sqlx-core/src/migrate/migrator.rs | 6 +-- sqlx-mysql/src/migrate.rs | 85 ++++++++++++------------------- sqlx-postgres/src/migrate.rs | 69 +++++++++---------------- sqlx-sqlite/src/migrate.rs | 76 ++++++++------------------- 9 files changed, 101 insertions(+), 237 deletions(-) diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index c124a0bdb2..9e0119682e 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -130,13 +130,7 @@ pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connec let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; let mut conn = crate::connect(connect_opts).await?; - // FIXME: we shouldn't actually be creating anything here - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let applied_migrations: HashMap<_, _> = conn .list_applied_migrations(config.migrate.table_name()) @@ -230,21 +224,14 @@ pub async fn run( let mut conn = crate::connect(connect_opts).await?; - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let version = 
conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn - .list_applied_migrations(config.migrate.table_name()) - .await?; + let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -332,22 +319,14 @@ pub async fn revert( let mut conn = crate::connect(connect_opts).await?; - // FIXME: we should not be creating anything here if it doesn't exist - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn - .list_applied_migrations(config.migrate.table_name()) - .await?; + let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index 66e7924859..0514ca721f 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -6,11 +6,12 @@ use std::{ env, fs, path::{Path, PathBuf}, }; +use sqlx::_unstable::config::Config; pub struct TestDatabase { file_path: PathBuf, - migrations_path: PathBuf, - pub config_path: Option, + migrations: String, + config: &'static Config, } impl TestDatabase { @@ -32,8 +33,8 @@ impl TestDatabase { let this = Self { file_path, - migrations_path: Path::new("tests").join(migrations), - config_path: None, + migrations: String::from(migrations_path.to_str().unwrap()), + config: Config::from_crate(), }; Command::cargo_bin("cargo-sqlx") @@ -92,10 +93,7 @@ impl TestDatabase { let mut conn = SqliteConnection::connect(&self.connection_string()) .await .unwrap(); - - let config = Config::default(); - - conn.list_applied_migrations(config.migrate.table_name()) + conn.list_applied_migrations(self.config.migrate.table_name()) .await .unwrap() .iter() diff --git a/sqlx-core/src/any/migrate.rs b/sqlx-core/src/any/migrate.rs index 69b5bf6ab6..b287ec45e5 100644 --- a/sqlx-core/src/any/migrate.rs +++ b/sqlx-core/src/any/migrate.rs @@ -44,44 +44,16 @@ impl MigrateDatabase for Any { } impl Migrate for AnyConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { - self.get_migrate()? - .create_schema_if_not_exists(schema_name) - .await - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { - self.get_migrate()? 
- .ensure_migrations_table(table_name) - .await - }) + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async { self.get_migrate()?.ensure_migrations_table(table_name).await }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async { self.get_migrate()?.dirty_version(table_name).await }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { - Box::pin(async { - self.get_migrate()? - .list_applied_migrations(table_name) - .await - }) + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + Box::pin(async { self.get_migrate()?.list_applied_migrations(table_name).await }) } fn lock(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> { diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 666ed5bf92..a70938b209 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -186,6 +186,10 @@ impl Config { self.migrations_dir.as_deref().unwrap_or("migrations") } + pub fn table_name(&self) -> &str { + self.table_name.as_deref().unwrap_or("_sqlx_migrations") + } + pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { let mut config = crate::migrate::ResolveConfig::new(); config.ignore_chars(self.ignored_chars.iter().copied()); diff --git a/sqlx-core/src/migrate/migrate.rs b/sqlx-core/src/migrate/migrate.rs index 841f775966..b2c36bc1d0 100644 --- a/sqlx-core/src/migrate/migrate.rs +++ b/sqlx-core/src/migrate/migrate.rs @@ -33,23 +33,14 @@ pub trait Migrate { // ensure migrations table exists // will create or migrate it if needed - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>>; + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>>; // Return the version on which the database is dirty or None otherwise. // "dirty" means there is a partially applied migration that failed. - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>>; + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; // Return the ordered list of applied migrations - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>>; + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; // Should acquire a database lock so that only one migration process // can run at a time. 
[`Migrate`] will call this function before applying diff --git a/sqlx-core/src/migrate/migrator.rs b/sqlx-core/src/migrate/migrator.rs index 554073b918..bdb18aa6de 100644 --- a/sqlx-core/src/migrate/migrator.rs +++ b/sqlx-core/src/migrate/migrator.rs @@ -27,10 +27,6 @@ pub struct Migrator { pub table_name: Cow<'static, str>, } - #[doc(hidden)] - pub create_schemas: Cow<'static, [Cow<'static, str>]>, -} - impl Migrator { #[doc(hidden)] pub const DEFAULT: Migrator = Migrator { @@ -190,7 +186,7 @@ impl Migrator { // Target version reached break; } - + if migration.migration_type.is_down_migration() { continue; } diff --git a/sqlx-mysql/src/migrate.rs b/sqlx-mysql/src/migrate.rs index 45ca7d98ef..83e823dcf1 100644 --- a/sqlx-mysql/src/migrate.rs +++ b/sqlx-mysql/src/migrate.rs @@ -2,6 +2,8 @@ use std::str::FromStr; use std::time::Duration; use std::time::Instant; +use futures_core::future::BoxFuture; +pub(crate) use sqlx_core::migrate::*; use crate::connection::{ConnectOptions, Connection}; use crate::error::Error; use crate::executor::Executor; @@ -74,27 +76,11 @@ impl MigrateDatabase for MySql { } impl Migrate for MySqlConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // language=SQL - self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) - .await?; - - Ok(()) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=MySQL - self.execute(&*format!( - r#" + self.execute( + &*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -103,23 +89,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) + "#), + ) .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as(&format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -127,17 +110,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = query_as(&format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = + query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -208,12 +187,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later modify the flag. 
// // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?, ?, FALSE, ?, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -226,13 +205,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET success = TRUE WHERE version = ? - "# - )) + "#), + ) .bind(migration.version) .execute(&mut *tx) .await?; @@ -246,13 +225,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( let elapsed = start.elapsed(); #[allow(clippy::cast_possible_truncation)] - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET execution_time = ? WHERE version = ? - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -280,13 +259,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later remove the migration altogether. // // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET success = FALSE WHERE version = ? - "# - )) + "#), + ) .bind(migration.version) .execute(&mut *tx) .await?; diff --git a/sqlx-postgres/src/migrate.rs b/sqlx-postgres/src/migrate.rs index 8275bda188..2646466399 100644 --- a/sqlx-postgres/src/migrate.rs +++ b/sqlx-postgres/src/migrate.rs @@ -111,27 +111,11 @@ impl MigrateDatabase for Postgres { } impl Migrate for PgConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // language=SQL - self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) - .await?; - - Ok(()) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQL - self.execute(&*format!( - r#" + self.execute( + &*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -140,23 +124,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BYTEA NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) + "#), + ) .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as(&format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &*format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -164,17 +145,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = query_as(&format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: 
Vec<(i64, Vec)> = + query_as(&*format!("SELECT version, checksum FROM {table_name} ORDER BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -253,13 +230,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query(&format!( - r#" + let _ = query( + &*format!(r#" UPDATE {table_name} SET execution_time = $1 WHERE version = $2 - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -306,12 +283,12 @@ async fn execute_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&format!( - r#" + let _ = query( + &*format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( $1, $2, TRUE, $3, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -332,7 +309,7 @@ async fn revert_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&format!(r#"DELETE FROM {table_name} WHERE version = $1"#)) + let _ = query(&*format!(r#"DELETE FROM {table_name} WHERE version = $1"#)) .bind(migration.version) .execute(conn) .await?; diff --git a/sqlx-sqlite/src/migrate.rs b/sqlx-sqlite/src/migrate.rs index e475f70308..bd339c7b51 100644 --- a/sqlx-sqlite/src/migrate.rs +++ b/sqlx-sqlite/src/migrate.rs @@ -65,35 +65,10 @@ impl MigrateDatabase for Sqlite { } impl Migrate for SqliteConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // Check if the schema already exists; if so, don't error. - let schema_version: Option = - query_scalar(&format!("PRAGMA {schema_name}.schema_version")) - .fetch_optional(&mut *self) - .await?; - - if schema_version.is_some() { - return Ok(()); - } - - Err(MigrateError::CreateSchemasNotSupported( - format!("cannot create new schema {schema_name}; creation of additional schemas in SQLite requires attaching extra database files"), - )) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQLite - self.execute(&*format!( - r#" + self.execute(&*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -102,23 +77,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) - .await?; + "#), + ) + .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let row: Option<(i64,)> = query_as(&format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -126,17 +98,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) 
-> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let rows: Vec<(i64, Vec)> = query_as(&format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = + query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -178,12 +146,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?1, ?2, TRUE, ?3, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -200,13 +168,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET execution_time = ?1 WHERE version = ?2 - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) From 2ec36988126d4e7b53c1b0fd34f58cf67dadcee6 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 22 Jan 2025 15:32:50 -0800 Subject: [PATCH 15/78] feat: introduce `migrate.create-schemas` --- sqlx-cli/src/database.rs | 8 ++- sqlx-cli/src/lib.rs | 24 ++++--- sqlx-cli/src/migrate.rs | 68 +++++++++++++----- sqlx-cli/src/opt.rs | 67 +++++++++--------- sqlx-cli/tests/common/mod.rs | 1 - sqlx-core/src/any/migrate.rs | 38 ++++++++-- sqlx-core/src/config/migrate.rs | 20 +++++- sqlx-core/src/migrate/migrate.rs | 15 +++- sqlx-core/src/migrate/migrator.rs | 19 ++++- sqlx-macros-core/src/migrate.rs | 35 +++++----- sqlx-macros-core/src/test_attr.rs | 3 +- sqlx-mysql/src/migrate.rs | 85 ++++++++++++++--------- sqlx-postgres/src/connection/describe.rs | 24 +++---- sqlx-postgres/src/connection/establish.rs | 3 +- sqlx-postgres/src/migrate.rs | 67 ++++++++++++------ sqlx-sqlite/src/migrate.rs | 76 ++++++++++++++------ 16 files changed, 366 insertions(+), 187 deletions(-) diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index 2f5f00b004..bcfb911692 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -1,5 +1,5 @@ -use crate::{migrate, Config}; use crate::opt::{ConnectOpts, MigrationSourceOpt}; +use crate::{migrate, Config}; use console::style; use promptly::{prompt, ReadlineError}; use sqlx::any::Any; @@ -56,7 +56,11 @@ pub async fn reset( setup(config, migration_source, connect_opts).await } -pub async fn setup(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { +pub async fn setup( + config: &Config, + migration_source: &MigrationSourceOpt, + connect_opts: &ConnectOpts, +) -> anyhow::Result<()> { create(connect_opts).await?; migrate::run(config, migration_source, connect_opts, false, false, None).await } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 90b5440a9f..987b956d9d 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,6 +1,6 @@ use std::future::Future; use std::io; -use std::path::{PathBuf}; +use std::path::PathBuf; use std::time::Duration; use anyhow::{Context, Result}; @@ -30,7 +30,7 @@ pub async fn run(opt: Opt) -> Result<()> { match opt.command { Command::Migrate(migrate) => match migrate.command { - MigrateCommand::Add(opts)=> migrate::add(config, opts).await?, + MigrateCommand::Add(opts) => 
migrate::add(config, opts).await?, MigrateCommand::Run { source, config, @@ -78,15 +78,17 @@ pub async fn run(opt: Opt) -> Result<()> { connect_opts.populate_db_url(config)?; migrate::info(config, &source, &connect_opts).await? - }, - MigrateCommand::BuildScript { source, force } => migrate::build_script(config, &source, force)?, + } + MigrateCommand::BuildScript { source, force } => { + migrate::build_script(config, &source, force)? + } }, Command::Database(database) => match database.command { DatabaseCommand::Create { mut connect_opts } => { connect_opts.populate_db_url(config)?; database::create(&connect_opts).await? - }, + } DatabaseCommand::Drop { confirmation, mut connect_opts, @@ -94,7 +96,7 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; database::drop(&connect_opts, !confirmation.yes, force).await? - }, + } DatabaseCommand::Reset { confirmation, source, @@ -103,14 +105,14 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? - }, + } DatabaseCommand::Setup { source, mut connect_opts, } => { connect_opts.populate_db_url(config)?; database::setup(config, &source, &connect_opts).await? - }, + } }, Command::Prepare { @@ -122,7 +124,7 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; prepare::run(check, all, workspace, connect_opts, args).await? - }, + } #[cfg(feature = "completions")] Command::Completions { shell } => completions::run(shell), @@ -187,6 +189,6 @@ async fn config_from_current_dir() -> anyhow::Result<&'static Config> { Config::read_with_or_default(move || Ok(path)) }) - .await - .context("unexpected error loading config") + .await + .context("unexpected error loading config") } diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 9e0119682e..3618fbe7a3 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,7 +1,10 @@ +use crate::config::Config; use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; use anyhow::{bail, Context}; use console::style; -use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith}; +use sqlx::migrate::{ + AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith, +}; use sqlx::Connection; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; @@ -9,14 +12,10 @@ use std::fmt::Write; use std::fs::{self, File}; use std::path::Path; use std::time::Duration; -use crate::config::Config; -pub async fn add( - config: &Config, - opts: AddMigrationOpts, -) -> anyhow::Result<()> { +pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> { let source = opts.source.resolve(config); - + fs::create_dir_all(source).context("Unable to create migrations directory")?; let migrator = Migrator::new(Path::new(source)).await?; @@ -124,13 +123,27 @@ fn short_checksum(checksum: &[u8]) -> String { s } -pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { +pub async fn info( + config: &Config, + migration_source: &MigrationSourceOpt, + connect_opts: &ConnectOpts, +) -> anyhow::Result<()> { let source = migration_source.resolve(config); - - let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; + + let migrator = Migrator::new(ResolveWith( + Path::new(source), + config.migrate.to_resolve_config(), + )) + .await?; 
let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + // FIXME: we shouldn't actually be creating anything here + for schema_name in &config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let applied_migrations: HashMap<_, _> = conn .list_applied_migrations(config.migrate.table_name()) @@ -214,7 +227,7 @@ pub async fn run( target_version: Option, ) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if !migrator.version_exists(target_version) { @@ -224,14 +237,21 @@ pub async fn run( let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + for schema_name in &config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; + let applied_migrations = conn + .list_applied_migrations(config.migrate.table_name()) + .await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -319,14 +339,22 @@ pub async fn revert( let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + // FIXME: we should not be creating anything here if it doesn't exist + for schema_name in &config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; + let applied_migrations = conn + .list_applied_migrations(config.migrate.table_name()) + .await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -397,9 +425,13 @@ pub async fn revert( Ok(()) } -pub fn build_script(config: &Config, migration_source: &MigrationSourceOpt, force: bool) -> anyhow::Result<()> { +pub fn build_script( + config: &Config, + migration_source: &MigrationSourceOpt, + force: bool, +) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + anyhow::ensure!( Path::new("Cargo.toml").exists(), "must be run in a Cargo project root" diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 6ea009b281..e3211c72b6 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,13 +1,13 @@ -use std::env; -use std::ops::{Deref, Not}; +use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; +use crate::config::Config; use anyhow::Context; use chrono::Utc; use clap::{Args, Parser}; #[cfg(feature = "completions")] use clap_complete::Shell; -use crate::config::Config; use sqlx::migrate::Migrator; -use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; +use std::env; +use std::ops::{Deref, Not}; #[derive(Parser, Debug)] #[clap(version, about, author, styles = HELP_STYLES)] @@ -143,7 +143,7 @@ 
pub enum MigrateCommand { /// Create a new migration with the given description. /// /// -------------------------------- - /// + /// /// Migrations may either be simple, or reversible. /// /// Reversible migrations can be reverted with `sqlx migrate revert`, simple migrations cannot. @@ -166,7 +166,7 @@ pub enum MigrateCommand { /// It is recommended to always back up the database before running migrations. /// /// -------------------------------- - /// + /// /// For convenience, this command attempts to detect if reversible migrations are in-use. /// /// If the latest existing migration is reversible, the new migration will also be reversible. @@ -178,7 +178,7 @@ pub enum MigrateCommand { /// The default type to use can also be set in `sqlx.toml`. /// /// -------------------------------- - /// + /// /// A version number will be automatically assigned to the migration. /// /// Migrations are applied in ascending order by version number. @@ -188,9 +188,9 @@ pub enum MigrateCommand { /// less than _any_ previously applied migration. /// /// Migrations should only be created with increasing version number. - /// + /// /// -------------------------------- - /// + /// /// For convenience, this command will attempt to detect if sequential versioning is in use, /// and if so, continue the sequence. /// @@ -304,7 +304,7 @@ pub struct AddMigrationOpts { #[derive(Args, Debug)] pub struct MigrationSourceOpt { /// Path to folder containing migrations. - /// + /// /// Defaults to `migrations/` if not specified, but a different default may be set by `sqlx.toml`. #[clap(long)] pub source: Option, @@ -315,7 +315,7 @@ impl MigrationSourceOpt { if let Some(source) = &self.source { return source; } - + config.migrate.migrations_dir() } } @@ -376,7 +376,9 @@ impl ConnectOpts { /// Require a database URL to be provided, otherwise /// return an error. pub fn expect_db_url(&self) -> anyhow::Result<&str> { - self.database_url.as_deref().context("BUG: database_url not populated") + self.database_url + .as_deref() + .context("BUG: database_url not populated") } /// Populate `database_url` from the environment, if not set. 
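// ---- Editor's aside (illustrative sketch; not part of this patch) ---------------------
// The `sqlx-cli` changes around this hunk resolve the migrations directory by precedence:
// an explicit `--source` flag wins, then the `migrations-dir` setting from `sqlx.toml`'s
// `[migrate]` table (if set), then the built-in default `migrations/`. A minimal,
// self-contained sketch of that fallback chain; the function and parameter names below
// are hypothetical and chosen only for illustration:
fn resolve_migrations_dir<'a>(
    cli_source: Option<&'a str>, // value of `--source`, if the flag was passed
    config_dir: Option<&'a str>, // `migrations-dir` from `[migrate]` in `sqlx.toml`, if set
) -> &'a str {
    // 1) CLI flag, 2) sqlx.toml value, 3) hard-coded default.
    cli_source.or(config_dir).unwrap_or("migrations")
}
// e.g. `resolve_migrations_dir(Some("db/migrations"), Some("other"))` yields "db/migrations",
// while `resolve_migrations_dir(None, None)` falls back to "migrations".
// ---------------------------------------------------------------------------------------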
@@ -400,7 +402,7 @@ impl ConnectOpts { } self.database_url = Some(url) - }, + } Err(env::VarError::NotPresent) => { anyhow::bail!("`--database-url` or `{var}`{context} must be set") } @@ -448,22 +450,20 @@ impl Not for IgnoreMissing { impl AddMigrationOpts { pub fn reversible(&self, config: &Config, migrator: &Migrator) -> bool { - if self.reversible { return true; } - if self.simple { return false; } + if self.reversible { + return true; + } + if self.simple { + return false; + } match config.migrate.defaults.migration_type { - DefaultMigrationType::Inferred => { - migrator - .iter() - .last() - .is_some_and(|m| m.migration_type.is_reversible()) - } - DefaultMigrationType::Simple => { - false - } - DefaultMigrationType::Reversible => { - true - } + DefaultMigrationType::Inferred => migrator + .iter() + .last() + .is_some_and(|m| m.migration_type.is_reversible()), + DefaultMigrationType::Simple => false, + DefaultMigrationType::Reversible => true, } } @@ -475,8 +475,7 @@ impl AddMigrationOpts { } if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) { - return next_sequential(migrator) - .unwrap_or_else(|| fmt_sequential(1)); + return next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)); } next_sequential(migrator).unwrap_or_else(next_timestamp) @@ -496,18 +495,16 @@ fn next_sequential(migrator: &Migrator) -> Option { match migrations { [previous, latest] => { // If the latest two versions differ by 1, infer sequential. - (latest.version - previous.version == 1) - .then_some(latest.version + 1) - }, + (latest.version - previous.version == 1).then_some(latest.version + 1) + } [latest] => { // If only one migration exists and its version is 0 or 1, infer sequential - matches!(latest.version, 0 | 1) - .then_some(latest.version + 1) + matches!(latest.version, 0 | 1).then_some(latest.version + 1) } _ => unreachable!(), } }); - + next_version.map(fmt_sequential) } diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index 0514ca721f..b4a70b7bec 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -6,7 +6,6 @@ use std::{ env, fs, path::{Path, PathBuf}, }; -use sqlx::_unstable::config::Config; pub struct TestDatabase { file_path: PathBuf, diff --git a/sqlx-core/src/any/migrate.rs b/sqlx-core/src/any/migrate.rs index b287ec45e5..69b5bf6ab6 100644 --- a/sqlx-core/src/any/migrate.rs +++ b/sqlx-core/src/any/migrate.rs @@ -44,16 +44,44 @@ impl MigrateDatabase for Any { } impl Migrate for AnyConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { self.get_migrate()?.ensure_migrations_table(table_name).await }) + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async { + self.get_migrate()? + .create_schema_if_not_exists(schema_name) + .await + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async { + self.get_migrate()? 
+ .ensure_migrations_table(table_name) + .await + }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async { self.get_migrate()?.dirty_version(table_name).await }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { - Box::pin(async { self.get_migrate()?.list_applied_migrations(table_name).await }) + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { + Box::pin(async { + self.get_migrate()? + .list_applied_migrations(table_name) + .await + }) } fn lock(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> { diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index a70938b209..4865e24c76 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -19,6 +19,20 @@ use std::collections::BTreeSet; serde(default, rename_all = "kebab-case") )] pub struct Config { + /// Specify the names of schemas to create if they don't already exist. + /// + /// This is done before checking the existence of the migrations table + /// (`_sqlx_migrations` or overridden `table_name` below) so that it may be placed in + /// one of these schemas. + /// + /// ### Example + /// `sqlx.toml`: + /// ```toml + /// [migrate] + /// create-schemas = ["foo"] + /// ``` + pub create_schemas: BTreeSet>, + /// Override the name of the table used to track executed migrations. /// /// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`. @@ -185,14 +199,14 @@ impl Config { pub fn migrations_dir(&self) -> &str { self.migrations_dir.as_deref().unwrap_or("migrations") } - + pub fn table_name(&self) -> &str { self.table_name.as_deref().unwrap_or("_sqlx_migrations") } - + pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { let mut config = crate::migrate::ResolveConfig::new(); config.ignore_chars(self.ignored_chars.iter().copied()); config } -} \ No newline at end of file +} diff --git a/sqlx-core/src/migrate/migrate.rs b/sqlx-core/src/migrate/migrate.rs index b2c36bc1d0..841f775966 100644 --- a/sqlx-core/src/migrate/migrate.rs +++ b/sqlx-core/src/migrate/migrate.rs @@ -33,14 +33,23 @@ pub trait Migrate { // ensure migrations table exists // will create or migrate it if needed - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>>; + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>>; // Return the version on which the database is dirty or None otherwise. // "dirty" means there is a partially applied migration that failed. - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>>; // Return the ordered list of applied migrations - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>>; // Should acquire a database lock so that only one migration process // can run at a time. 
[`Migrate`] will call this function before applying diff --git a/sqlx-core/src/migrate/migrator.rs b/sqlx-core/src/migrate/migrator.rs index bdb18aa6de..1ae4813106 100644 --- a/sqlx-core/src/migrate/migrator.rs +++ b/sqlx-core/src/migrate/migrator.rs @@ -25,6 +25,9 @@ pub struct Migrator { pub no_tx: bool, #[doc(hidden)] pub table_name: Cow<'static, str>, + + #[doc(hidden)] + pub create_schemas: Cow<'static, [Cow<'static, str>]>, } impl Migrator { @@ -35,6 +38,7 @@ impl Migrator { no_tx: false, locking: true, table_name: Cow::Borrowed("_sqlx_migrations"), + create_schemas: Cow::Borrowed(&[]), }; /// Creates a new instance with the given source. @@ -84,6 +88,19 @@ impl Migrator { self } + /// Add a schema name to be created if it does not already exist. + /// + /// May be used with [`Self::dangerous_set_table_name()`] to place the migrations table + /// in a new schema without requiring it to exist first. + /// + /// ### Note: Support Depends on Database + /// SQLite cannot create new schemas without attaching them to a database file, + /// the path of which must be specified separately in an [`ATTACH DATABASE`](https://www.sqlite.org/lang_attach.html) command. + pub fn create_schema(&mut self, schema_name: impl Into>) -> &Self { + self.create_schemas.to_mut().push(schema_name.into()); + self + } + /// Specify whether applied migrations that are missing from the resolved migrations should be ignored. pub fn set_ignore_missing(&mut self, ignore_missing: bool) -> &mut Self { self.ignore_missing = ignore_missing; @@ -186,7 +203,7 @@ impl Migrator { // Target version reached break; } - + if migration.migration_type.is_down_migration() { continue; } diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index fc88da0a7b..5342f8861e 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -6,11 +6,9 @@ use std::path::{Path, PathBuf}; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens, TokenStreamExt}; use sqlx_core::config::Config; -use sqlx_core::migrate::{Migration, MigrationType}; -use syn::LitStr; -use syn::spanned::Spanned; -use sqlx_core::config::Config; use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; +use syn::spanned::Spanned; +use syn::LitStr; pub const DEFAULT_PATH: &str = "./migrations"; @@ -87,7 +85,9 @@ impl ToTokens for QuoteMigration { } pub fn default_path(config: &Config) -> &str { - config.migrate.migrations_dir + config + .migrate + .migrations_dir .as_deref() .unwrap_or(DEFAULT_PATH) } @@ -95,12 +95,10 @@ pub fn default_path(config: &Config) -> &str { pub fn expand(path_arg: Option) -> crate::Result { let config = Config::from_crate(); - let path = match path_arg { - Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, - None => { - crate::common::resolve_path(default_path(config), Span::call_site()) - }? 
- }; + let path = match path_arg { + Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, + None => { crate::common::resolve_path(default_path(config), Span::call_site()) }?, + }; expand_with_path(config, &path) } @@ -138,18 +136,21 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result crate::Result { let path = crate::migrate::default_path(config); - let resolved_path = - crate::common::resolve_path(path, proc_macro2::Span::call_site())?; + let resolved_path = crate::common::resolve_path(path, proc_macro2::Span::call_site())?; if resolved_path.is_dir() { let migrator = crate::migrate::expand_with_path(config, &resolved_path)?; diff --git a/sqlx-mysql/src/migrate.rs b/sqlx-mysql/src/migrate.rs index 83e823dcf1..45ca7d98ef 100644 --- a/sqlx-mysql/src/migrate.rs +++ b/sqlx-mysql/src/migrate.rs @@ -2,8 +2,6 @@ use std::str::FromStr; use std::time::Duration; use std::time::Instant; -use futures_core::future::BoxFuture; -pub(crate) use sqlx_core::migrate::*; use crate::connection::{ConnectOptions, Connection}; use crate::error::Error; use crate::executor::Executor; @@ -76,11 +74,27 @@ impl MigrateDatabase for MySql { } impl Migrate for MySqlConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // language=SQL + self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) + .await?; + + Ok(()) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=MySQL - self.execute( - &*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -89,20 +103,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) + "# + )) .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as( - &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -110,13 +127,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = - query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -187,12 +208,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later modify the flag. 
// // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?, ?, FALSE, ?, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -205,13 +226,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET success = TRUE WHERE version = ? - "#), - ) + "# + )) .bind(migration.version) .execute(&mut *tx) .await?; @@ -225,13 +246,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( let elapsed = start.elapsed(); #[allow(clippy::cast_possible_truncation)] - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET execution_time = ? WHERE version = ? - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -259,13 +280,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later remove the migration altogether. // // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET success = FALSE WHERE version = ? - "#), - ) + "# + )) .bind(migration.version) .execute(&mut *tx) .await?; diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 7cfca80de2..b32331835b 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -218,18 +218,18 @@ impl PgConnection { attribute_no: i16, should_fetch: bool, ) -> Result { - if let Some(origin) = - self.inner - .cache_table_to_column_names - .get(&relation_id) - .and_then(|table_columns| { - let column_name = table_columns.columns.get(&attribute_no).cloned()?; - - Some(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: column_name, - })) - }) + if let Some(origin) = self + .inner + .cache_table_to_column_names + .get(&relation_id) + .and_then(|table_columns| { + let column_name = table_columns.columns.get(&attribute_no).cloned()?; + + Some(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name, + })) + }) { return Ok(origin); } diff --git a/sqlx-postgres/src/connection/establish.rs b/sqlx-postgres/src/connection/establish.rs index 684bf26599..634b71de4b 100644 --- a/sqlx-postgres/src/connection/establish.rs +++ b/sqlx-postgres/src/connection/establish.rs @@ -149,7 +149,8 @@ impl PgConnection { cache_type_info: HashMap::new(), cache_elem_type_to_array: HashMap::new(), cache_table_to_column_names: HashMap::new(), - log_settings: options.log_settings.clone(),}), + log_settings: options.log_settings.clone(), + }), }) } } diff --git a/sqlx-postgres/src/migrate.rs b/sqlx-postgres/src/migrate.rs index 2646466399..90ebd49a73 100644 --- a/sqlx-postgres/src/migrate.rs +++ b/sqlx-postgres/src/migrate.rs @@ -111,11 +111,27 @@ impl MigrateDatabase for Postgres { } impl Migrate for PgConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // language=SQL + self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) + .await?; + + Ok(()) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> 
BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQL - self.execute( - &*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -124,20 +140,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BYTEA NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) + "# + )) .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as( - &*format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&*format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -145,13 +164,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = - query_as(&*format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&*format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -230,13 +253,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query( - &*format!(r#" + let _ = query(&*format!( + r#" UPDATE {table_name} SET execution_time = $1 WHERE version = $2 - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -283,12 +306,12 @@ async fn execute_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query( - &*format!(r#" + let _ = query(&*format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( $1, $2, TRUE, $3, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) diff --git a/sqlx-sqlite/src/migrate.rs b/sqlx-sqlite/src/migrate.rs index bd339c7b51..e475f70308 100644 --- a/sqlx-sqlite/src/migrate.rs +++ b/sqlx-sqlite/src/migrate.rs @@ -65,10 +65,35 @@ impl MigrateDatabase for Sqlite { } impl Migrate for SqliteConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // Check if the schema already exists; if so, don't error. 
+ let schema_version: Option = + query_scalar(&format!("PRAGMA {schema_name}.schema_version")) + .fetch_optional(&mut *self) + .await?; + + if schema_version.is_some() { + return Ok(()); + } + + Err(MigrateError::CreateSchemasNotSupported( + format!("cannot create new schema {schema_name}; creation of additional schemas in SQLite requires attaching extra database files"), + )) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQLite - self.execute(&*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -77,20 +102,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) - .await?; + "# + )) + .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let row: Option<(i64,)> = query_as( - &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -98,13 +126,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let rows: Vec<(i64, Vec)> = - query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -146,12 +178,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?1, ?2, TRUE, ?3, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -168,13 +200,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET execution_time = ?1 WHERE version = ?2 - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) From a8d9ac2f8c9daab2dde405a3a056f0a9571db203 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 26 Jan 2025 01:00:46 -0800 Subject: [PATCH 16/78] WIP feat: create multi-tenant database example --- Cargo.lock | 50 +++++-- Cargo.toml | 1 + .../postgres/axum-multi-tenant/Cargo.toml | 18 +++ examples/postgres/axum-multi-tenant/README.md | 11 ++ .../axum-multi-tenant/accounts/Cargo.toml | 13 ++ .../accounts/migrations/01_setup.sql | 0 .../accounts/migrations/02_account.sql | 8 ++ .../axum-multi-tenant/accounts/sqlx.toml | 6 + .../axum-multi-tenant/accounts/src/lib.rs | 133 ++++++++++++++++++ .../axum-multi-tenant/payments/Cargo.toml | 7 + 
.../axum-multi-tenant/payments/src/lib.rs | 14 ++ .../postgres/axum-multi-tenant/src/main.rs | 3 + 12 files changed, 251 insertions(+), 13 deletions(-) create mode 100644 examples/postgres/axum-multi-tenant/Cargo.toml create mode 100644 examples/postgres/axum-multi-tenant/README.md create mode 100644 examples/postgres/axum-multi-tenant/accounts/Cargo.toml create mode 100644 examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql create mode 100644 examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql create mode 100644 examples/postgres/axum-multi-tenant/accounts/sqlx.toml create mode 100644 examples/postgres/axum-multi-tenant/accounts/src/lib.rs create mode 100644 examples/postgres/axum-multi-tenant/payments/Cargo.toml create mode 100644 examples/postgres/axum-multi-tenant/payments/src/lib.rs create mode 100644 examples/postgres/axum-multi-tenant/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index e86b9d4a27..ae4f589783 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,17 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "accounts" +version = "0.1.0" +dependencies = [ + "argon2 0.5.3", + "sqlx", + "thiserror 1.0.69", + "tokio", + "uuid", +] + [[package]] name = "addr2line" version = "0.21.0" @@ -1279,6 +1290,16 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "error-code" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64f18991e7bf11e7ffee451b5318b5c1a73c52d0d0ada6e5a3017c8c1ced6a21" +dependencies = [ + "libc", + "str-buf", +] + [[package]] name = "etcetera" version = "0.10.0" @@ -2312,6 +2333,15 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "multi-tenant" +version = "0.8.3" +dependencies = [ + "accounts", + "payments", + "sqlx", +] + [[package]] name = "native-tls" version = "0.2.12" @@ -2575,6 +2605,13 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "payments" +version = "0.1.0" +dependencies = [ + "sqlx", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -3108,19 +3145,6 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "rustix" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" -dependencies = [ - "bitflags 2.7.0", - "errno", - "libc", - "linux-raw-sys 0.9.4", - "windows-sys 0.59.0", -] - [[package]] name = "rustls" version = "0.23.21" diff --git a/Cargo.toml b/Cargo.toml index b9226b43ef..dc3c30bb96 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,7 @@ members = [ "sqlx-postgres", "sqlx-sqlite", "examples/mysql/todos", + "examples/postgres/axum-multi-tenant", "examples/postgres/axum-social-with-tests", "examples/postgres/chat", "examples/postgres/files", diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml new file mode 100644 index 0000000000..1be607c5b8 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "multi-tenant" +version.workspace = true +license.workspace = true +edition.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true +authors.workspace = true + +[dependencies] +accounts = { path = "accounts" } +payments = { path = "payments" } + +sqlx = { path = "../../..", version = "0.8.3", features = 
["runtime-tokio", "postgres"] } + +[lints] +workspace = true diff --git a/examples/postgres/axum-multi-tenant/README.md b/examples/postgres/axum-multi-tenant/README.md new file mode 100644 index 0000000000..d38f7f3ea5 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/README.md @@ -0,0 +1,11 @@ +# Axum App with Multi-tenant Database + +This example project involves three crates, each owning a different schema in one database, +with their own set of migrations. + +* The main crate, an Axum app. + * Owns the `public` schema (tables are referenced unqualified). +* `accounts`: a subcrate simulating a reusable account-management crate. + * Owns schema `accounts`. +* `payments`: a subcrate simulating a wrapper for a payments API. + * Owns schema `payments`. diff --git a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml new file mode 100644 index 0000000000..485ba8eb73 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "accounts" +version = "0.1.0" +edition = "2021" + +[dependencies] +sqlx = { workspace = true, features = ["postgres", "time"] } +argon2 = { version = "0.5.3", features = ["password-hash"] } +tokio = { version = "1", features = ["rt", "sync"] } + +uuid = "1" +thiserror = "1" +rand = "0.8" diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql new file mode 100644 index 0000000000..e69de29bb2 diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql new file mode 100644 index 0000000000..91b9cf82e0 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql @@ -0,0 +1,8 @@ +create table account +( + account_id uuid primary key default gen_random_uuid(), + email text unique not null, + password_hash text not null, + created_at timestamptz not null default now(), + updated_at timestamptz +); diff --git a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml new file mode 100644 index 0000000000..45042f1333 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml @@ -0,0 +1,6 @@ +[migrate] +create-schemas = ["accounts"] +migrations-table = "accounts._sqlx_migrations" + +[macros.table-overrides.'accounts.account'] +'account_id' = "crate::AccountId" diff --git a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs new file mode 100644 index 0000000000..f015af3d40 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs @@ -0,0 +1,133 @@ +use std::error::Error; +use argon2::{password_hash, Argon2, PasswordHash, PasswordHasher, PasswordVerifier}; + +use password_hash::PasswordHashString; + +use sqlx::{PgConnection, PgTransaction}; +use sqlx::types::Text; + +use uuid::Uuid; + +use tokio::sync::Semaphore; + +#[derive(sqlx::Type)] +#[sqlx(transparent)] +pub struct AccountId(pub Uuid); + + +pub struct AccountsManager { + hashing_semaphore: Semaphore, +} + +#[derive(Debug, thiserror::Error)] +pub enum CreateError { + #[error("email in-use")] + EmailInUse, + General(#[source] + #[from] GeneralError), +} + +#[derive(Debug, thiserror::Error)] +pub enum AuthenticateError { + #[error("unknown email")] + UnknownEmail, + #[error("invalid password")] + InvalidPassword, + General(#[source] + #[from] 
GeneralError), +} + +#[derive(Debug, thiserror::Error)] +pub enum GeneralError { + Sqlx(#[source] + #[from] sqlx::Error), + PasswordHash(#[source] #[from] argon2::password_hash::Error), + Task(#[source] + #[from] tokio::task::JoinError), +} + +impl AccountsManager { + pub async fn new(conn: &mut PgConnection, max_hashing_threads: usize) -> Result { + sqlx::migrate!().run(conn).await?; + + AccountsManager { + hashing_semaphore: Semaphore::new(max_hashing_threads) + } + } + + async fn hash_password(&self, password: String) -> Result { + let guard = self.hashing_semaphore.acquire().await + .expect("BUG: this semaphore should not be closed"); + + // We transfer ownership to the blocking task and back to ensure Tokio doesn't spawn + // excess threads. + let (_guard, res) = tokio::task::spawn_blocking(move || { + let salt = argon2::password_hash::SaltString::generate(rand::thread_rng()); + (guard, Argon2::default().hash_password(password.as_bytes(), &salt)) + }) + .await?; + + Ok(res?) + } + + async fn verify_password(&self, password: String, hash: PasswordHashString) -> Result<(), AuthenticateError> { + let guard = self.hashing_semaphore.acquire().await + .expect("BUG: this semaphore should not be closed"); + + let (_guard, res) = tokio::task::spawn_blocking(move || { + (guard, Argon2::default().verify_password(password.as_bytes(), &hash.password_hash())) + }).await.map_err(GeneralError::from)?; + + if let Err(password_hash::Error::Password) = res { + return Err(AuthenticateError::InvalidPassword); + } + + res.map_err(GeneralError::from)?; + + Ok(()) + } + + pub async fn create(&self, txn: &mut PgTransaction, email: &str, password: String) -> Result { + // Hash password whether the account exists or not to make it harder + // to tell the difference in the timing. + let hash = self.hash_password(password).await?; + + // language=PostgreSQL + sqlx::query!( + "insert into accounts.account(email, password_hash) \ + values ($1, $2) \ + returning account_id", + email, + Text(hash) as Text>, + ) + .fetch_one(&mut *txn) + .await + .map_err(|e| if e.constraint() == Some("account_account_id_key") { + CreateError::EmailInUse + } else { + GeneralError::from(e).into() + }) + } + + pub async fn authenticate(&self, conn: &mut PgConnection, email: &str, password: String) -> Result { + let maybe_account = sqlx::query!( + "select account_id, password_hash as \"password_hash: Text\" \ + from accounts.account \ + where email_id = $1", + email + ) + .fetch_optional(&mut *conn) + .await + .map_err(GeneralError::from)?; + + let Some(account) = maybe_account else { + // Hash the password whether the account exists or not to hide the difference in timing. 
+ self.hash_password(password).await.map_err(GeneralError::from)?; + return Err(AuthenticateError::UnknownEmail); + }; + + self.verify_password(password, account.password_hash.into())?; + + Ok(account.account_id) + } +} diff --git a/examples/postgres/axum-multi-tenant/payments/Cargo.toml b/examples/postgres/axum-multi-tenant/payments/Cargo.toml new file mode 100644 index 0000000000..0a2485955b --- /dev/null +++ b/examples/postgres/axum-multi-tenant/payments/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "payments" +version = "0.1.0" +edition = "2021" + +[dependencies] +sqlx = { workspace = true, features = ["postgres", "time"] } diff --git a/examples/postgres/axum-multi-tenant/payments/src/lib.rs b/examples/postgres/axum-multi-tenant/payments/src/lib.rs new file mode 100644 index 0000000000..7d12d9af81 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/payments/src/lib.rs @@ -0,0 +1,14 @@ +pub fn add(left: usize, right: usize) -> usize { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/examples/postgres/axum-multi-tenant/src/main.rs b/examples/postgres/axum-multi-tenant/src/main.rs new file mode 100644 index 0000000000..e7a11a969c --- /dev/null +++ b/examples/postgres/axum-multi-tenant/src/main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("Hello, world!"); +} From f415b087172dbea44cdb15ea27a719d348b1efa5 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Wed, 29 Jan 2025 07:11:23 -0800 Subject: [PATCH 17/78] SQLite extension loading via sqlx.toml for CLI and query macros --- sqlx-cli/src/lib.rs | 2 +- sqlx-core/src/any/connection/mod.rs | 14 +++++++++++ sqlx-core/src/any/options.rs | 15 +++++++++++ sqlx-core/src/config/common.rs | 39 +++++++++++++++++++++++++++++ sqlx-core/src/config/reference.toml | 6 +++++ sqlx-sqlite/src/any.rs | 8 ++++++ sqlx-sqlite/src/lib.rs | 9 ++++++- 7 files changed, 91 insertions(+), 2 deletions(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 987b956d9d..fc9e2446c4 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -135,7 +135,7 @@ pub async fn run(opt: Opt) -> Result<()> { /// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. async fn connect(opts: &ConnectOpts) -> anyhow::Result { - retry_connect_errors(opts, AnyConnection::connect).await + retry_connect_errors(opts, AnyConnection::connect_with_config).await } /// Attempt an operation that may return errors like `ConnectionRefused`, diff --git a/sqlx-core/src/any/connection/mod.rs b/sqlx-core/src/any/connection/mod.rs index 8cf8fc510c..509e8f5e93 100644 --- a/sqlx-core/src/any/connection/mod.rs +++ b/sqlx-core/src/any/connection/mod.rs @@ -40,6 +40,20 @@ impl AnyConnection { }) } + /// UNSTABLE: for use with `sqlx-cli` + /// + /// Connect to the database, and instruct the nested driver to + /// read options from the sqlx.toml file as appropriate. 
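The `accounts` crate above is meant to be consumed by the parent `multi-tenant` binary, whose `main.rs` is still a stub at this point in the series. The sketch below shows one plausible way that wiring could look; nothing in it is part of the patch, the elided return types are assumptions, and it presumes the WIP error enums gain the `#[error(...)]` display attributes that `thiserror` requires so that `?` can convert them.

```rust
use sqlx::PgPool;

async fn bootstrap(pool: &PgPool) -> Result<(), Box<dyn std::error::Error>> {
    let mut conn = pool.acquire().await?;

    // Runs the accounts crate's own migrations (into the `accounts` schema, per its
    // sqlx.toml) and caps concurrent Argon2 hashing at four blocking tasks.
    let accounts = accounts::AccountsManager::new(&mut *conn, 4).await?;

    // Account creation takes a caller-owned transaction so it can be composed
    // with other tenant-setup statements.
    let mut txn = pool.begin().await?;
    let account_id = accounts
        .create(&mut txn, "user@example.com", "correct horse battery staple".into())
        .await?;
    txn.commit().await?;

    // Authentication only needs a plain connection.
    accounts
        .authenticate(&mut *conn, "user@example.com", "correct horse battery staple".into())
        .await?;

    println!("created and authenticated account {}", account_id.0);
    Ok(())
}
```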
+ #[doc(hidden)] + pub fn connect_with_config(url: &str) -> BoxFuture<'static, Result> + where + Self: Sized, + { + let options: Result = url.parse(); + + Box::pin(async move { Self::connect_with(&options?.allow_config_file()).await }) + } + pub(crate) fn connect_with_db( options: &AnyConnectOptions, ) -> BoxFuture<'_, crate::Result> diff --git a/sqlx-core/src/any/options.rs b/sqlx-core/src/any/options.rs index bb29d817c9..5ed68efec5 100644 --- a/sqlx-core/src/any/options.rs +++ b/sqlx-core/src/any/options.rs @@ -19,6 +19,7 @@ use url::Url; pub struct AnyConnectOptions { pub database_url: Url, pub log_settings: LogSettings, + pub enable_config: bool, } impl FromStr for AnyConnectOptions { type Err = Error; @@ -29,6 +30,7 @@ impl FromStr for AnyConnectOptions { .parse::() .map_err(|e| Error::Configuration(e.into()))?, log_settings: LogSettings::default(), + enable_config: false, }) } } @@ -40,6 +42,7 @@ impl ConnectOptions for AnyConnectOptions { Ok(AnyConnectOptions { database_url: url.clone(), log_settings: LogSettings::default(), + enable_config: false, }) } @@ -63,3 +66,15 @@ impl ConnectOptions for AnyConnectOptions { self } } + +impl AnyConnectOptions { + /// UNSTABLE: for use with `sqlx-cli` + /// + /// Allow nested drivers to extract configuration information from + /// the sqlx.toml file. + #[doc(hidden)] + pub fn allow_config_file(mut self) -> Self { + self.enable_config = true; + self + } +} diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index d2bf639e5f..9a17c7d0ef 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -40,6 +40,14 @@ pub struct Config { /// The query macros used in `foo` will use `FOO_DATABASE_URL`, /// and the ones used in `bar` will use `BAR_DATABASE_URL`. pub database_url_var: Option, + + /// Settings for specific database drivers. + /// + /// These settings apply when checking queries, or when applying + /// migrations via `sqlx-cli`. These settings *do not* apply when + /// applying migrations via the macro, as that uses the run-time + /// database connection configured by the application. + pub drivers: Drivers, } impl Config { @@ -47,3 +55,34 @@ impl Config { self.database_url_var.as_deref().unwrap_or("DATABASE_URL") } } + +/// Configuration for specific database drivers. +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] +pub struct Drivers { + /// Specify options for the SQLite driver. + pub sqlite: SQLite, +} + +/// Configuration for the SQLite database driver. +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] +pub struct SQLite { + /// Specify extensions to load. + /// + /// ### Example: Load the "uuid" and "vsv" extensions + /// `sqlx.toml`: + /// ```toml + /// [common.drivers.sqlite] + /// load-extensions = ["uuid", "vsv"] + /// ``` + pub load_extensions: Vec, +} diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 77833fb5a8..787c3456db 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -15,6 +15,12 @@ # If not specified, defaults to `DATABASE_URL` database-url-var = "FOO_DATABASE_URL" +[common.drivers.sqlite] +# Load extensions into SQLite when running macros or migrations +# +# Defaults to an empty list, which has no effect. 
+load-extensions = ["uuid", "vsv"] + ############################################################################################### # Configuration for the `query!()` family of macros. diff --git a/sqlx-sqlite/src/any.rs b/sqlx-sqlite/src/any.rs index c72370d0ff..96c224b8a4 100644 --- a/sqlx-sqlite/src/any.rs +++ b/sqlx-sqlite/src/any.rs @@ -201,6 +201,14 @@ impl<'a> TryFrom<&'a AnyConnectOptions> for SqliteConnectOptions { fn try_from(opts: &'a AnyConnectOptions) -> Result { let mut opts_out = SqliteConnectOptions::from_url(&opts.database_url)?; opts_out.log_settings = opts.log_settings.clone(); + + if opts.enable_config { + let config = sqlx_core::config::Config::from_crate(); + for extension in config.common.drivers.sqlite.load_extensions.iter() { + opts_out = opts_out.extension(extension); + } + } + Ok(opts_out) } } diff --git a/sqlx-sqlite/src/lib.rs b/sqlx-sqlite/src/lib.rs index e4a122b6bd..afbe639ab2 100644 --- a/sqlx-sqlite/src/lib.rs +++ b/sqlx-sqlite/src/lib.rs @@ -127,8 +127,15 @@ pub static CREATE_DB_WAL: AtomicBool = AtomicBool::new(true); /// UNSTABLE: for use by `sqlite-macros-core` only. #[doc(hidden)] pub fn describe_blocking(query: &str, database_url: &str) -> Result, Error> { - let opts: SqliteConnectOptions = database_url.parse()?; + let mut opts: SqliteConnectOptions = database_url.parse()?; + + let config = sqlx_core::config::Config::from_crate(); + for extension in config.common.drivers.sqlite.load_extensions.iter() { + opts = opts.extension(extension); + } + let params = EstablishParams::from_options(&opts)?; + let mut conn = params.establish()?; // Execute any ancillary `PRAGMA`s From 22317693f220e1af755842c71ceed00fe40eaf6f Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Fri, 31 Jan 2025 03:50:57 -0800 Subject: [PATCH 18/78] fix: allow start_database to function when the SQLite database file does not already exist --- tests/docker.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/docker.py b/tests/docker.py index b1b81b07fb..5e8c74fb1f 100644 --- a/tests/docker.py +++ b/tests/docker.py @@ -17,9 +17,10 @@ def start_database(driver, database, cwd): database = path.join(cwd, database) (base_path, ext) = path.splitext(database) new_database = f"{base_path}.test{ext}" - shutil.copy(database, new_database) + if path.exists(database): + shutil.copy(database, new_database) # short-circuit for sqlite - return f"sqlite://{path.join(cwd, new_database)}" + return f"sqlite://{path.join(cwd, new_database)}?mode=rwc" res = subprocess.run( ["docker-compose", "up", "-d", driver], From 6e55a88752d0746743cea4cfa3ac8237c6739c6b Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Mon, 3 Feb 2025 03:03:15 -0800 Subject: [PATCH 19/78] Added example demonstrating migration and compile-time checking with SQLite extensions --- Cargo.toml | 1 + examples/sqlite/extension/Cargo.toml | 17 +++++++++ .../sqlite/extension/download-extension.sh | 9 +++++ examples/sqlite/extension/extension.test.db | Bin 0 -> 16384 bytes .../migrations/20250203094951_addresses.sql | 25 +++++++++++++ examples/sqlite/extension/sqlx.toml | 12 +++++++ examples/sqlite/extension/src/main.rs | 33 ++++++++++++++++++ examples/x.py | 1 + 8 files changed, 98 insertions(+) create mode 100644 examples/sqlite/extension/Cargo.toml create mode 100755 examples/sqlite/extension/download-extension.sh create mode 100644 examples/sqlite/extension/extension.test.db create mode 100644 examples/sqlite/extension/migrations/20250203094951_addresses.sql create mode 100644 
examples/sqlite/extension/sqlx.toml create mode 100644 examples/sqlite/extension/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index dc3c30bb96..2382f859b5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ members = [ "examples/postgres/todos", "examples/postgres/transaction", "examples/sqlite/todos", + "examples/sqlite/extension", ] [workspace.package] diff --git a/examples/sqlite/extension/Cargo.toml b/examples/sqlite/extension/Cargo.toml new file mode 100644 index 0000000000..bf20add4b3 --- /dev/null +++ b/examples/sqlite/extension/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "sqlx-example-sqlite-extension" +version = "0.1.0" +license.workspace = true +edition.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true +authors.workspace = true + +[dependencies] +sqlx = { path = "../../../", features = [ "sqlite", "runtime-tokio", "tls-native-tls" ] } +tokio = { version = "1.20.0", features = ["rt", "macros"]} +anyhow = "1.0" + +[lints] +workspace = true diff --git a/examples/sqlite/extension/download-extension.sh b/examples/sqlite/extension/download-extension.sh new file mode 100755 index 0000000000..ce7f23a486 --- /dev/null +++ b/examples/sqlite/extension/download-extension.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# This grabs a pre-compiled version of the extension used in this +# example, and stores it in a temporary directory. That's a bit +# unusual. Normally, any extensions you need will be installed into a +# directory on the library search path, either by using the system +# package manager or by compiling and installing it yourself. + +mkdir /tmp/sqlite3-lib && wget -O /tmp/sqlite3-lib/ipaddr.so https://github.com/nalgeon/sqlean/releases/download/0.15.2/ipaddr.so diff --git a/examples/sqlite/extension/extension.test.db b/examples/sqlite/extension/extension.test.db new file mode 100644 index 0000000000000000000000000000000000000000..a7f1325883ccd36ba1e882a496bf923277394637 GIT binary patch literal 16384 zcmeI%!Ee$~7y$6Llx|?xHO^0VCQm5Qd6*&OsNX)rt#R% zOKg#*HIB=Ls!`d%M$IBCq#oP~(tRVL5dFY=w5)F4sxx*Qq^|ZsF_xw zQXwV1RZq_=c`Z4=&>e)&D;JVWs)py5ayccV>D?ji>1Lx?v}JHAn^l!$raP})EnX+Z zn`WbmQ))KVQzK1MY_xWzTSk?1w%^&DKRCm1Bl!?b8MP8=+EDgyc)ifDR*z!67ekkO zGama<>u)`719qp;Q_4P|fdB}A00@8p2!H?xfB*=900@8p2>dO9t8||?5b;&mp&JLm zd(XNrQ^HbsG9-mVQUuF#LXs1a=zH(0`s~5-;c(ys+1NG%Zy$Y|ejKid_eT@Y7cY(O z$R|JTp4q!IxV3ls?tW}%hFSj-@Y~;uMxQDBfCd5}00JNY0w4eaAOHd&00JNY0wC~T z3HUr-U;BMR>;L~D;RS_0qJ8uR?V=~>A=*Smq@r_3LZ{HM@Lf0*-U+YmLC`<|1V8`; zKmY_l00ck)1V8`;K;X{_@I#&xv@A`uK5 anyhow::Result<()> { + let opts = SqliteConnectOptions::from_str(&std::env::var("DATABASE_URL")?)? + // The sqlx.toml file controls loading extensions for the CLI + // and for the query checking macros, *not* for the + // application while it's running. Thus, if we want the + // extension to be available during program execution, we need + // to load it. + // + // Note that while in this case the extension path is the same + // when checking the program (sqlx.toml) and when running it + // (here), this is not required. The runtime environment can + // be entirely different from the development one. + // + // The extension can be described with a full path, as seen + // here, but in many cases that will not be necessary. 
As long + // as the extension is installed in a directory on the library + // search path, it is sufficient to just provide the extension + // name, like "ipaddr" + .extension("/tmp/sqlite3-lib/ipaddr"); + + let db = SqlitePool::connect_with(opts).await?; + + query!("insert into addresses (address, family) values (?1, ipfamily(?1))", "10.0.0.10").execute(&db).await?; + + println!("Query which requires the extension was successfully executed."); + + Ok(()) +} diff --git a/examples/x.py b/examples/x.py index 79f6fda1ba..aaf4170c77 100755 --- a/examples/x.py +++ b/examples/x.py @@ -85,3 +85,4 @@ def project(name, database=None, driver=None): project("mysql/todos", driver="mysql_8", database="todos") project("postgres/todos", driver="postgres_12", database="todos") project("sqlite/todos", driver="sqlite", database="todos.db") +project("sqlite/extension", driver="sqlite", database="extension.db") From 8126ae1d7e466af617a9a2b74e7a2ec35e53ef2f Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Mon, 3 Feb 2025 03:04:19 -0800 Subject: [PATCH 20/78] remove accidentally included db file --- examples/sqlite/extension/extension.test.db | Bin 16384 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 examples/sqlite/extension/extension.test.db diff --git a/examples/sqlite/extension/extension.test.db b/examples/sqlite/extension/extension.test.db deleted file mode 100644 index a7f1325883ccd36ba1e882a496bf923277394637..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16384 zcmeI%!Ee$~7y$6Llx|?xHO^0VCQm5Qd6*&OsNX)rt#R% zOKg#*HIB=Ls!`d%M$IBCq#oP~(tRVL5dFY=w5)F4sxx*Qq^|ZsF_xw zQXwV1RZq_=c`Z4=&>e)&D;JVWs)py5ayccV>D?ji>1Lx?v}JHAn^l!$raP})EnX+Z zn`WbmQ))KVQzK1MY_xWzTSk?1w%^&DKRCm1Bl!?b8MP8=+EDgyc)ifDR*z!67ekkO zGama<>u)`719qp;Q_4P|fdB}A00@8p2!H?xfB*=900@8p2>dO9t8||?5b;&mp&JLm zd(XNrQ^HbsG9-mVQUuF#LXs1a=zH(0`s~5-;c(ys+1NG%Zy$Y|ejKid_eT@Y7cY(O z$R|JTp4q!IxV3ls?tW}%hFSj-@Y~;uMxQDBfCd5}00JNY0w4eaAOHd&00JNY0wC~T z3HUr-U;BMR>;L~D;RS_0qJ8uR?V=~>A=*Smq@r_3LZ{HM@Lf0*-U+YmLC`<|1V8`; zKmY_l00ck)1V8`;K;X{_@I#&xv@A`uK5 Date: Thu, 1 May 2025 01:55:39 -0700 Subject: [PATCH 21/78] Update sqlx-core/src/config/common.rs Doc formatting tweak Co-authored-by: Josh McKinney --- sqlx-core/src/config/common.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 9a17c7d0ef..676741a931 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -78,7 +78,7 @@ pub struct Drivers { pub struct SQLite { /// Specify extensions to load. 
/// - /// ### Example: Load the "uuid" and "vsv" extensions + /// # Example: Load the "uuid" and "vsv" extensions /// `sqlx.toml`: /// ```toml /// [common.drivers.sqlite] From 15c837ed6a192a31e5914274661c7c9b0f7dd359 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 26 Jul 2024 03:31:48 -0700 Subject: [PATCH 22/78] feat: create `sqlx.toml` format --- Cargo.toml | 12 +- sqlx-core/Cargo.toml | 12 +- sqlx-core/src/config/common.rs | 31 ++-- sqlx-core/src/config/macros.rs | 216 +++++++--------------------- sqlx-core/src/config/migrate.rs | 98 +++---------- sqlx-core/src/config/mod.rs | 136 ++++++------------ sqlx-core/src/config/reference.toml | 59 +++----- sqlx-core/src/config/tests.rs | 23 ++- sqlx-core/src/lib.rs | 1 + sqlx-macros-core/Cargo.toml | 4 +- sqlx-macros/Cargo.toml | 3 +- src/lib.rs | 30 ---- 12 files changed, 170 insertions(+), 455 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2382f859b5..20a2976738 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -56,18 +56,20 @@ repository.workspace = true rust-version.workspace = true [package.metadata.docs.rs] -features = ["all-databases", "_unstable-all-types", "_unstable-doc"] +features = ["all-databases", "_unstable-all-types", "_unstable-doc", "sqlite-preupdate-hook"] rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["any", "macros", "migrate", "json", "sqlx-toml"] +default = ["any", "macros", "migrate", "json", "config-all"] derive = ["sqlx-macros/derive"] macros = ["derive", "sqlx-macros/macros"] migrate = ["sqlx-core/migrate", "sqlx-macros?/migrate", "sqlx-mysql?/migrate", "sqlx-postgres?/migrate", "sqlx-sqlite?/migrate"] -# Enable parsing of `sqlx.toml` for configuring macros and migrations. -sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml"] +# Enable parsing of `sqlx.toml` for configuring macros, migrations, or both. +config-macros = ["sqlx-macros?/config-macros"] +config-migrate = ["sqlx-macros?/config-migrate"] +config-all = ["config-macros", "config-migrate"] # intended mainly for CI and docs all-databases = ["mysql", "sqlite", "postgres", "any"] @@ -85,7 +87,7 @@ _unstable-all-types = [ "bstr" ] # Render documentation that wouldn't otherwise be shown (e.g. `sqlx_core::config`). -_unstable-doc = [] +_unstable-doc = ["config-all", "sqlx-core/_unstable-doc"] # Base runtime features without TLS runtime-async-std = ["_rt-async-std", "sqlx-core/_rt-async-std", "sqlx-macros?/_rt-async-std"] diff --git a/sqlx-core/Cargo.toml b/sqlx-core/Cargo.toml index bf57849733..c12de18889 100644 --- a/sqlx-core/Cargo.toml +++ b/sqlx-core/Cargo.toml @@ -13,7 +13,7 @@ features = ["offline"] [features] default = [] -migrate = ["sha2", "crc"] +migrate = ["sha2", "crc", "config-migrate"] any = [] @@ -32,13 +32,11 @@ _tls-none = [] # support offline/decoupled building (enables serialization of `Describe`) offline = ["serde", "either/serde"] -# Enable parsing of `sqlx.toml`. -# For simplicity, the `config` module is always enabled, -# but disabling this disables the `serde` derives and the `toml` crate, -# which is a good bit less code to compile if the feature isn't being used. 
-sqlx-toml = ["serde", "toml/parse"] +config = ["serde", "toml/parse"] +config-macros = ["config"] +config-migrate = ["config"] -_unstable-doc = ["sqlx-toml"] +_unstable-doc = ["config-macros", "config-migrate"] [dependencies] # Runtimes diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 676741a931..95dd388071 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,10 +1,5 @@ /// Configuration shared by multiple components. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct Config { /// Override the database URL environment variable. /// @@ -21,14 +16,14 @@ pub struct Config { /// /// #### `foo/sqlx.toml` /// ```toml - /// [common] - /// database-url-var = "FOO_DATABASE_URL" + /// [macros] + /// database_url_var = "FOO_DATABASE_URL" /// ``` /// /// #### `bar/sqlx.toml` /// ```toml - /// [common] - /// database-url-var = "BAR_DATABASE_URL" + /// [macros] + /// database_url_var = "BAR_DATABASE_URL" /// ``` /// /// #### `.env` @@ -57,24 +52,14 @@ impl Config { } /// Configuration for specific database drivers. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct Drivers { /// Specify options for the SQLite driver. pub sqlite: SQLite, } /// Configuration for the SQLite database driver. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct SQLite { /// Specify extensions to load. /// @@ -82,7 +67,7 @@ pub struct SQLite { /// `sqlx.toml`: /// ```toml /// [common.drivers.sqlite] - /// load-extensions = ["uuid", "vsv"] + /// load_extensions = ["uuid", "vsv"] /// ``` pub load_extensions: Vec, } diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 19e5f42fa0..5edd30dc15 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -1,29 +1,40 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] +#[serde(default)] pub struct Config { - /// Specify which crates' types to use when types from multiple crates apply. + /// Specify the crate to use for mapping date/time types to Rust. + /// + /// The default behavior is to use whatever crate is enabled, + /// [`chrono`] or [`time`] (the latter takes precedent). + /// + /// [`chrono`]: crate::types::chrono + /// [`time`]: crate::types::time + /// + /// Example: Always Use Chrono + /// ------- + /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable + /// the `time` feature of SQLx which will force it on for all crates using SQLx, + /// which will result in problems if your crate wants to use types from [`chrono`]. + /// + /// You can use the type override syntax (see `sqlx::query!` for details), + /// or you can force an override globally by setting this option. /// - /// See [`PreferredCrates`] for details. 
- pub preferred_crates: PreferredCrates, + /// #### `sqlx.toml` + /// ```toml + /// [macros] + /// datetime_crate = "chrono" + /// ``` + /// + /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification + pub datetime_crate: DateTimeCrate, /// Specify global overrides for mapping SQL type names to Rust type names. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. /// - /// ## Note: Case-Sensitive - /// Currently, the case of the type name MUST match the name SQLx knows it by. - /// Built-in types are spelled in all-uppercase to match SQL convention. - /// - /// However, user-created types in Postgres are all-lowercase unless quoted. - /// /// ## Note: Orthogonal to Nullability /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` /// or not. They only override the inner type used. @@ -67,9 +78,9 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Override a built-in type - /// 'UUID' = "crate::types::MyUuid" + /// 'uuid' = "crate::types::MyUuid" /// /// # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) /// # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -104,7 +115,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Map SQL type `foo` to `crate::types::Foo` /// 'foo' = "crate::types::Foo" /// ``` @@ -114,7 +125,7 @@ pub struct Config { /// (See `Note` section above for details.) /// /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # Map SQL type `foo.foo` to `crate::types::Foo` /// 'foo.foo' = "crate::types::Foo" /// ``` @@ -125,7 +136,7 @@ pub struct Config { /// it must be wrapped in quotes _twice_ for SQLx to know the difference: /// /// ```toml - /// [macros.type-overrides] + /// [macros.type_overrides] /// # `"Foo"` in SQLx /// '"Foo"' = "crate::types::Foo" /// # **NOT** `"Foo"` in SQLx (parses as just `Foo`) @@ -138,11 +149,9 @@ pub struct Config { /// ``` /// /// (See `Note` section above for details.) - // TODO: allow specifying different types for input vs output - // e.g. to accept `&[T]` on input but output `Vec` pub type_overrides: BTreeMap, - /// Specify per-table and per-column overrides for mapping SQL types to Rust types. + /// Specify per-column overrides for mapping SQL types to Rust types. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. @@ -197,7 +206,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.table-overrides.'foo'] + /// [macros.column_overrides.'foo'] /// # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: /// 'bar' = "crate::types::Bar" /// @@ -209,89 +218,25 @@ pub struct Config { /// # "Bar" = "crate::types::Bar" /// /// # Table name may be quoted (note the wrapping single-quotes) - /// [macros.table-overrides.'"Foo"'] + /// [macros.column_overrides.'"Foo"'] /// 'bar' = "crate::types::Bar" /// '"Bar"' = "crate::types::Bar" /// /// # Table name may also be schema-qualified. /// # Note how the dot is inside the quotes. 
- /// [macros.table-overrides.'my_schema.my_table'] + /// [macros.column_overrides.'my_schema.my_table'] /// 'my_column' = "crate::types::MyType" /// /// # Quoted schema, table, and column names - /// [macros.table-overrides.'"My Schema"."My Table"'] + /// [macros.column_overrides.'"My Schema"."My Table"'] /// '"My Column"' = "crate::types::MyType" /// ``` - pub table_overrides: BTreeMap>, + pub column_overrides: BTreeMap>, } -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] -pub struct PreferredCrates { - /// Specify the crate to use for mapping date/time types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`chrono`] or [`time`] (the latter takes precedent). - /// - /// [`chrono`]: crate::types::chrono - /// [`time`]: crate::types::time - /// - /// Example: Always Use Chrono - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `time` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`chrono`]. - /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. - /// - /// #### `sqlx.toml` - /// ```toml - /// [macros.preferred-crates] - /// date-time = "chrono" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub date_time: DateTimeCrate, - - /// Specify the crate to use for mapping `NUMERIC` types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`bigdecimal`] or [`rust_decimal`] (the latter takes precedent). - /// - /// [`bigdecimal`]: crate::types::bigdecimal - /// [`rust_decimal`]: crate::types::rust_decimal - /// - /// Example: Always Use `bigdecimal` - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `rust_decimal` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`bigdecimal`]. - /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. - /// - /// #### `sqlx.toml` - /// ```toml - /// [macros.preferred-crates] - /// numeric = "bigdecimal" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub numeric: NumericCrate, -} - -/// The preferred crate to use for mapping date/time types to Rust. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The crate to use for mapping date/time types to Rust. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DateTimeCrate { /// Use whichever crate is enabled (`time` then `chrono`). #[default] @@ -300,63 +245,33 @@ pub enum DateTimeCrate { /// Always use types from [`chrono`][crate::types::chrono]. /// /// ```toml - /// [macros.preferred-crates] - /// date-time = "chrono" + /// [macros] + /// datetime_crate = "chrono" /// ``` Chrono, /// Always use types from [`time`][crate::types::time]. 
/// /// ```toml - /// [macros.preferred-crates] - /// date-time = "time" + /// [macros] + /// datetime_crate = "time" /// ``` Time, } -/// The preferred crate to use for mapping `NUMERIC` types to Rust. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] -pub enum NumericCrate { - /// Use whichever crate is enabled (`rust_decimal` then `bigdecimal`). - #[default] - Inferred, - - /// Always use types from [`bigdecimal`][crate::types::bigdecimal]. - /// - /// ```toml - /// [macros.preferred-crates] - /// numeric = "bigdecimal" - /// ``` - #[cfg_attr(feature = "sqlx-toml", serde(rename = "bigdecimal"))] - BigDecimal, - - /// Always use types from [`rust_decimal`][crate::types::rust_decimal]. - /// - /// ```toml - /// [macros.preferred-crates] - /// numeric = "rust_decimal" - /// ``` - RustDecimal, -} - /// A SQL type name; may optionally be schema-qualified. /// -/// See [`macros.type-overrides`][Config::type_overrides] for usages. +/// See [`macros.type_overrides`][Config::type_overrides] for usages. pub type SqlType = Box; /// A SQL table name; may optionally be schema-qualified. /// -/// See [`macros.table-overrides`][Config::table_overrides] for usages. +/// See [`macros.column_overrides`][Config::column_overrides] for usages. pub type TableName = Box; /// A column in a SQL table. /// -/// See [`macros.table-overrides`][Config::table_overrides] for usages. +/// See [`macros.column_overrides`][Config::column_overrides] for usages. pub type ColumnName = Box; /// A Rust type name or path. @@ -368,49 +283,14 @@ pub type RustType = Box; impl Config { /// Get the override for a given type name (optionally schema-qualified). pub fn type_override(&self, type_name: &str) -> Option<&str> { - // TODO: make this case-insensitive self.type_overrides.get(type_name).map(|s| &**s) } /// Get the override for a given column and table name (optionally schema-qualified). pub fn column_override(&self, table: &str, column: &str) -> Option<&str> { - self.table_overrides + self.column_overrides .get(table) .and_then(|by_column| by_column.get(column)) .map(|s| &**s) } } - -impl DateTimeCrate { - /// Returns `self == Self::Inferred` - #[inline(always)] - pub fn is_inferred(&self) -> bool { - *self == Self::Inferred - } - - #[inline(always)] - pub fn crate_name(&self) -> Option<&str> { - match self { - Self::Inferred => None, - Self::Chrono => Some("chrono"), - Self::Time => Some("time"), - } - } -} - -impl NumericCrate { - /// Returns `self == Self::Inferred` - #[inline(always)] - pub fn is_inferred(&self) -> bool { - *self == Self::Inferred - } - - #[inline(always)] - pub fn crate_name(&self) -> Option<&str> { - match self { - Self::Inferred => None, - Self::BigDecimal => Some("bigdecimal"), - Self::RustDecimal => Some("rust_decimal"), - } - } -} diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 4865e24c76..5878f9a24f 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -12,27 +12,9 @@ use std::collections::BTreeSet; /// if the proper precautions are not taken. /// /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. 
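To make the type-override settings being reshuffled above concrete before moving on to the `[migrate]` side: the Rust end of an entry such as `'account_id' = "crate::AccountId"` (from the accounts example earlier in the series) is just a transparent newtype that the macros can substitute for the column's default mapping. A sketch, assuming the `uuid` feature is enabled; the commented-out query is illustrative only, since checked queries need a live `DATABASE_URL` at compile time.

```rust
use sqlx::types::Uuid;

// A transparent wrapper the macros can be pointed at via a type/table override.
#[derive(Debug, Clone, Copy, PartialEq, Eq, sqlx::Type)]
#[sqlx(transparent)]
pub struct AccountId(pub Uuid);

// With the override in place, a checked query is expected to hand back the
// wrapper instead of a bare `Uuid`:
//
//     let row = sqlx::query!(
//         "select account_id from accounts.account where email = $1",
//         email,
//     )
//     .fetch_one(&mut *conn)
//     .await?;
//     let id: AccountId = row.account_id;
```

Note that, per the documentation above, this only changes the inner type; nullability handling (`Option<_>` wrapping) is unaffected.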
-#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] +#[serde(default)] pub struct Config { - /// Specify the names of schemas to create if they don't already exist. - /// - /// This is done before checking the existence of the migrations table - /// (`_sqlx_migrations` or overridden `table_name` below) so that it may be placed in - /// one of these schemas. - /// - /// ### Example - /// `sqlx.toml`: - /// ```toml - /// [migrate] - /// create-schemas = ["foo"] - /// ``` - pub create_schemas: BTreeSet>, - /// Override the name of the table used to track executed migrations. /// /// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`. @@ -53,7 +35,7 @@ pub struct Config { /// ```toml /// [migrate] /// # Put `_sqlx_migrations` in schema `foo` - /// table-name = "foo._sqlx_migrations" + /// table_name = "foo._sqlx_migrations" /// ``` pub table_name: Option>, @@ -81,7 +63,7 @@ pub struct Config { /// `sqlx.toml`: /// ```toml /// [migrate] - /// ignored-chars = ["\r"] + /// ignored_chars = ["\r"] /// ``` /// /// For projects using Git, this can also be addressed using [`.gitattributes`]: @@ -99,7 +81,7 @@ pub struct Config { /// To make your migrations amenable to reformatting, you may wish to tell SQLx to ignore /// _all_ whitespace characters in migrations. /// - /// ##### Warning: Beware Syntactically Significant Whitespace! + /// ##### Warning: Beware Syntatically Significant Whitespace! /// If your migrations use string literals or quoted identifiers which contain whitespace, /// this configuration will cause the migration machinery to ignore some changes to these. /// This may result in a mismatch between the development and production versions of @@ -109,70 +91,51 @@ pub struct Config { /// ```toml /// [migrate] /// # Ignore common whitespace characters when hashing - /// ignored-chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF + /// ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF /// ``` // Likely lower overhead for small sets than `HashSet`. pub ignored_chars: BTreeSet, - /// Specify default options for new migrations created with `sqlx migrate add`. - pub defaults: MigrationDefaults, -} - -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] -pub struct MigrationDefaults { - /// Specify the default type of migration that `sqlx migrate add` should create by default. + /// Specify the default type of migration that `sqlx migrate create` should create by default. /// /// ### Example: Use Reversible Migrations by Default /// `sqlx.toml`: /// ```toml - /// [migrate.defaults] - /// migration-type = "reversible" + /// [migrate] + /// default_type = "reversible" /// ``` - pub migration_type: DefaultMigrationType, + pub default_type: DefaultMigrationType, - /// Specify the default scheme that `sqlx migrate add` should use for version integers. + /// Specify the default scheme that `sqlx migrate create` should use for version integers. /// /// ### Example: Use Sequential Versioning by Default /// `sqlx.toml`: /// ```toml - /// [migrate.defaults] - /// migration-versioning = "sequential" + /// [migrate] + /// default_versioning = "sequential" /// ``` - pub migration_versioning: DefaultVersioning, + pub default_versioning: DefaultVersioning, } -/// The default type of migration that `sqlx migrate add` should create by default. 
-#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The default type of migration that `sqlx migrate create` should create by default. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DefaultMigrationType { /// Create the same migration type as that of the latest existing migration, /// or `Simple` otherwise. #[default] Inferred, - /// Create non-reversible migrations (`_.sql`) by default. + /// Create a non-reversible migration (`_.sql`). Simple, - /// Create reversible migrations (`_.up.sql` and `[...].down.sql`) by default. + /// Create a reversible migration (`_.up.sql` and `[...].down.sql`). Reversible, } -/// The default scheme that `sqlx migrate add` should use for version integers. -#[derive(Debug, Default, PartialEq, Eq)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(rename_all = "snake_case") -)] +/// The default scheme that `sqlx migrate create` should use for version integers. +#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] +#[serde(rename_all = "snake_case")] pub enum DefaultVersioning { /// Infer the versioning scheme from existing migrations: /// @@ -193,20 +156,3 @@ pub enum DefaultVersioning { /// Use sequential integers for migration versions. Sequential, } - -#[cfg(feature = "migrate")] -impl Config { - pub fn migrations_dir(&self) -> &str { - self.migrations_dir.as_deref().unwrap_or("migrations") - } - - pub fn table_name(&self) -> &str { - self.table_name.as_deref().unwrap_or("_sqlx_migrations") - } - - pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { - let mut config = crate::migrate::ResolveConfig::new(); - config.ignore_chars(self.ignored_chars.iter().copied()); - config - } -} diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 02bde20f73..979477241f 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -7,7 +7,6 @@ //! //! See the [reference][`_reference`] for the full `sqlx.toml` file. -use std::error::Error; use std::fmt::Debug; use std::io; use std::path::{Path, PathBuf}; @@ -24,11 +23,13 @@ pub mod common; /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. +#[cfg(feature = "config-macros")] pub mod macros; /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. +#[cfg(feature = "config-migrate")] pub mod migrate; /// Reference for `sqlx.toml` files @@ -40,16 +41,11 @@ pub mod migrate; /// ``` pub mod _reference {} -#[cfg(all(test, feature = "sqlx-toml"))] +#[cfg(test)] mod tests; /// The parsed structure of a `sqlx.toml` file. -#[derive(Debug, Default)] -#[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") -)] +#[derive(Debug, Default, serde::Deserialize)] pub struct Config { /// Configuration shared by multiple components. /// @@ -59,11 +55,21 @@ pub struct Config { /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. + #[cfg_attr( + docsrs, + doc(cfg(any(feature = "config-all", feature = "config-macros"))) + )] + #[cfg(feature = "config-macros")] pub macros: macros::Config, /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. 
+ #[cfg_attr( + docsrs, + doc(cfg(any(feature = "config-all", feature = "config-migrate"))) + )] + #[cfg(feature = "config-migrate")] pub migrate: migrate::Config, } @@ -84,15 +90,13 @@ pub enum ConfigError { std::env::VarError, ), - /// No configuration file was found. Not necessarily fatal. - #[error("config file {path:?} not found")] - NotFound { path: PathBuf }, - /// An I/O error occurred while attempting to read the config file at `path`. /// - /// If the error is [`io::ErrorKind::NotFound`], [`Self::NotFound`] is returned instead. + /// This includes [`io::ErrorKind::NotFound`]. + /// + /// [`Self::not_found_path()`] will return the path if the file was not found. #[error("error reading config file {path:?}")] - Io { + Read { path: PathBuf, #[source] error: io::Error, @@ -101,39 +105,22 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. - /// - /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { path: PathBuf, - /// Type-erased [`toml::de::Error`]. #[source] - error: Box, + error: toml::de::Error, }, - - /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. - #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] - ParseDisabled { path: PathBuf }, } impl ConfigError { - /// Create a [`ConfigError`] from a [`std::io::Error`]. - /// - /// Maps to either `NotFound` or `Io`. - pub fn from_io(path: PathBuf, error: io::Error) -> Self { - if error.kind() == io::ErrorKind::NotFound { - Self::NotFound { path } - } else { - Self::Io { path, error } - } - } - /// If this error means the file was not found, return the path that was attempted. pub fn not_found_path(&self) -> Option<&Path> { - if let Self::NotFound { path } = self { - Some(path) - } else { - None + match self { + ConfigError::Read { path, error } if error.kind() == io::ErrorKind::NotFound => { + Some(path) + } + _ => None, } } } @@ -152,7 +139,17 @@ impl Config { /// ### Panics /// If the file exists but an unrecoverable error was encountered while parsing it. pub fn from_crate() -> &'static Self { - Self::read_with_or_default(get_crate_path) + Self::try_from_crate().unwrap_or_else(|e| { + if let Some(path) = e.not_found_path() { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found (error: {e})"); + CACHE.get_or_init(Config::default) + } else { + // In the case of migrations, + // we can't proceed with defaults as they may be completely wrong. + panic!("failed to read sqlx config: {e}") + } + }) } /// Get the cached config, or to read `$CARGO_MANIFEST_DIR/sqlx.toml`. @@ -161,7 +158,11 @@ impl Config { /// /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. pub fn try_from_crate() -> Result<&'static Self, ConfigError> { - Self::try_read_with(get_crate_path) + Self::try_get_with(|| { + let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); + path.push("sqlx.toml"); + Ok(path) + }) } /// Get the cached config, or attempt to read `sqlx.toml` from the current working directory. @@ -170,7 +171,7 @@ impl Config { /// /// Errors if the config file does not exist, or could not be read. 
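The loading API above lives in `sqlx-core` and is internal, but the control flow it encodes is worth spelling out: a missing `sqlx.toml` is non-fatal, while an I/O or parse error is. A sketch of a hypothetical consumer, mirroring what `Config::from_crate()` does internally:

```rust
use sqlx_core::config::Config;

fn describe_config() {
    match Config::try_from_crate() {
        Ok(config) => {
            // Honour a renamed database URL variable, falling back to the default.
            let var = config
                .common
                .database_url_var
                .as_deref()
                .unwrap_or("DATABASE_URL");
            println!("reading the database URL from {var}");
        }
        Err(error) => match error.not_found_path() {
            // No config file: proceed with defaults.
            Some(path) => println!("no config file at {}, using defaults", path.display()),
            // I/O or TOML errors are fatal, as defaults could be completely wrong.
            None => panic!("failed to read sqlx config: {error}"),
        },
    }
}
```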
pub fn try_from_current_dir() -> Result<&'static Self, ConfigError> { - Self::try_read_with(|| Ok("sqlx.toml".into())) + Self::try_get_with(|| Ok("sqlx.toml".into())) } /// Get the cached config, or attempt to read it from the path returned by the closure. @@ -178,7 +179,7 @@ impl Config { /// On success, the config is cached in a `static` and returned by future calls. /// /// Errors if the config file does not exist, or could not be read. - pub fn try_read_with( + pub fn try_get_with( make_path: impl FnOnce() -> Result, ) -> Result<&'static Self, ConfigError> { CACHE.get_or_try_init(|| { @@ -187,43 +188,12 @@ impl Config { }) } - /// Get the cached config, or attempt to read it from the path returned by the closure. - /// - /// On success, the config is cached in a `static` and returned by future calls. - /// - /// Returns `Config::default()` if the file does not exist. - pub fn read_with_or_default( - make_path: impl FnOnce() -> Result, - ) -> &'static Self { - CACHE.get_or_init(|| { - match make_path().and_then(Self::read_from) { - Ok(config) => config, - Err(ConfigError::NotFound { path }) => { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found"); - Config::default() - } - // FATAL ERRORS BELOW: - // In the case of migrations, - // we can't proceed with defaults as they may be completely wrong. - Err(e @ ConfigError::ParseDisabled { .. }) => { - // Only returned if the file exists but the feature is not enabled. - panic!("{e}") - } - Err(e) => { - panic!("failed to read sqlx config: {e}") - } - } - }) - } - - #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. let toml_s = match std::fs::read_to_string(&path) { Ok(toml) => toml, Err(error) => { - return Err(ConfigError::from_io(path, error)); + return Err(ConfigError::Read { path, error }); } }; @@ -231,24 +201,6 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { - path, - error: Box::new(error), - }) - } - - #[cfg(not(feature = "sqlx-toml"))] - fn read_from(path: PathBuf) -> Result { - match path.try_exists() { - Ok(true) => Err(ConfigError::ParseDisabled { path }), - Ok(false) => Err(ConfigError::NotFound { path }), - Err(e) => Err(ConfigError::from_io(path, e)), - } + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error }) } } - -fn get_crate_path() -> Result { - let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); - path.push("sqlx.toml"); - Ok(path) -} diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 787c3456db..62229e596a 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -13,37 +13,26 @@ # This is used by both the macros and `sqlx-cli`. # # If not specified, defaults to `DATABASE_URL` -database-url-var = "FOO_DATABASE_URL" +database_url_var = "FOO_DATABASE_URL" [common.drivers.sqlite] # Load extensions into SQLite when running macros or migrations # # Defaults to an empty list, which has no effect. -load-extensions = ["uuid", "vsv"] +load_extensions = ["uuid", "vsv"] ############################################################################################### # Configuration for the `query!()` family of macros. [macros] - -[macros.preferred-crates] # Force the macros to use the `chrono` crate for date/time types, even if `time` is enabled. 
# # Defaults to "inferred": use whichever crate is enabled (`time` takes precedence over `chrono`). -date-time = "chrono" +datetime_crate = "chrono" # Or, ensure the macros always prefer `time` # in case new date/time crates are added in the future: -# date-time = "time" - -# Force the macros to use the `rust_decimal` crate for `NUMERIC`, even if `bigdecimal` is enabled. -# -# Defaults to "inferred": use whichever crate is enabled (`bigdecimal` takes precedence over `rust_decimal`). -numeric = "rust_decimal" - -# Or, ensure the macros always prefer `bigdecimal` -# in case new decimal crates are added in the future: -# numeric = "bigdecimal" +# datetime_crate = "time" # Set global overrides for mapping SQL types to Rust types. # @@ -55,11 +44,9 @@ numeric = "rust_decimal" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.type-overrides] +[macros.type_overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) -# Note: currently, the case of the type name MUST match. -# Built-in types are spelled in all-uppercase to match SQL convention. -'UUID' = "crate::types::MyUuid" +'uuid' = "crate::types::MyUuid" # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -86,7 +73,7 @@ numeric = "rust_decimal" # Quoted schema and type name '"Foo"."Bar"' = "crate::schema::foo::Bar" -# Set per-table and per-column overrides for mapping SQL types to Rust types. +# Set per-column overrides for mapping SQL types to Rust types. # # Note: table name is required in the header. # @@ -95,7 +82,7 @@ numeric = "rust_decimal" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.table-overrides.'foo'] +[macros.column_overrides.'foo'] # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: 'bar' = "crate::types::Bar" @@ -107,17 +94,17 @@ numeric = "rust_decimal" # "Bar" = "crate::types::Bar" # Table name may be quoted (note the wrapping single-quotes) -[macros.table-overrides.'"Foo"'] +[macros.column_overrides.'"Foo"'] 'bar' = "crate::types::Bar" '"Bar"' = "crate::types::Bar" # Table name may also be schema-qualified. # Note how the dot is inside the quotes. -[macros.table-overrides.'my_schema.my_table'] +[macros.column_overrides.'my_schema.my_table'] 'my_column' = "crate::types::MyType" # Quoted schema, table, and column names -[macros.table-overrides.'"My Schema"."My Table"'] +[macros.column_overrides.'"My Schema"."My Table"'] '"My Column"' = "crate::types::MyType" ############################################################################################### @@ -149,12 +136,12 @@ numeric = "rust_decimal" # You should create the new table as a copy of the existing migrations table (with contents!), # and be sure all instances of your application have been migrated to the new # table before deleting the old one. -table-name = "foo._sqlx_migrations" +table_name = "foo._sqlx_migrations" # Override the directory used for migrations files. # # Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`. -migrations-dir = "foo/migrations" +migrations_dir = "foo/migrations" # Specify characters that should be ignored when hashing migrations. 
# @@ -167,34 +154,28 @@ migrations-dir = "foo/migrations" # change the output of the hash. # # This may require manual rectification for deployed databases. -# ignored-chars = [] +# ignored_chars = [] # Ignore Carriage Returns (`` | `\r`) # Note that the TOML format requires double-quoted strings to process escapes. -# ignored-chars = ["\r"] +# ignored_chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) -# Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) -# -# U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, -# where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark -ignored-chars = [" ", "\t", "\r", "\n", "\uFEFF"] +ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF -# Set default options for new migrations. -[migrate.defaults] # Specify reversible migrations by default (for `sqlx migrate create`). # # Defaults to "inferred": uses the type of the last migration, or "simple" otherwise. -migration-type = "reversible" +default_type = "reversible" # Specify simple (non-reversible) migrations by default. -# migration-type = "simple" +# default_type = "simple" # Specify sequential versioning by default (for `sqlx migrate create`). # # Defaults to "inferred": guesses the versioning scheme from the latest migrations, # or "timestamp" otherwise. -migration-versioning = "sequential" +default_versioning = "sequential" # Specify timestamp versioning by default. -# migration-versioning = "timestamp" +# default_versioning = "timestamp" diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 0b0b590919..bf042069a2 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -8,7 +8,11 @@ fn reference_parses_as_config() { .unwrap_or_else(|e| panic!("expected reference.toml to parse as Config: {e}")); assert_common_config(&config.common); + + #[cfg(feature = "config-macros")] assert_macros_config(&config.macros); + + #[cfg(feature = "config-migrate")] assert_migrate_config(&config.migrate); } @@ -16,16 +20,14 @@ fn assert_common_config(config: &config::common::Config) { assert_eq!(config.database_url_var.as_deref(), Some("FOO_DATABASE_URL")); } +#[cfg(feature = "config-macros")] fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; - assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); - assert_eq!(config.preferred_crates.numeric, NumericCrate::RustDecimal); + assert_eq!(config.datetime_crate, DateTimeCrate::Chrono); // Type overrides // Don't need to cover everything, just some important canaries. 
- assert_eq!(config.type_override("UUID"), Some("crate::types::MyUuid")); - assert_eq!(config.type_override("foo"), Some("crate::types::Foo")); assert_eq!(config.type_override(r#""Bar""#), Some("crate::types::Bar"),); @@ -72,22 +74,17 @@ fn assert_macros_config(config: &config::macros::Config) { ); } +#[cfg(feature = "config-migrate")] fn assert_migrate_config(config: &config::migrate::Config) { use config::migrate::*; assert_eq!(config.table_name.as_deref(), Some("foo._sqlx_migrations")); assert_eq!(config.migrations_dir.as_deref(), Some("foo/migrations")); - let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n', '\u{FEFF}']); + let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n']); assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!( - config.defaults.migration_type, - DefaultMigrationType::Reversible - ); - assert_eq!( - config.defaults.migration_versioning, - DefaultVersioning::Sequential - ); + assert_eq!(config.default_type, DefaultMigrationType::Reversible); + assert_eq!(config.default_versioning, DefaultVersioning::Sequential); } diff --git a/sqlx-core/src/lib.rs b/sqlx-core/src/lib.rs index 09f2900ba8..8b831ecaff 100644 --- a/sqlx-core/src/lib.rs +++ b/sqlx-core/src/lib.rs @@ -91,6 +91,7 @@ pub mod any; #[cfg(feature = "migrate")] pub mod testing; +#[cfg(feature = "config")] pub mod config; pub use error::{Error, Result}; diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 02b773af07..3ed1ae4072 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -27,7 +27,9 @@ derive = [] macros = [] migrate = ["sqlx-core/migrate"] -sqlx-toml = ["sqlx-core/sqlx-toml"] +config = ["sqlx-core/config"] +config-macros = ["config", "sqlx-core/config-macros"] +config-migrate = ["config", "sqlx-core/config-migrate"] # database mysql = ["sqlx-mysql"] diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 23079a3810..49117afac7 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -28,7 +28,8 @@ derive = ["sqlx-macros-core/derive"] macros = ["sqlx-macros-core/macros"] migrate = ["sqlx-macros-core/migrate"] -sqlx-toml = ["sqlx-macros-core/sqlx-toml"] +config-macros = ["sqlx-macros-core/config-macros"] +config-migrate = ["sqlx-macros-core/config-migrate"] # database mysql = ["sqlx-macros-core/mysql"] diff --git a/src/lib.rs b/src/lib.rs index ce34f0e851..2e801540dd 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -176,33 +176,3 @@ pub mod prelude { #[cfg(feature = "_unstable-doc")] pub use sqlx_core::config; - -// NOTE: APIs exported in this module are SemVer-exempt. -#[doc(hidden)] -pub mod _unstable { - pub use sqlx_core::config; -} - -#[doc(hidden)] -#[cfg_attr( - all(feature = "chrono", feature = "time"), - deprecated = "SQLx has both `chrono` and `time` features enabled, \ - which presents an ambiguity when the `query!()` macros are mapping date/time types. \ - The `query!()` macros prefer types from `time` by default, \ - but this behavior should not be relied upon; \ - to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ - https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.date_time" -)] -pub fn warn_on_ambiguous_inferred_date_time_crate() {} - -#[doc(hidden)] -#[cfg_attr( - all(feature = "bigdecimal", feature = "rust_decimal"), - deprecated = "SQLx has both `bigdecimal` and `rust_decimal` features enabled, \ - which presents an ambiguity when the `query!()` macros are mapping `NUMERIC`. 
\ - The `query!()` macros prefer `bigdecimal::BigDecimal` by default, \ - but this behavior should not be relied upon; \ - to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ - https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.numeric" -)] -pub fn warn_on_ambiguous_inferred_numeric_crate() {} From f5ed7215cb14ab0c2f33c6d43550f1548cf738e1 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 9 Sep 2024 00:24:01 -0700 Subject: [PATCH 23/78] feat: add support for ignored_chars config to sqlx_core::migrate --- sqlx-core/src/migrate/migration.rs | 2 +- sqlx-core/src/migrate/migrator.rs | 15 +------------ sqlx-core/src/migrate/source.rs | 34 ++++++++++++------------------ 3 files changed, 16 insertions(+), 35 deletions(-) diff --git a/sqlx-core/src/migrate/migration.rs b/sqlx-core/src/migrate/migration.rs index 1f1175ce58..df7a11d78b 100644 --- a/sqlx-core/src/migrate/migration.rs +++ b/sqlx-core/src/migrate/migration.rs @@ -76,7 +76,7 @@ pub fn checksum_fragments<'a>(fragments: impl Iterator) -> Vec, +} #[doc(hidden)] pub create_schemas: Cow<'static, [Cow<'static, str>]>, @@ -38,7 +39,6 @@ impl Migrator { no_tx: false, locking: true, table_name: Cow::Borrowed("_sqlx_migrations"), - create_schemas: Cow::Borrowed(&[]), }; /// Creates a new instance with the given source. @@ -88,19 +88,6 @@ impl Migrator { self } - /// Add a schema name to be created if it does not already exist. - /// - /// May be used with [`Self::dangerous_set_table_name()`] to place the migrations table - /// in a new schema without requiring it to exist first. - /// - /// ### Note: Support Depends on Database - /// SQLite cannot create new schemas without attaching them to a database file, - /// the path of which must be specified separately in an [`ATTACH DATABASE`](https://www.sqlite.org/lang_attach.html) command. - pub fn create_schema(&mut self, schema_name: impl Into>) -> &Self { - self.create_schemas.to_mut().push(schema_name.into()); - self - } - /// Specify whether applied migrations that are missing from the resolved migrations should be ignored. pub fn set_ignore_missing(&mut self, ignore_missing: bool) -> &mut Self { self.ignore_missing = ignore_missing; diff --git a/sqlx-core/src/migrate/source.rs b/sqlx-core/src/migrate/source.rs index 9c2ef7719b..6c3d780bb3 100644 --- a/sqlx-core/src/migrate/source.rs +++ b/sqlx-core/src/migrate/source.rs @@ -52,9 +52,9 @@ impl MigrationSource<'static> for PathBuf { } /// A [`MigrationSource`] implementation with configurable resolution. -/// +/// /// `S` may be `PathBuf`, `&Path` or any type that implements `Into`. -/// +/// /// See [`ResolveConfig`] for details. #[derive(Debug)] pub struct ResolveWith(pub S, pub ResolveConfig); @@ -97,20 +97,20 @@ impl ResolveConfig { } /// Ignore a character when hashing migrations. - /// + /// /// The migration SQL string itself will still contain the character, /// but it will not be included when calculating the checksum. - /// + /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. - /// + /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. 
/// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_char(&mut self, c: char) -> &mut Self { self.ignored_chars.insert(c); self @@ -123,21 +123,21 @@ impl ResolveConfig { /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_chars(&mut self, chars: impl IntoIterator) -> &mut Self { self.ignored_chars.extend(chars); self } /// Iterate over the set of ignored characters. - /// + /// /// Duplicate `char`s are not included. pub fn ignored_chars(&self) -> impl Iterator + '_ { self.ignored_chars.iter().copied() @@ -266,17 +266,11 @@ fn checksum_with(sql: &str, ignored_chars: &BTreeSet) -> Vec { fn checksum_with_ignored_chars() { // Ensure that `checksum_with` returns the same digest for a given set of ignored chars // as the equivalent string with the characters removed. - let ignored_chars = [ - ' ', '\t', '\r', '\n', - // Zero-width non-breaking space (ZWNBSP), often added as a magic-number at the beginning - // of UTF-8 encoded files as a byte-order mark (BOM): - // https://en.wikipedia.org/wiki/Byte_order_mark - '\u{FEFF}', - ]; + let ignored_chars = [' ', '\t', '\r', '\n']; // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql` let sql = "\ - \u{FEFF}create table comment (\r\n\ + create table comment (\r\n\ \tcomment_id uuid primary key default gen_random_uuid(),\r\n\ \tpost_id uuid not null references post(post_id),\r\n\ \tuser_id uuid not null references \"user\"(user_id),\r\n\ From 0a7dfea03ba26c73fcdb4c2065d15e7d2f24e921 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 9 Sep 2024 00:49:20 -0700 Subject: [PATCH 24/78] chore: test ignored_chars with `U+FEFF` (ZWNBSP/BOM) https://en.wikipedia.org/wiki/Byte_order_mark --- sqlx-core/src/config/reference.toml | 6 ++++- sqlx-core/src/config/tests.rs | 2 +- sqlx-core/src/migrate/migration.rs | 2 +- sqlx-core/src/migrate/source.rs | 34 +++++++++++++++++------------ 4 files changed, 27 insertions(+), 17 deletions(-) diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 62229e596a..3f9fd83a03 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -161,7 +161,11 @@ migrations_dir = "foo/migrations" # ignored_chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) -ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF +# Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) +# +# U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, +# where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark +ignored_chars = [" ", "\t", "\r", "\n", "\uFEFF"] # Specify reversible migrations by default (for `sqlx migrate create`). 
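If a project only needs to tolerate line-ending and BOM differences (the two cases called out above), a narrower, purely illustrative setting keeps the checksum sensitive to every other whitespace edit:

```toml
[migrate]
# Carriage returns and the U+FEFF byte-order mark only
ignored_chars = ["\r", "\uFEFF"]
```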
# diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index bf042069a2..521e7074b3 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -81,7 +81,7 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.table_name.as_deref(), Some("foo._sqlx_migrations")); assert_eq!(config.migrations_dir.as_deref(), Some("foo/migrations")); - let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n']); + let ignored_chars = BTreeSet::from([' ', '\t', '\r', '\n', '\u{FEFF}']); assert_eq!(config.ignored_chars, ignored_chars); diff --git a/sqlx-core/src/migrate/migration.rs b/sqlx-core/src/migrate/migration.rs index df7a11d78b..1f1175ce58 100644 --- a/sqlx-core/src/migrate/migration.rs +++ b/sqlx-core/src/migrate/migration.rs @@ -76,7 +76,7 @@ pub fn checksum_fragments<'a>(fragments: impl Iterator) -> Vec for PathBuf { } /// A [`MigrationSource`] implementation with configurable resolution. -/// +/// /// `S` may be `PathBuf`, `&Path` or any type that implements `Into`. -/// +/// /// See [`ResolveConfig`] for details. #[derive(Debug)] pub struct ResolveWith(pub S, pub ResolveConfig); @@ -97,20 +97,20 @@ impl ResolveConfig { } /// Ignore a character when hashing migrations. - /// + /// /// The migration SQL string itself will still contain the character, /// but it will not be included when calculating the checksum. - /// + /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. - /// + /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_char(&mut self, c: char) -> &mut Self { self.ignored_chars.insert(c); self @@ -123,21 +123,21 @@ impl ResolveConfig { /// /// This can be used to ignore whitespace characters so changing formatting /// does not change the checksum. - /// + /// /// Adding the same `char` more than once is a no-op. /// /// ### Note: Changes Migration Checksum - /// This will change the checksum of resolved migrations, + /// This will change the checksum of resolved migrations, /// which may cause problems with existing deployments. /// - /// **Use at your own risk.** + /// **Use at your own risk.** pub fn ignore_chars(&mut self, chars: impl IntoIterator) -> &mut Self { self.ignored_chars.extend(chars); self } /// Iterate over the set of ignored characters. - /// + /// /// Duplicate `char`s are not included. pub fn ignored_chars(&self) -> impl Iterator + '_ { self.ignored_chars.iter().copied() @@ -266,11 +266,17 @@ fn checksum_with(sql: &str, ignored_chars: &BTreeSet) -> Vec { fn checksum_with_ignored_chars() { // Ensure that `checksum_with` returns the same digest for a given set of ignored chars // as the equivalent string with the characters removed. 
- let ignored_chars = [' ', '\t', '\r', '\n']; + let ignored_chars = [ + ' ', '\t', '\r', '\n', + // Zero-width non-breaking space (ZWNBSP), often added as a magic-number at the beginning + // of UTF-8 encoded files as a byte-order mark (BOM): + // https://en.wikipedia.org/wiki/Byte_order_mark + '\u{FEFF}', + ]; // Copied from `examples/postgres/axum-social-with-tests/migrations/3_comment.sql` let sql = "\ - create table comment (\r\n\ + \u{FEFF}create table comment (\r\n\ \tcomment_id uuid primary key default gen_random_uuid(),\r\n\ \tpost_id uuid not null references post(post_id),\r\n\ \tuser_id uuid not null references \"user\"(user_id),\r\n\ From 2a35d6a583f68d61ec9be44ee7e3bf830950dfb0 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 18 Sep 2024 01:54:22 -0700 Subject: [PATCH 25/78] refactor: make `Config` always compiled simplifies usage while still making parsing optional for less generated code --- Cargo.toml | 10 ++-- sqlx-cli/Cargo.toml | 7 ++- sqlx-core/Cargo.toml | 12 ++-- sqlx-core/src/config/common.rs | 9 ++- sqlx-core/src/config/macros.rs | 12 ++-- sqlx-core/src/config/migrate.rs | 20 +++++-- sqlx-core/src/config/mod.rs | 95 ++++++++++++++++++++----------- sqlx-core/src/config/tests.rs | 2 - sqlx-core/src/lib.rs | 1 - sqlx-macros-core/Cargo.toml | 4 +- sqlx-macros-core/src/query/mod.rs | 7 ++- sqlx-macros/Cargo.toml | 3 +- 12 files changed, 117 insertions(+), 65 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 20a2976738..ca799fb03b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -60,16 +60,14 @@ features = ["all-databases", "_unstable-all-types", "_unstable-doc", "sqlite-pre rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["any", "macros", "migrate", "json", "config-all"] +default = ["any", "macros", "migrate", "json", "sqlx-toml"] derive = ["sqlx-macros/derive"] macros = ["derive", "sqlx-macros/macros"] migrate = ["sqlx-core/migrate", "sqlx-macros?/migrate", "sqlx-mysql?/migrate", "sqlx-postgres?/migrate", "sqlx-sqlite?/migrate"] -# Enable parsing of `sqlx.toml` for configuring macros, migrations, or both. -config-macros = ["sqlx-macros?/config-macros"] -config-migrate = ["sqlx-macros?/config-migrate"] -config-all = ["config-macros", "config-migrate"] +# Enable parsing of `sqlx.toml` for configuring macros and migrations. +sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml"] # intended mainly for CI and docs all-databases = ["mysql", "sqlite", "postgres", "any"] @@ -87,7 +85,7 @@ _unstable-all-types = [ "bstr" ] # Render documentation that wouldn't otherwise be shown (e.g. `sqlx_core::config`). 
-_unstable-doc = ["config-all", "sqlx-core/_unstable-doc"] +_unstable-doc = [] # Base runtime features without TLS runtime-async-std = ["_rt-async-std", "sqlx-core/_rt-async-std", "sqlx-macros?/_rt-async-std"] diff --git a/sqlx-cli/Cargo.toml b/sqlx-cli/Cargo.toml index 0cb428e6aa..5a57e46720 100644 --- a/sqlx-cli/Cargo.toml +++ b/sqlx-cli/Cargo.toml @@ -55,8 +55,8 @@ features = [ [features] default = ["postgres", "sqlite", "mysql", "native-tls", "completions", "sqlx-toml"] -rustls = ["sqlx/runtime-tokio-rustls"] -native-tls = ["sqlx/runtime-tokio-native-tls"] +rustls = ["sqlx/tls-rustls"] +native-tls = ["sqlx/tls-native-tls"] # databases mysql = ["sqlx/mysql"] @@ -71,6 +71,9 @@ completions = ["dep:clap_complete"] sqlx-toml = ["sqlx/sqlx-toml"] +# Conditional compilation only +_sqlite = [] + [dev-dependencies] assert_cmd = "2.0.11" tempfile = "3.10.1" diff --git a/sqlx-core/Cargo.toml b/sqlx-core/Cargo.toml index c12de18889..bf57849733 100644 --- a/sqlx-core/Cargo.toml +++ b/sqlx-core/Cargo.toml @@ -13,7 +13,7 @@ features = ["offline"] [features] default = [] -migrate = ["sha2", "crc", "config-migrate"] +migrate = ["sha2", "crc"] any = [] @@ -32,11 +32,13 @@ _tls-none = [] # support offline/decoupled building (enables serialization of `Describe`) offline = ["serde", "either/serde"] -config = ["serde", "toml/parse"] -config-macros = ["config"] -config-migrate = ["config"] +# Enable parsing of `sqlx.toml`. +# For simplicity, the `config` module is always enabled, +# but disabling this disables the `serde` derives and the `toml` crate, +# which is a good bit less code to compile if the feature isn't being used. +sqlx-toml = ["serde", "toml/parse"] -_unstable-doc = ["config-macros", "config-migrate"] +_unstable-doc = ["sqlx-toml"] [dependencies] # Runtimes diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 95dd388071..e4845b4606 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,5 +1,6 @@ /// Configuration shared by multiple components. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] pub struct Config { /// Override the database URL environment variable. /// @@ -71,3 +72,9 @@ pub struct SQLite { /// ``` pub load_extensions: Vec, } + +impl Config { + pub fn database_url_var(&self) -> &str { + self.database_url_var.as_deref().unwrap_or("DATABASE_URL") + } +} \ No newline at end of file diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 5edd30dc15..142f059da4 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. -#[derive(Debug, Default, serde::Deserialize)] -#[serde(default)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] pub struct Config { /// Specify the crate to use for mapping date/time types to Rust. /// @@ -235,8 +235,12 @@ pub struct Config { } /// The crate to use for mapping date/time types to Rust. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DateTimeCrate { /// Use whichever crate is enabled (`time` then `chrono`). 
#[default] diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 5878f9a24f..efc03a0155 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -12,8 +12,8 @@ use std::collections::BTreeSet; /// if the proper precautions are not taken. /// /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. -#[derive(Debug, Default, serde::Deserialize)] -#[serde(default)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] pub struct Config { /// Override the name of the table used to track executed migrations. /// @@ -118,8 +118,12 @@ pub struct Config { } /// The default type of migration that `sqlx migrate create` should create by default. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DefaultMigrationType { /// Create the same migration type as that of the latest existing migration, /// or `Simple` otherwise. @@ -134,8 +138,12 @@ pub enum DefaultMigrationType { } /// The default scheme that `sqlx migrate create` should use for version integers. -#[derive(Debug, Default, PartialEq, Eq, serde::Deserialize)] -#[serde(rename_all = "snake_case")] +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] pub enum DefaultVersioning { /// Infer the versioning scheme from existing migrations: /// diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 979477241f..3bbde5c2f1 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -7,6 +7,7 @@ //! //! See the [reference][`_reference`] for the full `sqlx.toml` file. +use std::error::Error; use std::fmt::Debug; use std::io; use std::path::{Path, PathBuf}; @@ -23,13 +24,11 @@ pub mod common; /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. -#[cfg(feature = "config-macros")] pub mod macros; /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. -#[cfg(feature = "config-migrate")] pub mod migrate; /// Reference for `sqlx.toml` files @@ -41,11 +40,12 @@ pub mod migrate; /// ``` pub mod _reference {} -#[cfg(test)] +#[cfg(all(test, feature = "sqlx-toml"))] mod tests; /// The parsed structure of a `sqlx.toml` file. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] pub struct Config { /// Configuration shared by multiple components. /// @@ -55,21 +55,11 @@ pub struct Config { /// Configuration for the `query!()` family of macros. /// /// See [`macros::Config`] for details. - #[cfg_attr( - docsrs, - doc(cfg(any(feature = "config-all", feature = "config-macros"))) - )] - #[cfg(feature = "config-macros")] pub macros: macros::Config, /// Configuration for migrations when executed using `sqlx::migrate!()` or through `sqlx-cli`. /// /// See [`migrate::Config`] for details. - #[cfg_attr( - docsrs, - doc(cfg(any(feature = "config-all", feature = "config-migrate"))) - )] - #[cfg(feature = "config-migrate")] pub migrate: migrate::Config, } @@ -90,13 +80,17 @@ pub enum ConfigError { std::env::VarError, ), + /// No configuration file was found. Not necessarily fatal. 
+ #[error("config file {path:?} not found")] + NotFound { + path: PathBuf, + }, + /// An I/O error occurred while attempting to read the config file at `path`. /// - /// This includes [`io::ErrorKind::NotFound`]. - /// - /// [`Self::not_found_path()`] will return the path if the file was not found. + /// If the error is [`io::ErrorKind::NotFound`], [`Self::NotFound`] is returned instead. #[error("error reading config file {path:?}")] - Read { + Io { path: PathBuf, #[source] error: io::Error, @@ -105,22 +99,41 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. + /// + /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { path: PathBuf, + /// Type-erased [`toml::de::Error`]. #[source] - error: toml::de::Error, + error: Box, + }, + + /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. + #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] + ParseDisabled { + path: PathBuf }, } impl ConfigError { + /// Create a [`ConfigError`] from a [`std::io::Error`]. + /// + /// Maps to either `NotFound` or `Io`. + pub fn from_io(path: PathBuf, error: io::Error) -> Self { + if error.kind() == io::ErrorKind::NotFound { + Self::NotFound { path } + } else { + Self::Io { path, error } + } + } + /// If this error means the file was not found, return the path that was attempted. pub fn not_found_path(&self) -> Option<&Path> { - match self { - ConfigError::Read { path, error } if error.kind() == io::ErrorKind::NotFound => { - Some(path) - } - _ => None, + if let Self::NotFound { path } = self { + Some(path) + } else { + None } } } @@ -140,14 +153,22 @@ impl Config { /// If the file exists but an unrecoverable error was encountered while parsing it. pub fn from_crate() -> &'static Self { Self::try_from_crate().unwrap_or_else(|e| { - if let Some(path) = e.not_found_path() { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found (error: {e})"); - CACHE.get_or_init(Config::default) - } else { + match e { + ConfigError::NotFound { path } => { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found"); + CACHE.get_or_init(Config::default) + } + // FATAL ERRORS BELOW: // In the case of migrations, // we can't proceed with defaults as they may be completely wrong. - panic!("failed to read sqlx config: {e}") + e @ ConfigError::ParseDisabled { .. } => { + // Only returned if the file exists but the feature is not enabled. + panic!("{e}") + } + e => { + panic!("failed to read sqlx config: {e}") + } } }) } @@ -188,12 +209,13 @@ impl Config { }) } + #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. 
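        // Instead, the whole file is read into a string up front and handed to `toml::from_str()` below.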
let toml_s = match std::fs::read_to_string(&path) { Ok(toml) => toml, Err(error) => { - return Err(ConfigError::Read { path, error }); + return Err(ConfigError::from_io(path, error)); } }; @@ -201,6 +223,15 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error }) + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error: Box::new(error) }) + } + + #[cfg(not(feature = "sqlx-toml"))] + fn read_from(path: PathBuf) -> Result { + match path.try_exists() { + Ok(true) => Err(ConfigError::ParseDisabled { path }), + Ok(false) => Err(ConfigError::NotFound { path }), + Err(e) => Err(ConfigError::from_io(path, e)) + } } } diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 521e7074b3..e5033bb459 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -20,7 +20,6 @@ fn assert_common_config(config: &config::common::Config) { assert_eq!(config.database_url_var.as_deref(), Some("FOO_DATABASE_URL")); } -#[cfg(feature = "config-macros")] fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; @@ -74,7 +73,6 @@ fn assert_macros_config(config: &config::macros::Config) { ); } -#[cfg(feature = "config-migrate")] fn assert_migrate_config(config: &config::migrate::Config) { use config::migrate::*; diff --git a/sqlx-core/src/lib.rs b/sqlx-core/src/lib.rs index 8b831ecaff..09f2900ba8 100644 --- a/sqlx-core/src/lib.rs +++ b/sqlx-core/src/lib.rs @@ -91,7 +91,6 @@ pub mod any; #[cfg(feature = "migrate")] pub mod testing; -#[cfg(feature = "config")] pub mod config; pub use error::{Error, Result}; diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 3ed1ae4072..02b773af07 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -27,9 +27,7 @@ derive = [] macros = [] migrate = ["sqlx-core/migrate"] -config = ["sqlx-core/config"] -config-macros = ["config", "sqlx-core/config-macros"] -config-migrate = ["config", "sqlx-core/config-migrate"] +sqlx-toml = ["sqlx-core/sqlx-toml"] # database mysql = ["sqlx-mysql"] diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index 198a3f236b..8c0016346f 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -17,6 +17,7 @@ use crate::query::input::RecordType; use either::Either; use sqlx_core::config::Config; use url::Url; +use sqlx_core::config::Config; mod args; mod data; @@ -124,7 +125,11 @@ fn init_metadata(manifest_dir: &String) -> crate::Result { .map(|s| s.eq_ignore_ascii_case("true") || s == "1") .unwrap_or(false); - let var_name = Config::from_crate().common.database_url_var(); + let var_name = Config::from_crate() + .common + .database_url_var(); + + let database_url = env(var_name).ok().or(database_url); let database_url = env(var_name).ok(); diff --git a/sqlx-macros/Cargo.toml b/sqlx-macros/Cargo.toml index 49117afac7..23079a3810 100644 --- a/sqlx-macros/Cargo.toml +++ b/sqlx-macros/Cargo.toml @@ -28,8 +28,7 @@ derive = ["sqlx-macros-core/derive"] macros = ["sqlx-macros-core/macros"] migrate = ["sqlx-macros-core/migrate"] -config-macros = ["sqlx-macros-core/config-macros"] -config-migrate = ["sqlx-macros-core/config-migrate"] +sqlx-toml = ["sqlx-macros-core/sqlx-toml"] # database mysql = ["sqlx-macros-core/mysql"] From 2bed2eecfbd4de22f6aa2f14d327f08b9f635b0b Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 
18 Sep 2024 01:55:59 -0700 Subject: [PATCH 26/78] refactor: add origin information to `Column` --- sqlx-core/src/column.rs | 22 ++++----- sqlx-mysql/src/connection/executor.rs | 4 +- sqlx-mysql/src/protocol/text/column.rs | 2 +- sqlx-postgres/src/column.rs | 3 +- sqlx-postgres/src/connection/describe.rs | 59 +++++++++++++++++++++++- sqlx-postgres/src/connection/mod.rs | 1 + sqlx-sqlite/src/column.rs | 2 +- sqlx-sqlite/src/connection/describe.rs | 2 + sqlx-sqlite/src/statement/handle.rs | 33 +++++++------ 9 files changed, 93 insertions(+), 35 deletions(-) diff --git a/sqlx-core/src/column.rs b/sqlx-core/src/column.rs index fddc048c4b..7483375765 100644 --- a/sqlx-core/src/column.rs +++ b/sqlx-core/src/column.rs @@ -23,17 +23,15 @@ pub trait Column: 'static + Send + Sync + Debug { fn type_info(&self) -> &::TypeInfo; /// If this column comes from a table, return the table and original column name. - /// + /// /// Returns [`ColumnOrigin::Expression`] if the column is the result of an expression /// or else the source table could not be determined. - /// + /// /// Returns [`ColumnOrigin::Unknown`] if the database driver does not have that information, /// or has not overridden this method. - // This method returns an owned value instead of a reference, + // This method returns an owned value instead of a reference, // to give the implementor more flexibility. - fn origin(&self) -> ColumnOrigin { - ColumnOrigin::Unknown - } + fn origin(&self) -> ColumnOrigin { ColumnOrigin::Unknown } } /// A [`Column`] that originates from a table. @@ -46,20 +44,20 @@ pub struct TableColumn { pub name: Arc, } -/// The possible statuses for our knowledge of the origin of a [`Column`]. +/// The possible statuses for our knowledge of the origin of a [`Column`]. #[derive(Debug, Clone, Default)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] pub enum ColumnOrigin { - /// The column is known to originate from a table. - /// - /// Included is the table name and original column name. + /// The column is known to originate from a table. + /// + /// Included is the table name and original column name. Table(TableColumn), /// The column originates from an expression, or else its origin could not be determined. Expression, /// The database driver does not know the column origin at this time. - /// + /// /// This may happen if: - /// * The connection is in the middle of executing a query, + /// * The connection is in the middle of executing a query, /// and cannot query the catalog to fetch this information. /// * The connection does not have access to the database catalog. /// * The implementation of [`Column`] did not override [`Column::origin()`]. 
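For orientation, a rough sketch of how a caller might consume the new origin information; the import path is the `sqlx-core` internal module shown in this patch (the public re-export surface is not established here), so treat this as illustrative only:

```rust
use sqlx_core::column::{Column, ColumnOrigin};

/// Print where each result column came from, if the driver reported it.
fn describe_columns<C: Column>(columns: &[C]) {
    for col in columns {
        match col.origin() {
            // The driver resolved the source table and the original column name.
            ColumnOrigin::Table(tc) => println!("{}: {}.{}", col.name(), tc.table, tc.name),
            // Expression result, or the source table could not be determined.
            ColumnOrigin::Expression => println!("{}: expression", col.name()),
            // The driver has no origin information (default trait impl).
            ColumnOrigin::Unknown => println!("{}: unknown origin", col.name()),
        }
    }
}
```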
diff --git a/sqlx-mysql/src/connection/executor.rs b/sqlx-mysql/src/connection/executor.rs index 40928d7065..07748fe691 100644 --- a/sqlx-mysql/src/connection/executor.rs +++ b/sqlx-mysql/src/connection/executor.rs @@ -21,9 +21,9 @@ use either::Either; use futures_core::future::BoxFuture; use futures_core::stream::BoxStream; use futures_core::Stream; -use futures_util::{pin_mut, TryStreamExt}; +use futures_util::TryStreamExt; +use std::{borrow::Cow, pin::pin, sync::Arc}; use sqlx_core::column::{ColumnOrigin, TableColumn}; -use std::{borrow::Cow, sync::Arc}; impl MySqlConnection { async fn prepare_statement( diff --git a/sqlx-mysql/src/protocol/text/column.rs b/sqlx-mysql/src/protocol/text/column.rs index a7d95f7166..b94ef47403 100644 --- a/sqlx-mysql/src/protocol/text/column.rs +++ b/sqlx-mysql/src/protocol/text/column.rs @@ -130,7 +130,7 @@ impl ColumnDefinition { pub(crate) fn table(&self) -> Result<&str, Error> { str::from_utf8(&self.table).map_err(Error::protocol) } - + pub(crate) fn name(&self) -> Result<&str, Error> { str::from_utf8(&self.name).map_err(Error::protocol) } diff --git a/sqlx-postgres/src/column.rs b/sqlx-postgres/src/column.rs index 4dd3a1cbd2..8530267b5d 100644 --- a/sqlx-postgres/src/column.rs +++ b/sqlx-postgres/src/column.rs @@ -3,6 +3,7 @@ use crate::{PgTypeInfo, Postgres}; use sqlx_core::column::ColumnOrigin; pub(crate) use sqlx_core::column::{Column, ColumnIndex}; +use sqlx_core::column::ColumnOrigin; #[derive(Debug, Clone)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] @@ -13,7 +14,7 @@ pub struct PgColumn { #[cfg_attr(feature = "offline", serde(default))] pub(crate) origin: ColumnOrigin, - + #[cfg_attr(feature = "offline", serde(skip))] pub(crate) relation_id: Option, #[cfg_attr(feature = "offline", serde(skip))] diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index b32331835b..1c2d0a626f 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -1,4 +1,4 @@ -use crate::connection::TableColumns; +use std::collections::btree_map; use crate::error::Error; use crate::ext::ustr::UStr; use crate::io::StatementId; @@ -15,6 +15,9 @@ use smallvec::SmallVec; use sqlx_core::column::{ColumnOrigin, TableColumn}; use sqlx_core::query_builder::QueryBuilder; use std::sync::Arc; +use sqlx_core::column::{ColumnOrigin, TableColumn}; +use sqlx_core::hash_map; +use crate::connection::TableColumns; /// Describes the type of the `pg_type.typtype` column /// @@ -124,6 +127,12 @@ impl PgConnection { let type_info = self .maybe_fetch_type_info_by_oid(field.data_type_id, fetch_type_info) .await?; + + let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) { + self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch).await? 
+ } else { + ColumnOrigin::Expression + }; let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) @@ -211,6 +220,54 @@ impl PgConnection { Ok(PgTypeInfo(PgType::DeclareWithOid(oid))) } } + + async fn maybe_fetch_column_origin( + &mut self, + relation_id: Oid, + attribute_no: i16, + should_fetch: bool, + ) -> Result { + let mut table_columns = match self.cache_table_to_column_names.entry(relation_id) { + hash_map::Entry::Occupied(table_columns) => { + table_columns.into_mut() + }, + hash_map::Entry::Vacant(vacant) => { + if !should_fetch { return Ok(ColumnOrigin::Unknown); } + + let table_name: String = query_scalar("SELECT $1::oid::regclass::text") + .bind(relation_id) + .fetch_one(&mut *self) + .await?; + + vacant.insert(TableColumns { + table_name: table_name.into(), + columns: Default::default(), + }) + } + }; + + let column_name = match table_columns.columns.entry(attribute_no) { + btree_map::Entry::Occupied(occupied) => Arc::clone(occupied.get()), + btree_map::Entry::Vacant(vacant) => { + if !should_fetch { return Ok(ColumnOrigin::Unknown); } + + let column_name: String = query_scalar( + "SELECT attname FROM pg_attribute WHERE attrelid = $1 AND attnum = $2" + ) + .bind(relation_id) + .bind(attribute_no) + .fetch_one(&mut *self) + .await?; + + Arc::clone(vacant.insert(column_name.into())) + } + }; + + Ok(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name + })) + } async fn maybe_fetch_column_origin( &mut self, diff --git a/sqlx-postgres/src/connection/mod.rs b/sqlx-postgres/src/connection/mod.rs index dded00c4a5..74398d6a8b 100644 --- a/sqlx-postgres/src/connection/mod.rs +++ b/sqlx-postgres/src/connection/mod.rs @@ -1,3 +1,4 @@ +use std::borrow::Cow; use std::collections::BTreeMap; use std::fmt::{self, Debug, Formatter}; use std::sync::Arc; diff --git a/sqlx-sqlite/src/column.rs b/sqlx-sqlite/src/column.rs index d319bd46a8..390f3687fb 100644 --- a/sqlx-sqlite/src/column.rs +++ b/sqlx-sqlite/src/column.rs @@ -11,7 +11,7 @@ pub struct SqliteColumn { pub(crate) type_info: SqliteTypeInfo, #[cfg_attr(feature = "offline", serde(default))] - pub(crate) origin: ColumnOrigin, + pub(crate) origin: ColumnOrigin } impl Column for SqliteColumn { diff --git a/sqlx-sqlite/src/connection/describe.rs b/sqlx-sqlite/src/connection/describe.rs index 5ff7081502..8a09cd1153 100644 --- a/sqlx-sqlite/src/connection/describe.rs +++ b/sqlx-sqlite/src/connection/describe.rs @@ -49,6 +49,8 @@ pub(crate) fn describe(conn: &mut ConnectionState, query: &str) -> Result ColumnOrigin { - if let Some((table, name)) = self - .column_table_name(index) - .zip(self.column_origin_name(index)) + if let Some((table, name)) = + self.column_table_name(index).zip(self.column_origin_name(index)) { let table: Arc = self .column_db_name(index) @@ -126,20 +125,20 @@ impl StatementHandle { // TODO: check that SQLite returns the names properly quoted if necessary |db| format!("{db}.{table}").into(), ); - + ColumnOrigin::Table(TableColumn { table, - name: name.into(), + name: name.into() }) } else { ColumnOrigin::Expression } } - + fn column_db_name(&self, index: usize) -> Option<&str> { unsafe { let db_name = sqlite3_column_database_name(self.0.as_ptr(), check_col_idx!(index)); - + if !db_name.is_null() { Some(from_utf8_unchecked(CStr::from_ptr(db_name).to_bytes())) } else { @@ -171,7 +170,7 @@ impl StatementHandle { } } } - + pub(crate) fn column_type_info(&self, index: usize) -> SqliteTypeInfo { 
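        // Map the raw SQLite column type code for this column into SQLx's `SqliteTypeInfo`.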
SqliteTypeInfo(DataType::from_code(self.column_type(index))) } From d6870173d1d5b766d753870ea33eac7aff03583e Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 18 Sep 2024 18:17:43 -0700 Subject: [PATCH 27/78] feat(macros): implement `type_override` and `column_override` from `sqlx.toml` --- sqlx-macros-core/src/query/output.rs | 121 ++++++++++----------------- 1 file changed, 42 insertions(+), 79 deletions(-) diff --git a/sqlx-macros-core/src/query/output.rs b/sqlx-macros-core/src/query/output.rs index 1a145e3a75..654e9328a1 100644 --- a/sqlx-macros-core/src/query/output.rs +++ b/sqlx-macros-core/src/query/output.rs @@ -15,6 +15,8 @@ use sqlx_core::type_info::TypeInfo; use std::fmt::{self, Display, Formatter}; use syn::parse::{Parse, ParseStream}; use syn::Token; +use sqlx_core::config::Config; +use sqlx_core::type_info::TypeInfo; pub struct RustColumn { pub(super) ident: Ident, @@ -239,92 +241,53 @@ pub fn quote_query_scalar( }) } -fn get_column_type(config: &Config, i: usize, column: &DB::Column) -> TokenStream { +fn get_column_type(i: usize, column: &DB::Column) -> TokenStream { if let ColumnOrigin::Table(origin) = column.origin() { - if let Some(column_override) = config.macros.column_override(&origin.table, &origin.name) { + if let Some(column_override) = Config::from_crate() + .macros + .column_override(&origin.table, &origin.name) + { return column_override.parse().unwrap(); } } - + let type_info = column.type_info(); - if let Some(type_override) = config.macros.type_override(type_info.name()) { - return type_override.parse().unwrap(); + if let Some(type_override) = Config::from_crate() + .macros + .type_override(type_info.name()) + { + return type_override.parse().unwrap(); } - - let err = match ::return_type_for_id( - type_info, - &config.macros.preferred_crates, - ) { - Ok(t) => return t.parse().unwrap(), - Err(e) => e, - }; - - let message = match err { - type_checking::Error::NoMappingFound => { - if let Some(feature_gate) = ::get_feature_gate(type_info) { - format!( - "SQLx feature `{feat}` required for type {ty} of {col}", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } else { - format!( - "no built-in mapping found for type {ty} of {col}; \ - a type override may be required, see documentation for details", - ty = type_info, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - } - type_checking::Error::DateTimeCrateFeatureNotEnabled => { - let feature_gate = config - .macros - .preferred_crates - .date_time - .crate_name() - .expect("BUG: got feature-not-enabled error for DateTimeCrate::Inferred"); - - format!( - "SQLx feature `{feat}` required for type {ty} of {col} \ - (configured by `macros.preferred-crates.date-time` in sqlx.toml)", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - type_checking::Error::NumericCrateFeatureNotEnabled => { - let feature_gate = config - .macros - .preferred_crates - .numeric - .crate_name() - .expect("BUG: got feature-not-enabled error for NumericCrate::Inferred"); - - format!( - "SQLx feature `{feat}` required for type {ty} of {col} \ - (configured by `macros.preferred-crates.numeric` in sqlx.toml)", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } - }; - - syn::Error::new(Span::call_site(), message).to_compile_error() + + ::return_type_for_id(type_info).map_or_else( + || { + let message = + if let Some(feature_gate) = 
::get_feature_gate(type_info) { + format!( + "SQLx feature `{feat}` required for type {ty} of {col}", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } else { + format!( + "no built in mapping found for type {ty} of {col}; \ + a type override may be required, see documentation for details", + ty = type_info, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + }; + syn::Error::new(Span::call_site(), message).to_compile_error() + }, + |t| t.parse().unwrap(), + ) } impl ColumnDecl { From 3a2295872a738d390981e5adca27512ef536d4ae Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 19 Sep 2024 19:23:03 -0700 Subject: [PATCH 28/78] refactor(sqlx.toml): make all keys kebab-case, create `macros.preferred-crates` --- sqlx-core/src/config/common.rs | 14 ++- sqlx-core/src/config/macros.rs | 179 +++++++++++++++++++++------- sqlx-core/src/config/migrate.rs | 47 +++++--- sqlx-core/src/config/mod.rs | 6 +- sqlx-core/src/config/reference.toml | 42 ++++--- sqlx-core/src/config/tests.rs | 10 +- 6 files changed, 206 insertions(+), 92 deletions(-) diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index e4845b4606..0bd89e1375 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -1,6 +1,10 @@ /// Configuration shared by multiple components. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { /// Override the database URL environment variable. /// @@ -17,14 +21,14 @@ pub struct Config { /// /// #### `foo/sqlx.toml` /// ```toml - /// [macros] - /// database_url_var = "FOO_DATABASE_URL" + /// [common] + /// database-url-var = "FOO_DATABASE_URL" /// ``` /// /// #### `bar/sqlx.toml` /// ```toml - /// [macros] - /// database_url_var = "BAR_DATABASE_URL" + /// [common] + /// database-url-var = "BAR_DATABASE_URL" /// ``` /// /// #### `.env` diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 142f059da4..9f4cf4524f 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -2,33 +2,16 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { - /// Specify the crate to use for mapping date/time types to Rust. - /// - /// The default behavior is to use whatever crate is enabled, - /// [`chrono`] or [`time`] (the latter takes precedent). - /// - /// [`chrono`]: crate::types::chrono - /// [`time`]: crate::types::time - /// - /// Example: Always Use Chrono - /// ------- - /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable - /// the `time` feature of SQLx which will force it on for all crates using SQLx, - /// which will result in problems if your crate wants to use types from [`chrono`]. - /// - /// You can use the type override syntax (see `sqlx::query!` for details), - /// or you can force an override globally by setting this option. 
- /// - /// #### `sqlx.toml` - /// ```toml - /// [macros] - /// datetime_crate = "chrono" - /// ``` - /// - /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification - pub datetime_crate: DateTimeCrate, + /// Specify which crates' types to use when types from multiple crates apply. + /// + /// See [`PreferredCrates`] for details. + pub preferred_crates: PreferredCrates, /// Specify global overrides for mapping SQL type names to Rust type names. /// @@ -78,7 +61,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Override a built-in type /// 'uuid' = "crate::types::MyUuid" /// @@ -115,7 +98,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Map SQL type `foo` to `crate::types::Foo` /// 'foo' = "crate::types::Foo" /// ``` @@ -125,7 +108,7 @@ pub struct Config { /// (See `Note` section above for details.) /// /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # Map SQL type `foo.foo` to `crate::types::Foo` /// 'foo.foo' = "crate::types::Foo" /// ``` @@ -136,7 +119,7 @@ pub struct Config { /// it must be wrapped in quotes _twice_ for SQLx to know the difference: /// /// ```toml - /// [macros.type_overrides] + /// [macros.type-overrides] /// # `"Foo"` in SQLx /// '"Foo"' = "crate::types::Foo" /// # **NOT** `"Foo"` in SQLx (parses as just `Foo`) @@ -151,7 +134,7 @@ pub struct Config { /// (See `Note` section above for details.) pub type_overrides: BTreeMap, - /// Specify per-column overrides for mapping SQL types to Rust types. + /// Specify per-table and per-column overrides for mapping SQL types to Rust types. /// /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. @@ -206,7 +189,7 @@ pub struct Config { /// /// #### `sqlx.toml` /// ```toml - /// [macros.column_overrides.'foo'] + /// [macros.table-overrides.'foo'] /// # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: /// 'bar' = "crate::types::Bar" /// @@ -218,23 +201,83 @@ pub struct Config { /// # "Bar" = "crate::types::Bar" /// /// # Table name may be quoted (note the wrapping single-quotes) - /// [macros.column_overrides.'"Foo"'] + /// [macros.table-overrides.'"Foo"'] /// 'bar' = "crate::types::Bar" /// '"Bar"' = "crate::types::Bar" /// /// # Table name may also be schema-qualified. /// # Note how the dot is inside the quotes. - /// [macros.column_overrides.'my_schema.my_table'] + /// [macros.table-overrides.'my_schema.my_table'] /// 'my_column' = "crate::types::MyType" /// /// # Quoted schema, table, and column names - /// [macros.column_overrides.'"My Schema"."My Table"'] + /// [macros.table-overrides.'"My Schema"."My Table"'] /// '"My Column"' = "crate::types::MyType" /// ``` - pub column_overrides: BTreeMap>, + pub table_overrides: BTreeMap>, +} + +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "kebab-case") +)] +pub struct PreferredCrates { + /// Specify the crate to use for mapping date/time types to Rust. + /// + /// The default behavior is to use whatever crate is enabled, + /// [`chrono`] or [`time`] (the latter takes precedent). 
+ /// + /// [`chrono`]: crate::types::chrono + /// [`time`]: crate::types::time + /// + /// Example: Always Use Chrono + /// ------- + /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable + /// the `time` feature of SQLx which will force it on for all crates using SQLx, + /// which will result in problems if your crate wants to use types from [`chrono`]. + /// + /// You can use the type override syntax (see `sqlx::query!` for details), + /// or you can force an override globally by setting this option. + /// + /// #### `sqlx.toml` + /// ```toml + /// [macros.preferred-crates] + /// date-time = "chrono" + /// ``` + /// + /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification + pub date_time: DateTimeCrate, + + /// Specify the crate to use for mapping `NUMERIC` types to Rust. + /// + /// The default behavior is to use whatever crate is enabled, + /// [`bigdecimal`] or [`rust_decimal`] (the latter takes precedent). + /// + /// [`bigdecimal`]: crate::types::bigdecimal + /// [`rust_decimal`]: crate::types::rust_decimal + /// + /// Example: Always Use `bigdecimal` + /// ------- + /// Thanks to Cargo's [feature unification], a crate in the dependency graph may enable + /// the `rust_decimal` feature of SQLx which will force it on for all crates using SQLx, + /// which will result in problems if your crate wants to use types from [`bigdecimal`]. + /// + /// You can use the type override syntax (see `sqlx::query!` for details), + /// or you can force an override globally by setting this option. + /// + /// #### `sqlx.toml` + /// ```toml + /// [macros.preferred-crates] + /// numeric = "bigdecimal" + /// ``` + /// + /// [feature unification]: https://doc.rust-lang.org/cargo/reference/features.html#feature-unification + pub numeric: NumericCrate, } -/// The crate to use for mapping date/time types to Rust. +/// The preferred crate to use for mapping date/time types to Rust. #[derive(Debug, Default, PartialEq, Eq)] #[cfg_attr( feature = "sqlx-toml", @@ -249,33 +292,63 @@ pub enum DateTimeCrate { /// Always use types from [`chrono`][crate::types::chrono]. /// /// ```toml - /// [macros] - /// datetime_crate = "chrono" + /// [macros.preferred-crates] + /// date-time = "chrono" /// ``` Chrono, /// Always use types from [`time`][crate::types::time]. /// /// ```toml - /// [macros] - /// datetime_crate = "time" + /// [macros.preferred-crates] + /// date-time = "time" /// ``` Time, } +/// The preferred crate to use for mapping `NUMERIC` types to Rust. +#[derive(Debug, Default, PartialEq, Eq)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(rename_all = "snake_case") +)] +pub enum NumericCrate { + /// Use whichever crate is enabled (`rust_decimal` then `bigdecimal`). + #[default] + Inferred, + + /// Always use types from [`bigdecimal`][crate::types::bigdecimal]. + /// + /// ```toml + /// [macros.preferred-crates] + /// numeric = "bigdecimal" + /// ``` + #[cfg_attr(feature = "sqlx-toml", serde(rename = "bigdecimal"))] + BigDecimal, + + /// Always use types from [`rust_decimal`][crate::types::rust_decimal]. + /// + /// ```toml + /// [macros.preferred-crates] + /// numeric = "rust_decimal" + /// ``` + RustDecimal, +} + /// A SQL type name; may optionally be schema-qualified. /// -/// See [`macros.type_overrides`][Config::type_overrides] for usages. +/// See [`macros.type-overrides`][Config::type_overrides] for usages. pub type SqlType = Box; /// A SQL table name; may optionally be schema-qualified. 
/// -/// See [`macros.column_overrides`][Config::column_overrides] for usages. +/// See [`macros.table-overrides`][Config::table_overrides] for usages. pub type TableName = Box; /// A column in a SQL table. /// -/// See [`macros.column_overrides`][Config::column_overrides] for usages. +/// See [`macros.table-overrides`][Config::table_overrides] for usages. pub type ColumnName = Box; /// A Rust type name or path. @@ -292,9 +365,25 @@ impl Config { /// Get the override for a given column and table name (optionally schema-qualified). pub fn column_override(&self, table: &str, column: &str) -> Option<&str> { - self.column_overrides + self.table_overrides .get(table) .and_then(|by_column| by_column.get(column)) .map(|s| &**s) } } + +impl DateTimeCrate { + /// Returns `self == Self::Inferred` + #[inline(always)] + pub fn is_inferred(&self) -> bool { + *self == Self::Inferred + } +} + +impl NumericCrate { + /// Returns `self == Self::Inferred` + #[inline(always)] + pub fn is_inferred(&self) -> bool { + *self == Self::Inferred + } +} \ No newline at end of file diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index efc03a0155..d0e55b35d8 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -13,7 +13,11 @@ use std::collections::BTreeSet; /// /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize), serde(default))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { /// Override the name of the table used to track executed migrations. /// @@ -35,7 +39,7 @@ pub struct Config { /// ```toml /// [migrate] /// # Put `_sqlx_migrations` in schema `foo` - /// table_name = "foo._sqlx_migrations" + /// table-name = "foo._sqlx_migrations" /// ``` pub table_name: Option>, @@ -63,7 +67,7 @@ pub struct Config { /// `sqlx.toml`: /// ```toml /// [migrate] - /// ignored_chars = ["\r"] + /// ignored-chars = ["\r"] /// ``` /// /// For projects using Git, this can also be addressed using [`.gitattributes`]: @@ -91,33 +95,44 @@ pub struct Config { /// ```toml /// [migrate] /// # Ignore common whitespace characters when hashing - /// ignored_chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF + /// ignored-chars = [" ", "\t", "\r", "\n"] # Space, tab, CR, LF /// ``` // Likely lower overhead for small sets than `HashSet`. pub ignored_chars: BTreeSet, - /// Specify the default type of migration that `sqlx migrate create` should create by default. + /// Specify default options for new migrations created with `sqlx migrate add`. + pub defaults: MigrationDefaults, +} + +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] +pub struct MigrationDefaults { + /// Specify the default type of migration that `sqlx migrate add` should create by default. /// /// ### Example: Use Reversible Migrations by Default /// `sqlx.toml`: /// ```toml - /// [migrate] - /// default_type = "reversible" + /// [migrate.defaults] + /// migration-type = "reversible" /// ``` - pub default_type: DefaultMigrationType, + pub migration_type: DefaultMigrationType, - /// Specify the default scheme that `sqlx migrate create` should use for version integers. + /// Specify the default scheme that `sqlx migrate add` should use for version integers. 
/// /// ### Example: Use Sequential Versioning by Default /// `sqlx.toml`: /// ```toml - /// [migrate] - /// default_versioning = "sequential" + /// [migrate.defaults] + /// migration-versioning = "sequential" /// ``` - pub default_versioning: DefaultVersioning, + pub migration_versioning: DefaultVersioning, } -/// The default type of migration that `sqlx migrate create` should create by default. +/// The default type of migration that `sqlx migrate add` should create by default. #[derive(Debug, Default, PartialEq, Eq)] #[cfg_attr( feature = "sqlx-toml", @@ -130,14 +145,14 @@ pub enum DefaultMigrationType { #[default] Inferred, - /// Create a non-reversible migration (`_.sql`). + /// Create non-reversible migrations (`_.sql`) by default. Simple, - /// Create a reversible migration (`_.up.sql` and `[...].down.sql`). + /// Create reversible migrations (`_.up.sql` and `[...].down.sql`) by default. Reversible, } -/// The default scheme that `sqlx migrate create` should use for version integers. +/// The default scheme that `sqlx migrate add` should use for version integers. #[derive(Debug, Default, PartialEq, Eq)] #[cfg_attr( feature = "sqlx-toml", diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 3bbde5c2f1..696752a51b 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -45,7 +45,11 @@ mod tests; /// The parsed structure of a `sqlx.toml` file. #[derive(Debug, Default)] -#[cfg_attr(feature = "sqlx-toml", derive(serde::Deserialize))] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, rename_all = "kebab-case") +)] pub struct Config { /// Configuration shared by multiple components. /// diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 3f9fd83a03..9585d5f392 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -13,7 +13,7 @@ # This is used by both the macros and `sqlx-cli`. # # If not specified, defaults to `DATABASE_URL` -database_url_var = "FOO_DATABASE_URL" +database-url-var = "FOO_DATABASE_URL" [common.drivers.sqlite] # Load extensions into SQLite when running macros or migrations @@ -25,14 +25,18 @@ load_extensions = ["uuid", "vsv"] # Configuration for the `query!()` family of macros. [macros] + +[macros.preferred-crates] # Force the macros to use the `chrono` crate for date/time types, even if `time` is enabled. # # Defaults to "inferred": use whichever crate is enabled (`time` takes precedence over `chrono`). -datetime_crate = "chrono" +date-time = "chrono" # Or, ensure the macros always prefer `time` # in case new date/time crates are added in the future: -# datetime_crate = "time" +# date-time = "time" + + # Set global overrides for mapping SQL types to Rust types. # @@ -44,7 +48,7 @@ datetime_crate = "chrono" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.type_overrides] +[macros.type-overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) 'uuid' = "crate::types::MyUuid" @@ -73,7 +77,7 @@ datetime_crate = "chrono" # Quoted schema and type name '"Foo"."Bar"' = "crate::schema::foo::Bar" -# Set per-column overrides for mapping SQL types to Rust types. +# Set per-table and per-column overrides for mapping SQL types to Rust types. # # Note: table name is required in the header. 
# @@ -82,7 +86,7 @@ datetime_crate = "chrono" # ### Note: Orthogonal to Nullability # These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` # or not. They only override the inner type used. -[macros.column_overrides.'foo'] +[macros.table-overrides.'foo'] # Map column `bar` of table `foo` to Rust type `crate::types::Foo`: 'bar' = "crate::types::Bar" @@ -94,17 +98,17 @@ datetime_crate = "chrono" # "Bar" = "crate::types::Bar" # Table name may be quoted (note the wrapping single-quotes) -[macros.column_overrides.'"Foo"'] +[macros.table-overrides.'"Foo"'] 'bar' = "crate::types::Bar" '"Bar"' = "crate::types::Bar" # Table name may also be schema-qualified. # Note how the dot is inside the quotes. -[macros.column_overrides.'my_schema.my_table'] +[macros.table-overrides.'my_schema.my_table'] 'my_column' = "crate::types::MyType" # Quoted schema, table, and column names -[macros.column_overrides.'"My Schema"."My Table"'] +[macros.table-overrides.'"My Schema"."My Table"'] '"My Column"' = "crate::types::MyType" ############################################################################################### @@ -136,12 +140,12 @@ datetime_crate = "chrono" # You should create the new table as a copy of the existing migrations table (with contents!), # and be sure all instances of your application have been migrated to the new # table before deleting the old one. -table_name = "foo._sqlx_migrations" +table-name = "foo._sqlx_migrations" # Override the directory used for migrations files. # # Relative to the crate root for `sqlx::migrate!()`, or the current directory for `sqlx-cli`. -migrations_dir = "foo/migrations" +migrations-dir = "foo/migrations" # Specify characters that should be ignored when hashing migrations. # @@ -154,32 +158,34 @@ migrations_dir = "foo/migrations" # change the output of the hash. # # This may require manual rectification for deployed databases. -# ignored_chars = [] +# ignored-chars = [] # Ignore Carriage Returns (`` | `\r`) # Note that the TOML format requires double-quoted strings to process escapes. -# ignored_chars = ["\r"] +# ignored-chars = ["\r"] # Ignore common whitespace characters (beware syntatically significant whitespace!) # Space, tab, CR, LF, zero-width non-breaking space (U+FEFF) # # U+FEFF is added by some editors as a magic number at the beginning of a text file indicating it is UTF-8 encoded, # where it is known as a byte-order mark (BOM): https://en.wikipedia.org/wiki/Byte_order_mark -ignored_chars = [" ", "\t", "\r", "\n", "\uFEFF"] +ignored-chars = [" ", "\t", "\r", "\n", "\uFEFF"] +# Set default options for new migrations. +[migrate.defaults] # Specify reversible migrations by default (for `sqlx migrate create`). # # Defaults to "inferred": uses the type of the last migration, or "simple" otherwise. -default_type = "reversible" +migration-type = "reversible" # Specify simple (non-reversible) migrations by default. -# default_type = "simple" +# migration-type = "simple" # Specify sequential versioning by default (for `sqlx migrate create`). # # Defaults to "inferred": guesses the versioning scheme from the latest migrations, # or "timestamp" otherwise. -default_versioning = "sequential" +migration-versioning = "sequential" # Specify timestamp versioning by default. 
-# default_versioning = "timestamp" +# migration-versioning = "timestamp" diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index e5033bb459..6c2883d58b 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -8,11 +8,7 @@ fn reference_parses_as_config() { .unwrap_or_else(|e| panic!("expected reference.toml to parse as Config: {e}")); assert_common_config(&config.common); - - #[cfg(feature = "config-macros")] assert_macros_config(&config.macros); - - #[cfg(feature = "config-migrate")] assert_migrate_config(&config.migrate); } @@ -23,7 +19,7 @@ fn assert_common_config(config: &config::common::Config) { fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; - assert_eq!(config.datetime_crate, DateTimeCrate::Chrono); + assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); // Type overrides // Don't need to cover everything, just some important canaries. @@ -83,6 +79,6 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!(config.default_type, DefaultMigrationType::Reversible); - assert_eq!(config.default_versioning, DefaultVersioning::Sequential); + assert_eq!(config.defaults.migration_type, DefaultMigrationType::Reversible); + assert_eq!(config.defaults.migration_versioning, DefaultVersioning::Sequential); } From a6947b254720821ea6dc6a06338d7934357467fa Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 19 Sep 2024 22:54:48 -0700 Subject: [PATCH 29/78] feat: make macros aware of `macros.preferred-crates` --- sqlx-core/src/column.rs | 22 ++-- sqlx-core/src/config/common.rs | 4 +- sqlx-core/src/config/macros.rs | 39 +++++-- sqlx-core/src/config/migrate.rs | 4 +- sqlx-core/src/config/mod.rs | 23 ++-- sqlx-core/src/config/reference.toml | 11 +- sqlx-core/src/config/tests.rs | 13 ++- sqlx-macros-core/src/query/mod.rs | 5 +- sqlx-macros-core/src/query/output.rs | 121 ++++++++++++++-------- sqlx-mysql/src/protocol/text/column.rs | 2 +- sqlx-postgres/src/column.rs | 3 +- sqlx-postgres/src/connection/describe.rs | 103 ++++++++++-------- sqlx-postgres/src/connection/establish.rs | 3 +- sqlx-sqlite/src/column.rs | 2 +- sqlx-sqlite/src/connection/describe.rs | 2 +- sqlx-sqlite/src/statement/handle.rs | 33 +++--- src/lib.rs | 24 +++++ 17 files changed, 265 insertions(+), 149 deletions(-) diff --git a/sqlx-core/src/column.rs b/sqlx-core/src/column.rs index 7483375765..fddc048c4b 100644 --- a/sqlx-core/src/column.rs +++ b/sqlx-core/src/column.rs @@ -23,15 +23,17 @@ pub trait Column: 'static + Send + Sync + Debug { fn type_info(&self) -> &::TypeInfo; /// If this column comes from a table, return the table and original column name. - /// + /// /// Returns [`ColumnOrigin::Expression`] if the column is the result of an expression /// or else the source table could not be determined. - /// + /// /// Returns [`ColumnOrigin::Unknown`] if the database driver does not have that information, /// or has not overridden this method. - // This method returns an owned value instead of a reference, + // This method returns an owned value instead of a reference, // to give the implementor more flexibility. - fn origin(&self) -> ColumnOrigin { ColumnOrigin::Unknown } + fn origin(&self) -> ColumnOrigin { + ColumnOrigin::Unknown + } } /// A [`Column`] that originates from a table. @@ -44,20 +46,20 @@ pub struct TableColumn { pub name: Arc, } -/// The possible statuses for our knowledge of the origin of a [`Column`]. 
+/// The possible statuses for our knowledge of the origin of a [`Column`]. #[derive(Debug, Clone, Default)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] pub enum ColumnOrigin { - /// The column is known to originate from a table. - /// - /// Included is the table name and original column name. + /// The column is known to originate from a table. + /// + /// Included is the table name and original column name. Table(TableColumn), /// The column originates from an expression, or else its origin could not be determined. Expression, /// The database driver does not know the column origin at this time. - /// + /// /// This may happen if: - /// * The connection is in the middle of executing a query, + /// * The connection is in the middle of executing a query, /// and cannot query the catalog to fetch this information. /// * The connection does not have access to the database catalog. /// * The implementation of [`Column`] did not override [`Column::origin()`]. diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 0bd89e1375..7f88c223a7 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -79,6 +79,6 @@ pub struct SQLite { impl Config { pub fn database_url_var(&self) -> &str { - self.database_url_var.as_deref().unwrap_or("DATABASE_URL") + self.database_url_var.as_deref().unwrap_or("DATABASE_URL") } -} \ No newline at end of file +} diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 9f4cf4524f..19e5f42fa0 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -3,13 +3,13 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. #[derive(Debug, Default)] #[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), + feature = "sqlx-toml", + derive(serde::Deserialize), serde(default, rename_all = "kebab-case") )] pub struct Config { /// Specify which crates' types to use when types from multiple crates apply. - /// + /// /// See [`PreferredCrates`] for details. pub preferred_crates: PreferredCrates, @@ -18,6 +18,12 @@ pub struct Config { /// Default type mappings are defined by the database driver. /// Refer to the `sqlx::types` module for details. /// + /// ## Note: Case-Sensitive + /// Currently, the case of the type name MUST match the name SQLx knows it by. + /// Built-in types are spelled in all-uppercase to match SQL convention. + /// + /// However, user-created types in Postgres are all-lowercase unless quoted. + /// /// ## Note: Orthogonal to Nullability /// These overrides do not affect whether `query!()` decides to wrap a column in `Option<_>` /// or not. They only override the inner type used. @@ -63,7 +69,7 @@ pub struct Config { /// ```toml /// [macros.type-overrides] /// # Override a built-in type - /// 'uuid' = "crate::types::MyUuid" + /// 'UUID' = "crate::types::MyUuid" /// /// # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) /// # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) @@ -132,6 +138,8 @@ pub struct Config { /// ``` /// /// (See `Note` section above for details.) + // TODO: allow specifying different types for input vs output + // e.g. to accept `&[T]` on input but output `Vec` pub type_overrides: BTreeMap, /// Specify per-table and per-column overrides for mapping SQL types to Rust types. 
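Because the case-sensitivity note above is the easiest thing to get wrong with `type-overrides`, a short sketch of the three spellings involved may help (the `crate::types::*` targets are placeholders, exactly as in `reference.toml`):

```toml
[macros.type-overrides]
# Built-in type names are spelled all-uppercase, matching SQL convention
'UUID' = "crate::types::MyUuid"
# User-created Postgres types are all-lowercase unless they were created quoted
'foo' = "crate::types::Foo"
# A quoted type name keeps its case (note the nested double quotes)
'"Bar"' = "crate::types::Bar"
```
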
@@ -221,7 +229,7 @@ pub struct Config { #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(rename_all = "kebab-case") + serde(default, rename_all = "kebab-case") )] pub struct PreferredCrates { /// Specify the crate to use for mapping date/time types to Rust. @@ -360,6 +368,7 @@ pub type RustType = Box; impl Config { /// Get the override for a given type name (optionally schema-qualified). pub fn type_override(&self, type_name: &str) -> Option<&str> { + // TODO: make this case-insensitive self.type_overrides.get(type_name).map(|s| &**s) } @@ -378,6 +387,15 @@ impl DateTimeCrate { pub fn is_inferred(&self) -> bool { *self == Self::Inferred } + + #[inline(always)] + pub fn crate_name(&self) -> Option<&str> { + match self { + Self::Inferred => None, + Self::Chrono => Some("chrono"), + Self::Time => Some("time"), + } + } } impl NumericCrate { @@ -386,4 +404,13 @@ impl NumericCrate { pub fn is_inferred(&self) -> bool { *self == Self::Inferred } -} \ No newline at end of file + + #[inline(always)] + pub fn crate_name(&self) -> Option<&str> { + match self { + Self::Inferred => None, + Self::BigDecimal => Some("bigdecimal"), + Self::RustDecimal => Some("rust_decimal"), + } + } +} diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index d0e55b35d8..64529f9f02 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -14,8 +14,8 @@ use std::collections::BTreeSet; /// Be sure you know what you are doing and that you read all relevant documentation _thoroughly_. #[derive(Debug, Default)] #[cfg_attr( - feature = "sqlx-toml", - derive(serde::Deserialize), + feature = "sqlx-toml", + derive(serde::Deserialize), serde(default, rename_all = "kebab-case") )] pub struct Config { diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 696752a51b..b3afd9ea1b 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -86,9 +86,7 @@ pub enum ConfigError { /// No configuration file was found. Not necessarily fatal. #[error("config file {path:?} not found")] - NotFound { - path: PathBuf, - }, + NotFound { path: PathBuf }, /// An I/O error occurred while attempting to read the config file at `path`. /// @@ -103,7 +101,7 @@ pub enum ConfigError { /// An error in the TOML was encountered while parsing the config file at `path`. /// /// The error gives line numbers and context when printed with `Display`/`ToString`. - /// + /// /// Only returned if the `sqlx-toml` feature is enabled. #[error("error parsing config file {path:?}")] Parse { @@ -115,14 +113,12 @@ pub enum ConfigError { /// A `sqlx.toml` file was found or specified, but the `sqlx-toml` feature is not enabled. #[error("SQLx found config file at {path:?} but the `sqlx-toml` feature was not enabled")] - ParseDisabled { - path: PathBuf - }, + ParseDisabled { path: PathBuf }, } impl ConfigError { /// Create a [`ConfigError`] from a [`std::io::Error`]. - /// + /// /// Maps to either `NotFound` or `Io`. pub fn from_io(path: PathBuf, error: io::Error) -> Self { if error.kind() == io::ErrorKind::NotFound { @@ -131,7 +127,7 @@ impl ConfigError { Self::Io { path, error } } } - + /// If this error means the file was not found, return the path that was attempted. 
pub fn not_found_path(&self) -> Option<&Path> { if let Self::NotFound { path } = self { @@ -227,15 +223,18 @@ impl Config { // Motivation: https://github.com/toml-rs/toml/issues/761 tracing::debug!("read config TOML from {path:?}:\n{toml_s}"); - toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { path, error: Box::new(error) }) + toml::from_str(&toml_s).map_err(|error| ConfigError::Parse { + path, + error: Box::new(error), + }) } - + #[cfg(not(feature = "sqlx-toml"))] fn read_from(path: PathBuf) -> Result { match path.try_exists() { Ok(true) => Err(ConfigError::ParseDisabled { path }), Ok(false) => Err(ConfigError::NotFound { path }), - Err(e) => Err(ConfigError::from_io(path, e)) + Err(e) => Err(ConfigError::from_io(path, e)), } } } diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 9585d5f392..375d510762 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -36,7 +36,14 @@ date-time = "chrono" # in case new date/time crates are added in the future: # date-time = "time" +# Force the macros to use the `rust_decimal` crate for `NUMERIC`, even if `bigdecimal` is enabled. +# +# Defaults to "inferred": use whichever crate is enabled (`bigdecimal` takes precedence over `rust_decimal`). +numeric = "rust_decimal" +# Or, ensure the macros always prefer `bigdecimal` +# in case new decimal crates are added in the future: +# numeric = "bigdecimal" # Set global overrides for mapping SQL types to Rust types. # @@ -50,7 +57,9 @@ date-time = "chrono" # or not. They only override the inner type used. [macros.type-overrides] # Override a built-in type (map all `UUID` columns to `crate::types::MyUuid`) -'uuid' = "crate::types::MyUuid" +# Note: currently, the case of the type name MUST match. +# Built-in types are spelled in all-uppercase to match SQL convention. +'UUID' = "crate::types::MyUuid" # Support an external or custom wrapper type (e.g. from the `isn` Postgres extension) # (NOTE: FOR DOCUMENTATION PURPOSES ONLY; THIS CRATE/TYPE DOES NOT EXIST AS OF WRITING) diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 6c2883d58b..0b0b590919 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -20,9 +20,12 @@ fn assert_macros_config(config: &config::macros::Config) { use config::macros::*; assert_eq!(config.preferred_crates.date_time, DateTimeCrate::Chrono); + assert_eq!(config.preferred_crates.numeric, NumericCrate::RustDecimal); // Type overrides // Don't need to cover everything, just some important canaries. 
+ assert_eq!(config.type_override("UUID"), Some("crate::types::MyUuid")); + assert_eq!(config.type_override("foo"), Some("crate::types::Foo")); assert_eq!(config.type_override(r#""Bar""#), Some("crate::types::Bar"),); @@ -79,6 +82,12 @@ fn assert_migrate_config(config: &config::migrate::Config) { assert_eq!(config.ignored_chars, ignored_chars); - assert_eq!(config.defaults.migration_type, DefaultMigrationType::Reversible); - assert_eq!(config.defaults.migration_versioning, DefaultVersioning::Sequential); + assert_eq!( + config.defaults.migration_type, + DefaultMigrationType::Reversible + ); + assert_eq!( + config.defaults.migration_versioning, + DefaultVersioning::Sequential + ); } diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index 8c0016346f..cb2d8fc6a7 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -17,7 +17,6 @@ use crate::query::input::RecordType; use either::Either; use sqlx_core::config::Config; use url::Url; -use sqlx_core::config::Config; mod args; mod data; @@ -125,9 +124,7 @@ fn init_metadata(manifest_dir: &String) -> crate::Result { .map(|s| s.eq_ignore_ascii_case("true") || s == "1") .unwrap_or(false); - let var_name = Config::from_crate() - .common - .database_url_var(); + let var_name = Config::from_crate().common.database_url_var(); let database_url = env(var_name).ok().or(database_url); diff --git a/sqlx-macros-core/src/query/output.rs b/sqlx-macros-core/src/query/output.rs index 654e9328a1..1a145e3a75 100644 --- a/sqlx-macros-core/src/query/output.rs +++ b/sqlx-macros-core/src/query/output.rs @@ -15,8 +15,6 @@ use sqlx_core::type_info::TypeInfo; use std::fmt::{self, Display, Formatter}; use syn::parse::{Parse, ParseStream}; use syn::Token; -use sqlx_core::config::Config; -use sqlx_core::type_info::TypeInfo; pub struct RustColumn { pub(super) ident: Ident, @@ -241,53 +239,92 @@ pub fn quote_query_scalar( }) } -fn get_column_type(i: usize, column: &DB::Column) -> TokenStream { +fn get_column_type(config: &Config, i: usize, column: &DB::Column) -> TokenStream { if let ColumnOrigin::Table(origin) = column.origin() { - if let Some(column_override) = Config::from_crate() - .macros - .column_override(&origin.table, &origin.name) - { + if let Some(column_override) = config.macros.column_override(&origin.table, &origin.name) { return column_override.parse().unwrap(); } } - + let type_info = column.type_info(); - if let Some(type_override) = Config::from_crate() - .macros - .type_override(type_info.name()) - { - return type_override.parse().unwrap(); + if let Some(type_override) = config.macros.type_override(type_info.name()) { + return type_override.parse().unwrap(); } - - ::return_type_for_id(type_info).map_or_else( - || { - let message = - if let Some(feature_gate) = ::get_feature_gate(type_info) { - format!( - "SQLx feature `{feat}` required for type {ty} of {col}", - ty = &type_info, - feat = feature_gate, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - } else { - format!( - "no built in mapping found for type {ty} of {col}; \ - a type override may be required, see documentation for details", - ty = type_info, - col = DisplayColumn { - idx: i, - name: column.name() - } - ) - }; - syn::Error::new(Span::call_site(), message).to_compile_error() - }, - |t| t.parse().unwrap(), - ) + + let err = match ::return_type_for_id( + type_info, + &config.macros.preferred_crates, + ) { + Ok(t) => return t.parse().unwrap(), + Err(e) => e, + }; + + let message = match err { + 
type_checking::Error::NoMappingFound => { + if let Some(feature_gate) = ::get_feature_gate(type_info) { + format!( + "SQLx feature `{feat}` required for type {ty} of {col}", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } else { + format!( + "no built-in mapping found for type {ty} of {col}; \ + a type override may be required, see documentation for details", + ty = type_info, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + } + type_checking::Error::DateTimeCrateFeatureNotEnabled => { + let feature_gate = config + .macros + .preferred_crates + .date_time + .crate_name() + .expect("BUG: got feature-not-enabled error for DateTimeCrate::Inferred"); + + format!( + "SQLx feature `{feat}` required for type {ty} of {col} \ + (configured by `macros.preferred-crates.date-time` in sqlx.toml)", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + type_checking::Error::NumericCrateFeatureNotEnabled => { + let feature_gate = config + .macros + .preferred_crates + .numeric + .crate_name() + .expect("BUG: got feature-not-enabled error for NumericCrate::Inferred"); + + format!( + "SQLx feature `{feat}` required for type {ty} of {col} \ + (configured by `macros.preferred-crates.numeric` in sqlx.toml)", + ty = &type_info, + feat = feature_gate, + col = DisplayColumn { + idx: i, + name: column.name() + } + ) + } + }; + + syn::Error::new(Span::call_site(), message).to_compile_error() } impl ColumnDecl { diff --git a/sqlx-mysql/src/protocol/text/column.rs b/sqlx-mysql/src/protocol/text/column.rs index b94ef47403..a7d95f7166 100644 --- a/sqlx-mysql/src/protocol/text/column.rs +++ b/sqlx-mysql/src/protocol/text/column.rs @@ -130,7 +130,7 @@ impl ColumnDefinition { pub(crate) fn table(&self) -> Result<&str, Error> { str::from_utf8(&self.table).map_err(Error::protocol) } - + pub(crate) fn name(&self) -> Result<&str, Error> { str::from_utf8(&self.name).map_err(Error::protocol) } diff --git a/sqlx-postgres/src/column.rs b/sqlx-postgres/src/column.rs index 8530267b5d..4dd3a1cbd2 100644 --- a/sqlx-postgres/src/column.rs +++ b/sqlx-postgres/src/column.rs @@ -3,7 +3,6 @@ use crate::{PgTypeInfo, Postgres}; use sqlx_core::column::ColumnOrigin; pub(crate) use sqlx_core::column::{Column, ColumnIndex}; -use sqlx_core::column::ColumnOrigin; #[derive(Debug, Clone)] #[cfg_attr(feature = "offline", derive(serde::Serialize, serde::Deserialize))] @@ -14,7 +13,7 @@ pub struct PgColumn { #[cfg_attr(feature = "offline", serde(default))] pub(crate) origin: ColumnOrigin, - + #[cfg_attr(feature = "offline", serde(skip))] pub(crate) relation_id: Option, #[cfg_attr(feature = "offline", serde(skip))] diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 1c2d0a626f..74f8e1212d 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -1,4 +1,4 @@ -use std::collections::btree_map; +use crate::connection::TableColumns; use crate::error::Error; use crate::ext::ustr::UStr; use crate::io::StatementId; @@ -15,9 +15,6 @@ use smallvec::SmallVec; use sqlx_core::column::{ColumnOrigin, TableColumn}; use sqlx_core::query_builder::QueryBuilder; use std::sync::Arc; -use sqlx_core::column::{ColumnOrigin, TableColumn}; -use sqlx_core::hash_map; -use crate::connection::TableColumns; /// Describes the type of the `pg_type.typtype` column /// @@ -127,9 +124,12 @@ impl PgConnection { let type_info = self 
.maybe_fetch_type_info_by_oid(field.data_type_id, fetch_type_info) .await?; - - let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) { - self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch).await? + + let origin = if let (Some(relation_oid), Some(attribute_no)) = + (field.relation_id, field.relation_attribute_no) + { + self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch) + .await? } else { ColumnOrigin::Expression }; @@ -220,52 +220,65 @@ impl PgConnection { Ok(PgTypeInfo(PgType::DeclareWithOid(oid))) } } - + async fn maybe_fetch_column_origin( - &mut self, - relation_id: Oid, + &mut self, + relation_id: Oid, attribute_no: i16, should_fetch: bool, ) -> Result { - let mut table_columns = match self.cache_table_to_column_names.entry(relation_id) { - hash_map::Entry::Occupied(table_columns) => { - table_columns.into_mut() - }, - hash_map::Entry::Vacant(vacant) => { - if !should_fetch { return Ok(ColumnOrigin::Unknown); } - - let table_name: String = query_scalar("SELECT $1::oid::regclass::text") - .bind(relation_id) - .fetch_one(&mut *self) - .await?; - - vacant.insert(TableColumns { - table_name: table_name.into(), - columns: Default::default(), + if let Some(origin) = + self.cache_table_to_column_names + .get(&relation_id) + .and_then(|table_columns| { + let column_name = table_columns.columns.get(&attribute_no).cloned()?; + + Some(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name, + })) }) - } - }; - - let column_name = match table_columns.columns.entry(attribute_no) { - btree_map::Entry::Occupied(occupied) => Arc::clone(occupied.get()), - btree_map::Entry::Vacant(vacant) => { - if !should_fetch { return Ok(ColumnOrigin::Unknown); } - - let column_name: String = query_scalar( - "SELECT attname FROM pg_attribute WHERE attrelid = $1 AND attnum = $2" - ) - .bind(relation_id) - .bind(attribute_no) - .fetch_one(&mut *self) - .await?; - - Arc::clone(vacant.insert(column_name.into())) - } + { + return Ok(origin); + } + + if !should_fetch { + return Ok(ColumnOrigin::Unknown); + } + + // Looking up the table name _may_ end up being redundant, + // but the round-trip to the server is by far the most expensive part anyway. + let Some((table_name, column_name)): Option<(String, String)> = query_as( + // language=PostgreSQL + "SELECT $1::oid::regclass::text, attname \ + FROM pg_catalog.pg_attribute \ + WHERE attrelid = $1 AND attnum = $2", + ) + .bind(relation_id) + .bind(attribute_no) + .fetch_optional(&mut *self) + .await? + else { + // The column/table doesn't exist anymore for whatever reason. 
+ return Ok(ColumnOrigin::Unknown); }; - + + let table_columns = self + .cache_table_to_column_names + .entry(relation_id) + .or_insert_with(|| TableColumns { + table_name: table_name.into(), + columns: Default::default(), + }); + + let column_name = table_columns + .columns + .entry(attribute_no) + .or_insert(column_name.into()); + Ok(ColumnOrigin::Table(TableColumn { table: table_columns.table_name.clone(), - name: column_name + name: Arc::clone(column_name), })) } diff --git a/sqlx-postgres/src/connection/establish.rs b/sqlx-postgres/src/connection/establish.rs index 634b71de4b..684bf26599 100644 --- a/sqlx-postgres/src/connection/establish.rs +++ b/sqlx-postgres/src/connection/establish.rs @@ -149,8 +149,7 @@ impl PgConnection { cache_type_info: HashMap::new(), cache_elem_type_to_array: HashMap::new(), cache_table_to_column_names: HashMap::new(), - log_settings: options.log_settings.clone(), - }), + log_settings: options.log_settings.clone(),}), }) } } diff --git a/sqlx-sqlite/src/column.rs b/sqlx-sqlite/src/column.rs index 390f3687fb..d319bd46a8 100644 --- a/sqlx-sqlite/src/column.rs +++ b/sqlx-sqlite/src/column.rs @@ -11,7 +11,7 @@ pub struct SqliteColumn { pub(crate) type_info: SqliteTypeInfo, #[cfg_attr(feature = "offline", serde(default))] - pub(crate) origin: ColumnOrigin + pub(crate) origin: ColumnOrigin, } impl Column for SqliteColumn { diff --git a/sqlx-sqlite/src/connection/describe.rs b/sqlx-sqlite/src/connection/describe.rs index 8a09cd1153..b22590cfa3 100644 --- a/sqlx-sqlite/src/connection/describe.rs +++ b/sqlx-sqlite/src/connection/describe.rs @@ -49,7 +49,7 @@ pub(crate) fn describe(conn: &mut ConnectionState, query: &str) -> Result ColumnOrigin { - if let Some((table, name)) = - self.column_table_name(index).zip(self.column_origin_name(index)) + if let Some((table, name)) = self + .column_table_name(index) + .zip(self.column_origin_name(index)) { let table: Arc = self .column_db_name(index) @@ -125,20 +126,20 @@ impl StatementHandle { // TODO: check that SQLite returns the names properly quoted if necessary |db| format!("{db}.{table}").into(), ); - + ColumnOrigin::Table(TableColumn { table, - name: name.into() + name: name.into(), }) } else { ColumnOrigin::Expression } } - + fn column_db_name(&self, index: usize) -> Option<&str> { unsafe { let db_name = sqlite3_column_database_name(self.0.as_ptr(), check_col_idx!(index)); - + if !db_name.is_null() { Some(from_utf8_unchecked(CStr::from_ptr(db_name).to_bytes())) } else { @@ -170,7 +171,7 @@ impl StatementHandle { } } } - + pub(crate) fn column_type_info(&self, index: usize) -> SqliteTypeInfo { SqliteTypeInfo(DataType::from_code(self.column_type(index))) } diff --git a/src/lib.rs b/src/lib.rs index 2e801540dd..3d5fb9bb26 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -176,3 +176,27 @@ pub mod prelude { #[cfg(feature = "_unstable-doc")] pub use sqlx_core::config; + +#[doc(hidden)] +#[cfg_attr( + all(feature = "chrono", feature = "time"), + deprecated = "SQLx has both `chrono` and `time` features enabled, \ + which presents an ambiguity when the `query!()` macros are mapping date/time types. 
\ + The `query!()` macros prefer types from `time` by default, \ + but this behavior should not be relied upon; \ + to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ + https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.date_time" +)] +pub fn warn_on_ambiguous_inferred_date_time_crate() {} + +#[doc(hidden)] +#[cfg_attr( + all(feature = "bigdecimal", feature = "rust_decimal"), + deprecated = "SQLx has both `bigdecimal` and `rust_decimal` features enabled, \ + which presents an ambiguity when the `query!()` macros are mapping `NUMERIC`. \ + The `query!()` macros prefer `bigdecimal::BigDecimal` by default, \ + but this behavior should not be relied upon; \ + to resolve the ambiguity, we recommend specifying the preferred crate in a `sqlx.toml` file: \ + https://docs.rs/sqlx/latest/sqlx/config/macros/PreferredCrates.html#field.numeric" +)] +pub fn warn_on_ambiguous_inferred_numeric_crate() {} From ff5374e04500178b2908076b3346c16f9876ff15 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 20 Sep 2024 00:46:43 -0700 Subject: [PATCH 30/78] feat: make `sqlx-cli` aware of `database-url-var` --- sqlx-cli/src/database.rs | 2 +- sqlx-cli/src/lib.rs | 57 +++++++++++++++++++++++--------- sqlx-cli/src/opt.rs | 26 ++++++++------- sqlx-core/src/config/mod.rs | 66 ++++++++++++++++++++++--------------- src/lib.rs | 6 ++++ 5 files changed, 103 insertions(+), 54 deletions(-) diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index bcfb911692..367c77dbc9 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -26,7 +26,7 @@ pub async fn create(connect_opts: &ConnectOpts) -> anyhow::Result<()> { } pub async fn drop(connect_opts: &ConnectOpts, confirm: bool, force: bool) -> anyhow::Result<()> { - if confirm && !ask_to_continue_drop(connect_opts.expect_db_url()?) { + if confirm && !ask_to_continue_drop(connect_opts.expect_db_url()?.to_owned()).await { return Ok(()); } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index fc9e2446c4..55eb86c55c 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,6 +1,6 @@ use std::future::Future; use std::io; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::time::Duration; use anyhow::{Context, Result}; @@ -23,11 +23,38 @@ mod prepare; pub use crate::opt::Opt; -pub use sqlx::_unstable::config::{self, Config}; +pub use sqlx::_unstable::config; +use crate::config::Config; + +/// Check arguments for `--no-dotenv` _before_ Clap parsing, and apply `.env` if not set. +pub fn maybe_apply_dotenv() { + if std::env::args().any(|arg| arg == "--no-dotenv") { + return; + } + + dotenvy::dotenv().ok(); +} pub async fn run(opt: Opt) -> Result<()> { let config = config_from_current_dir().await?; + let ctrlc_fut = signal::ctrl_c(); + let do_run_fut = do_run(opt); + + select! { + biased; + _ = ctrlc_fut => { + Ok(()) + }, + do_run_outcome = do_run_fut => { + do_run_outcome + } + } +} + +async fn do_run(opt: Opt) -> Result<()> { + let config = config_from_current_dir()?; + match opt.command { Command::Migrate(migrate) => match migrate.command { MigrateCommand::Add(opts) => migrate::add(config, opts).await?, @@ -77,18 +104,16 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; - migrate::info(config, &source, &connect_opts).await? - } - MigrateCommand::BuildScript { source, force } => { - migrate::build_script(config, &source, force)? - } + migrate::info(&source, &connect_opts).await? 
+ }, + MigrateCommand::BuildScript { source, force } => migrate::build_script(&source, force)?, }, Command::Database(database) => match database.command { DatabaseCommand::Create { mut connect_opts } => { connect_opts.populate_db_url(config)?; database::create(&connect_opts).await? - } + }, DatabaseCommand::Drop { confirmation, mut connect_opts, @@ -96,7 +121,7 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; database::drop(&connect_opts, !confirmation.yes, force).await? - } + }, DatabaseCommand::Reset { confirmation, source, @@ -104,15 +129,15 @@ pub async fn run(opt: Opt) -> Result<()> { force, } => { connect_opts.populate_db_url(config)?; - database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? - } + database::reset(&source, &connect_opts, !confirmation.yes, force).await? + }, DatabaseCommand::Setup { source, mut connect_opts, } => { connect_opts.populate_db_url(config)?; - database::setup(config, &source, &connect_opts).await? - } + database::setup(&source, &connect_opts).await? + }, }, Command::Prepare { @@ -124,7 +149,7 @@ pub async fn run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; prepare::run(check, all, workspace, connect_opts, args).await? - } + }, #[cfg(feature = "completions")] Command::Completions { shell } => completions::run(shell), @@ -189,6 +214,6 @@ async fn config_from_current_dir() -> anyhow::Result<&'static Config> { Config::read_with_or_default(move || Ok(path)) }) - .await - .context("unexpected error loading config") + .await + .context("unexpected error loading config") } diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index e3211c72b6..daa8a71f5b 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,13 +1,19 @@ -use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; -use crate::config::Config; +use std::env; +use std::ops::{Deref, Not}; use anyhow::Context; -use chrono::Utc; -use clap::{Args, Parser}; +use clap::{ + builder::{styling::AnsiColor, Styles}, + Args, Parser, +}; #[cfg(feature = "completions")] use clap_complete::Shell; -use sqlx::migrate::Migrator; -use std::env; -use std::ops::{Deref, Not}; +use sqlx::config::Config; + +const HELP_STYLES: Styles = Styles::styled() + .header(AnsiColor::Blue.on_default().bold()) + .usage(AnsiColor::Blue.on_default().bold()) + .literal(AnsiColor::White.on_default()) + .placeholder(AnsiColor::Green.on_default()); #[derive(Parser, Debug)] #[clap(version, about, author, styles = HELP_STYLES)] @@ -376,9 +382,7 @@ impl ConnectOpts { /// Require a database URL to be provided, otherwise /// return an error. pub fn expect_db_url(&self) -> anyhow::Result<&str> { - self.database_url - .as_deref() - .context("BUG: database_url not populated") + self.database_url.as_deref().context("BUG: database_url not populated") } /// Populate `database_url` from the environment, if not set. @@ -402,7 +406,7 @@ impl ConnectOpts { } self.database_url = Some(url) - } + }, Err(env::VarError::NotPresent) => { anyhow::bail!("`--database-url` or `{var}`{context} must be set") } diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index b3afd9ea1b..02bde20f73 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -152,25 +152,7 @@ impl Config { /// ### Panics /// If the file exists but an unrecoverable error was encountered while parsing it. 
pub fn from_crate() -> &'static Self { - Self::try_from_crate().unwrap_or_else(|e| { - match e { - ConfigError::NotFound { path } => { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found"); - CACHE.get_or_init(Config::default) - } - // FATAL ERRORS BELOW: - // In the case of migrations, - // we can't proceed with defaults as they may be completely wrong. - e @ ConfigError::ParseDisabled { .. } => { - // Only returned if the file exists but the feature is not enabled. - panic!("{e}") - } - e => { - panic!("failed to read sqlx config: {e}") - } - } - }) + Self::read_with_or_default(get_crate_path) } /// Get the cached config, or to read `$CARGO_MANIFEST_DIR/sqlx.toml`. @@ -179,11 +161,7 @@ impl Config { /// /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. pub fn try_from_crate() -> Result<&'static Self, ConfigError> { - Self::try_get_with(|| { - let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); - path.push("sqlx.toml"); - Ok(path) - }) + Self::try_read_with(get_crate_path) } /// Get the cached config, or attempt to read `sqlx.toml` from the current working directory. @@ -192,7 +170,7 @@ impl Config { /// /// Errors if the config file does not exist, or could not be read. pub fn try_from_current_dir() -> Result<&'static Self, ConfigError> { - Self::try_get_with(|| Ok("sqlx.toml".into())) + Self::try_read_with(|| Ok("sqlx.toml".into())) } /// Get the cached config, or attempt to read it from the path returned by the closure. @@ -200,7 +178,7 @@ impl Config { /// On success, the config is cached in a `static` and returned by future calls. /// /// Errors if the config file does not exist, or could not be read. - pub fn try_get_with( + pub fn try_read_with( make_path: impl FnOnce() -> Result, ) -> Result<&'static Self, ConfigError> { CACHE.get_or_try_init(|| { @@ -209,6 +187,36 @@ impl Config { }) } + /// Get the cached config, or attempt to read it from the path returned by the closure. + /// + /// On success, the config is cached in a `static` and returned by future calls. + /// + /// Returns `Config::default()` if the file does not exist. + pub fn read_with_or_default( + make_path: impl FnOnce() -> Result, + ) -> &'static Self { + CACHE.get_or_init(|| { + match make_path().and_then(Self::read_from) { + Ok(config) => config, + Err(ConfigError::NotFound { path }) => { + // Non-fatal + tracing::debug!("Not reading config, file {path:?} not found"); + Config::default() + } + // FATAL ERRORS BELOW: + // In the case of migrations, + // we can't proceed with defaults as they may be completely wrong. + Err(e @ ConfigError::ParseDisabled { .. }) => { + // Only returned if the file exists but the feature is not enabled. + panic!("{e}") + } + Err(e) => { + panic!("failed to read sqlx config: {e}") + } + } + }) + } + #[cfg(feature = "sqlx-toml")] fn read_from(path: PathBuf) -> Result { // The `toml` crate doesn't provide an incremental reader. @@ -238,3 +246,9 @@ impl Config { } } } + +fn get_crate_path() -> Result { + let mut path = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR")?); + path.push("sqlx.toml"); + Ok(path) +} diff --git a/src/lib.rs b/src/lib.rs index 3d5fb9bb26..ce34f0e851 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -177,6 +177,12 @@ pub mod prelude { #[cfg(feature = "_unstable-doc")] pub use sqlx_core::config; +// NOTE: APIs exported in this module are SemVer-exempt. 
+#[doc(hidden)] +pub mod _unstable { + pub use sqlx_core::config; +} + #[doc(hidden)] #[cfg_attr( all(feature = "chrono", feature = "time"), From f069470f4220c9032590ea7dcfbd20dbda4bc508 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 23 Sep 2024 02:06:46 -0700 Subject: [PATCH 31/78] feat: teach macros about `migrate.table-name`, `migrations-dir` --- sqlx-macros-core/src/migrate.rs | 32 +++++++++++++++---------------- sqlx-macros-core/src/test_attr.rs | 3 ++- sqlx-macros/src/lib.rs | 2 +- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 5342f8861e..6d9cf96cb6 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -9,6 +9,9 @@ use sqlx_core::config::Config; use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; use syn::spanned::Spanned; use syn::LitStr; +use syn::spanned::Spanned; +use sqlx_core::config::Config; +use sqlx_core::migrate::{Migration, MigrationType}; pub const DEFAULT_PATH: &str = "./migrations"; @@ -85,9 +88,7 @@ impl ToTokens for QuoteMigration { } pub fn default_path(config: &Config) -> &str { - config - .migrate - .migrations_dir + config.migrate.migrations_dir .as_deref() .unwrap_or(DEFAULT_PATH) } @@ -95,10 +96,12 @@ pub fn default_path(config: &Config) -> &str { pub fn expand(path_arg: Option) -> crate::Result { let config = Config::from_crate(); - let path = match path_arg { - Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, - None => { crate::common::resolve_path(default_path(config), Span::call_site()) }?, - }; + let path = match path_arg { + Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, + None => { + crate::common::resolve_path(default_path(config), Span::call_site()) + }? 
+ }; expand_with_path(config, &path) } @@ -136,21 +139,18 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result crate::Result { let path = crate::migrate::default_path(config); - let resolved_path = crate::common::resolve_path(path, proc_macro2::Span::call_site())?; + let resolved_path = + crate::common::resolve_path(path, proc_macro2::Span::call_site())?; if resolved_path.is_dir() { let migrator = crate::migrate::expand_with_path(config, &resolved_path)?; diff --git a/sqlx-macros/src/lib.rs b/sqlx-macros/src/lib.rs index ccffc9bd2a..f527f5d2fd 100644 --- a/sqlx-macros/src/lib.rs +++ b/sqlx-macros/src/lib.rs @@ -68,7 +68,7 @@ pub fn derive_from_row(input: TokenStream) -> TokenStream { pub fn migrate(input: TokenStream) -> TokenStream { use syn::LitStr; - let input = syn::parse_macro_input!(input as Option); + let input = syn::parse_macro_input!(input as LitStr); match migrate::expand(input) { Ok(ts) => ts.into(), Err(e) => { From 77db4af4d29f2163201b70578676b00145b52dc4 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 23 Sep 2024 02:15:14 -0700 Subject: [PATCH 32/78] feat: teach macros about `migrate.ignored-chars` --- sqlx-macros-core/src/migrate.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 6d9cf96cb6..db46c49abb 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -11,7 +11,7 @@ use syn::spanned::Spanned; use syn::LitStr; use syn::spanned::Spanned; use sqlx_core::config::Config; -use sqlx_core::migrate::{Migration, MigrationType}; +use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; pub const DEFAULT_PATH: &str = "./migrations"; @@ -114,7 +114,8 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result Date: Sat, 5 Oct 2024 15:21:32 -0700 Subject: [PATCH 33/78] feat: teach `sqlx-cli` about `migrate.defaults` --- sqlx-cli/src/lib.rs | 9 +++-- sqlx-cli/src/migrate.rs | 33 ++++++++++++----- sqlx-cli/src/opt.rs | 78 ++++++++++++++++++++++++++++++++--------- 3 files changed, 90 insertions(+), 30 deletions(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 55eb86c55c..95e5273771 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,6 +1,6 @@ use std::future::Future; use std::io; -use std::path::{Path, PathBuf}; +use std::path::{PathBuf}; use std::time::Duration; use anyhow::{Context, Result}; @@ -23,8 +23,7 @@ mod prepare; pub use crate::opt::Opt; -pub use sqlx::_unstable::config; -use crate::config::Config; +pub use sqlx::_unstable::config::{self, Config}; /// Check arguments for `--no-dotenv` _before_ Clap parsing, and apply `.env` if not set. 
pub fn maybe_apply_dotenv() { @@ -53,11 +52,11 @@ pub async fn run(opt: Opt) -> Result<()> { } async fn do_run(opt: Opt) -> Result<()> { - let config = config_from_current_dir()?; + let config = config_from_current_dir().await?; match opt.command { Command::Migrate(migrate) => match migrate.command { - MigrateCommand::Add(opts) => migrate::add(config, opts).await?, + MigrateCommand::Add(opts)=> migrate::add(config, opts).await?, MigrateCommand::Run { source, config, diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 3618fbe7a3..9db31d5482 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,5 +1,4 @@ -use crate::config::Config; -use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; +use crate::opt::{AddMigrationOpts, ConnectOpts}; use anyhow::{bail, Context}; use console::style; use sqlx::migrate::{ @@ -12,6 +11,7 @@ use std::fmt::Write; use std::fs::{self, File}; use std::path::Path; use std::time::Duration; +use crate::config::Config; pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> { let source = opts.source.resolve(config); @@ -22,22 +22,39 @@ pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> let version_prefix = opts.version_prefix(config, &migrator); + let mut file = File::create(&path).context("Failed to create migration file")?; + + std::io::Write::write_all(&mut file, migration_type.file_content().as_bytes())?; + + Ok(()) +} + +pub async fn add( + config: &Config, + opts: AddMigrationOpts, +) -> anyhow::Result<()> { + fs::create_dir_all(&opts.source).context("Unable to create migrations directory")?; + + let migrator = Migrator::new(opts.source.as_ref()).await?; + + let version_prefix = opts.version_prefix(config, &migrator); + if opts.reversible(config, &migrator) { create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::ReversibleUp, )?; create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::ReversibleDown, )?; } else { create_file( - source, + &opts.source, &version_prefix, &opts.description, MigrationType::Simple, @@ -45,13 +62,13 @@ pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> } // if the migrations directory is empty - let has_existing_migrations = fs::read_dir(source) + let has_existing_migrations = fs::read_dir(&opts.source) .map(|mut dir| dir.next().is_some()) .unwrap_or(false); if !has_existing_migrations { - let quoted_source = if opts.source.source.is_some() { - format!("{source:?}") + let quoted_source = if *opts.source != "migrations" { + format!("{:?}", *opts.source) } else { "".to_string() }; diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index daa8a71f5b..f6b1a0a9cf 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,13 +1,17 @@ use std::env; use std::ops::{Deref, Not}; +use std::path::Path; use anyhow::Context; +use chrono::Utc; use clap::{ builder::{styling::AnsiColor, Styles}, Args, Parser, }; #[cfg(feature = "completions")] use clap_complete::Shell; -use sqlx::config::Config; +use crate::config::Config; +use sqlx::migrate::Migrator; +use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; const HELP_STYLES: Styles = Styles::styled() .header(AnsiColor::Blue.on_default().bold()) @@ -278,6 +282,35 @@ pub enum MigrateCommand { }, } +#[derive(Args, Debug)] +pub struct AddMigrationOpts { + pub description: String, + + #[clap(flatten)] + pub source: Source, + + /// If set, create an up-migration only. 
Conflicts with `--reversible`. + #[clap(long, conflicts_with = "reversible")] + simple: bool, + + /// If set, create a pair of up and down migration files with same version. + /// + /// Conflicts with `--simple`. + #[clap(short, long, conflicts_with = "simple")] + reversible: bool, + + /// If set, use timestamp versioning for the new migration. Conflicts with `--sequential`. + /// + /// Timestamp format: `YYYYMMDDHHMMSS` + #[clap(short, long, conflicts_with = "sequential")] + timestamp: bool, + + /// If set, use sequential versioning for the new migration. Conflicts with `--timestamp`. + #[clap(short, long, conflicts_with = "timestamp")] + sequential: bool, +} + +/// Argument for the migration scripts source. #[derive(Args, Debug)] pub struct AddMigrationOpts { pub description: String, @@ -326,6 +359,12 @@ impl MigrationSourceOpt { } } +impl AsRef for Source { + fn as_ref(&self) -> &Path { + Path::new(&self.source) + } +} + /// Argument for the database URL. #[derive(Args, Debug)] pub struct ConnectOpts { @@ -454,20 +493,22 @@ impl Not for IgnoreMissing { impl AddMigrationOpts { pub fn reversible(&self, config: &Config, migrator: &Migrator) -> bool { - if self.reversible { - return true; - } - if self.simple { - return false; - } + if self.reversible { return true; } + if self.simple { return false; } match config.migrate.defaults.migration_type { - DefaultMigrationType::Inferred => migrator - .iter() - .last() - .is_some_and(|m| m.migration_type.is_reversible()), - DefaultMigrationType::Simple => false, - DefaultMigrationType::Reversible => true, + DefaultMigrationType::Inferred => { + migrator + .iter() + .last() + .is_some_and(|m| m.migration_type.is_reversible()) + } + DefaultMigrationType::Simple => { + false + } + DefaultMigrationType::Reversible => { + true + } } } @@ -479,7 +520,8 @@ impl AddMigrationOpts { } if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) { - return next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)); + return next_sequential(migrator) + .unwrap_or_else(|| fmt_sequential(1)); } next_sequential(migrator).unwrap_or_else(next_timestamp) @@ -499,11 +541,13 @@ fn next_sequential(migrator: &Migrator) -> Option { match migrations { [previous, latest] => { // If the latest two versions differ by 1, infer sequential. 
- (latest.version - previous.version == 1).then_some(latest.version + 1) - } + (latest.version - previous.version == 1) + .then_some(latest.version + 1) + }, [latest] => { // If only one migration exists and its version is 0 or 1, infer sequential - matches!(latest.version, 0 | 1).then_some(latest.version + 1) + matches!(latest.version, 0 | 1) + .then_some(latest.version + 1) } _ => unreachable!(), } From 217742a5b7bf4b0374724fdd07dc5c93f24e6b6d Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 15 Jan 2025 10:31:03 -0800 Subject: [PATCH 34/78] feat: teach `sqlx-cli` about `migrate.migrations-dir` --- sqlx-cli/src/database.rs | 12 ++--- sqlx-cli/src/lib.rs | 8 +-- sqlx-cli/src/migrate.rs | 64 +++++++----------------- sqlx-cli/src/opt.rs | 40 +-------------- sqlx-core/src/config/migrate.rs | 15 +++++- sqlx-macros-core/src/migrate.rs | 3 +- sqlx-macros/src/lib.rs | 2 +- sqlx-postgres/src/connection/describe.rs | 4 +- 8 files changed, 47 insertions(+), 101 deletions(-) diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index 367c77dbc9..3561477fac 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -1,7 +1,7 @@ -use crate::opt::{ConnectOpts, MigrationSourceOpt}; use crate::{migrate, Config}; -use console::style; -use promptly::{prompt, ReadlineError}; +use crate::opt::{ConnectOpts, MigrationSourceOpt}; +use console::{style, Term}; +use dialoguer::Confirm; use sqlx::any::Any; use sqlx::migrate::MigrateDatabase; use std::{io, mem}; @@ -56,11 +56,7 @@ pub async fn reset( setup(config, migration_source, connect_opts).await } -pub async fn setup( - config: &Config, - migration_source: &MigrationSourceOpt, - connect_opts: &ConnectOpts, -) -> anyhow::Result<()> { +pub async fn setup(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { create(connect_opts).await?; migrate::run(config, migration_source, connect_opts, false, false, None).await } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 95e5273771..d1cfcb95eb 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -103,9 +103,9 @@ async fn do_run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; - migrate::info(&source, &connect_opts).await? + migrate::info(config, &source, &connect_opts).await? }, - MigrateCommand::BuildScript { source, force } => migrate::build_script(&source, force)?, + MigrateCommand::BuildScript { source, force } => migrate::build_script(config, &source, force)?, }, Command::Database(database) => match database.command { @@ -128,14 +128,14 @@ async fn do_run(opt: Opt) -> Result<()> { force, } => { connect_opts.populate_db_url(config)?; - database::reset(&source, &connect_opts, !confirmation.yes, force).await? + database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? }, DatabaseCommand::Setup { source, mut connect_opts, } => { connect_opts.populate_db_url(config)?; - database::setup(&source, &connect_opts).await? + database::setup(config, &source, &connect_opts).await? 
}, }, diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 9db31d5482..c124a0bdb2 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,9 +1,7 @@ -use crate::opt::{AddMigrationOpts, ConnectOpts}; +use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; use anyhow::{bail, Context}; use console::style; -use sqlx::migrate::{ - AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith, -}; +use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith}; use sqlx::Connection; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; @@ -13,48 +11,34 @@ use std::path::Path; use std::time::Duration; use crate::config::Config; -pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> { - let source = opts.source.resolve(config); - - fs::create_dir_all(source).context("Unable to create migrations directory")?; - - let migrator = Migrator::new(Path::new(source)).await?; - - let version_prefix = opts.version_prefix(config, &migrator); - - let mut file = File::create(&path).context("Failed to create migration file")?; - - std::io::Write::write_all(&mut file, migration_type.file_content().as_bytes())?; - - Ok(()) -} - pub async fn add( config: &Config, opts: AddMigrationOpts, ) -> anyhow::Result<()> { - fs::create_dir_all(&opts.source).context("Unable to create migrations directory")?; + let source = opts.source.resolve(config); + + fs::create_dir_all(source).context("Unable to create migrations directory")?; - let migrator = Migrator::new(opts.source.as_ref()).await?; + let migrator = Migrator::new(Path::new(source)).await?; let version_prefix = opts.version_prefix(config, &migrator); if opts.reversible(config, &migrator) { create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::ReversibleUp, )?; create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::ReversibleDown, )?; } else { create_file( - &opts.source, + source, &version_prefix, &opts.description, MigrationType::Simple, @@ -62,13 +46,13 @@ pub async fn add( } // if the migrations directory is empty - let has_existing_migrations = fs::read_dir(&opts.source) + let has_existing_migrations = fs::read_dir(source) .map(|mut dir| dir.next().is_some()) .unwrap_or(false); if !has_existing_migrations { - let quoted_source = if *opts.source != "migrations" { - format!("{:?}", *opts.source) + let quoted_source = if opts.source.source.is_some() { + format!("{source:?}") } else { "".to_string() }; @@ -140,18 +124,10 @@ fn short_checksum(checksum: &[u8]) -> String { s } -pub async fn info( - config: &Config, - migration_source: &MigrationSourceOpt, - connect_opts: &ConnectOpts, -) -> anyhow::Result<()> { +pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { let source = migration_source.resolve(config); - - let migrator = Migrator::new(ResolveWith( - Path::new(source), - config.migrate.to_resolve_config(), - )) - .await?; + + let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; let mut conn = crate::connect(connect_opts).await?; // FIXME: we shouldn't actually be creating anything here @@ -244,7 +220,7 @@ pub async fn run( target_version: Option, ) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if 
!migrator.version_exists(target_version) { @@ -442,13 +418,9 @@ pub async fn revert( Ok(()) } -pub fn build_script( - config: &Config, - migration_source: &MigrationSourceOpt, - force: bool, -) -> anyhow::Result<()> { +pub fn build_script(config: &Config, migration_source: &MigrationSourceOpt, force: bool) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + anyhow::ensure!( Path::new("Cargo.toml").exists(), "must be run in a Cargo project root" diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index f6b1a0a9cf..e1c9361f2c 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,6 +1,5 @@ use std::env; use std::ops::{Deref, Not}; -use std::path::Path; use anyhow::Context; use chrono::Utc; use clap::{ @@ -282,35 +281,6 @@ pub enum MigrateCommand { }, } -#[derive(Args, Debug)] -pub struct AddMigrationOpts { - pub description: String, - - #[clap(flatten)] - pub source: Source, - - /// If set, create an up-migration only. Conflicts with `--reversible`. - #[clap(long, conflicts_with = "reversible")] - simple: bool, - - /// If set, create a pair of up and down migration files with same version. - /// - /// Conflicts with `--simple`. - #[clap(short, long, conflicts_with = "simple")] - reversible: bool, - - /// If set, use timestamp versioning for the new migration. Conflicts with `--sequential`. - /// - /// Timestamp format: `YYYYMMDDHHMMSS` - #[clap(short, long, conflicts_with = "sequential")] - timestamp: bool, - - /// If set, use sequential versioning for the new migration. Conflicts with `--timestamp`. - #[clap(short, long, conflicts_with = "timestamp")] - sequential: bool, -} - -/// Argument for the migration scripts source. #[derive(Args, Debug)] pub struct AddMigrationOpts { pub description: String, @@ -343,7 +313,7 @@ pub struct AddMigrationOpts { #[derive(Args, Debug)] pub struct MigrationSourceOpt { /// Path to folder containing migrations. - /// + /// /// Defaults to `migrations/` if not specified, but a different default may be set by `sqlx.toml`. #[clap(long)] pub source: Option, @@ -354,17 +324,11 @@ impl MigrationSourceOpt { if let Some(source) = &self.source { return source; } - + config.migrate.migrations_dir() } } -impl AsRef for Source { - fn as_ref(&self) -> &Path { - Path::new(&self.source) - } -} - /// Argument for the database URL. #[derive(Args, Debug)] pub struct ConnectOpts { diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 64529f9f02..666ed5bf92 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -85,7 +85,7 @@ pub struct Config { /// To make your migrations amenable to reformatting, you may wish to tell SQLx to ignore /// _all_ whitespace characters in migrations. /// - /// ##### Warning: Beware Syntatically Significant Whitespace! + /// ##### Warning: Beware Syntactically Significant Whitespace! /// If your migrations use string literals or quoted identifiers which contain whitespace, /// this configuration will cause the migration machinery to ignore some changes to these. /// This may result in a mismatch between the development and production versions of @@ -179,3 +179,16 @@ pub enum DefaultVersioning { /// Use sequential integers for migration versions. 
Sequential, } + +#[cfg(feature = "migrate")] +impl Config { + pub fn migrations_dir(&self) -> &str { + self.migrations_dir.as_deref().unwrap_or("migrations") + } + + pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig { + let mut config = crate::migrate::ResolveConfig::new(); + config.ignore_chars(self.ignored_chars.iter().copied()); + config + } +} \ No newline at end of file diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index db46c49abb..2b5f17bc97 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -114,8 +114,7 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result TokenStream { pub fn migrate(input: TokenStream) -> TokenStream { use syn::LitStr; - let input = syn::parse_macro_input!(input as LitStr); + let input = syn::parse_macro_input!(input as Option); match migrate::expand(input) { Ok(ts) => ts.into(), Err(e) => { diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 74f8e1212d..f8d2b6d6c6 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -228,7 +228,8 @@ impl PgConnection { should_fetch: bool, ) -> Result { if let Some(origin) = - self.cache_table_to_column_names + self.inner + .cache_table_to_column_names .get(&relation_id) .and_then(|table_columns| { let column_name = table_columns.columns.get(&attribute_no).cloned()?; @@ -264,6 +265,7 @@ impl PgConnection { }; let table_columns = self + .inner .cache_table_to_column_names .entry(relation_id) .or_insert_with(|| TableColumns { From c6fea5bdb215927341d85c13d3176644a435ccfa Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 22 Jan 2025 14:24:18 -0800 Subject: [PATCH 35/78] feat: teach `sqlx-cli` about `migrate.table-name` --- sqlx-cli/src/migrate.rs | 31 ++--------- sqlx-cli/tests/common/mod.rs | 1 + sqlx-core/src/any/migrate.rs | 38 ++------------ sqlx-core/src/config/migrate.rs | 4 ++ sqlx-core/src/migrate/migrate.rs | 15 ++---- sqlx-core/src/migrate/migrator.rs | 6 +-- sqlx-mysql/src/migrate.rs | 85 ++++++++++++------------------- sqlx-postgres/src/migrate.rs | 67 ++++++++---------------- sqlx-sqlite/src/migrate.rs | 76 ++++++++------------------- 9 files changed, 95 insertions(+), 228 deletions(-) diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index c124a0bdb2..9e0119682e 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -130,13 +130,7 @@ pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connec let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; let mut conn = crate::connect(connect_opts).await?; - // FIXME: we shouldn't actually be creating anything here - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let applied_migrations: HashMap<_, _> = conn .list_applied_migrations(config.migrate.table_name()) @@ -230,21 +224,14 @@ pub async fn run( let mut conn = crate::connect(connect_opts).await?; - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let version = conn.dirty_version(config.migrate.table_name()).await?; 
if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn - .list_applied_migrations(config.migrate.table_name()) - .await?; + let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -332,22 +319,14 @@ pub async fn revert( let mut conn = crate::connect(connect_opts).await?; - // FIXME: we should not be creating anything here if it doesn't exist - for schema_name in &config.migrate.create_schemas { - conn.create_schema_if_not_exists(schema_name).await?; - } - - conn.ensure_migrations_table(config.migrate.table_name()) - .await?; + conn.ensure_migrations_table(config.migrate.table_name()).await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn - .list_applied_migrations(config.migrate.table_name()) - .await?; + let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index b4a70b7bec..0514ca721f 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -6,6 +6,7 @@ use std::{ env, fs, path::{Path, PathBuf}, }; +use sqlx::_unstable::config::Config; pub struct TestDatabase { file_path: PathBuf, diff --git a/sqlx-core/src/any/migrate.rs b/sqlx-core/src/any/migrate.rs index 69b5bf6ab6..b287ec45e5 100644 --- a/sqlx-core/src/any/migrate.rs +++ b/sqlx-core/src/any/migrate.rs @@ -44,44 +44,16 @@ impl MigrateDatabase for Any { } impl Migrate for AnyConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { - self.get_migrate()? - .create_schema_if_not_exists(schema_name) - .await - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { - self.get_migrate()? - .ensure_migrations_table(table_name) - .await - }) + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async { self.get_migrate()?.ensure_migrations_table(table_name).await }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async { self.get_migrate()?.dirty_version(table_name).await }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { - Box::pin(async { - self.get_migrate()? 
-                .list_applied_migrations(table_name)
-                .await
-        })
+    fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>> {
+        Box::pin(async { self.get_migrate()?.list_applied_migrations(table_name).await })
     }
 
     fn lock(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> {
diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs
index 666ed5bf92..a70938b209 100644
--- a/sqlx-core/src/config/migrate.rs
+++ b/sqlx-core/src/config/migrate.rs
@@ -186,6 +186,10 @@ impl Config {
         self.migrations_dir.as_deref().unwrap_or("migrations")
     }
 
+    pub fn table_name(&self) -> &str {
+        self.table_name.as_deref().unwrap_or("_sqlx_migrations")
+    }
+
     pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig {
         let mut config = crate::migrate::ResolveConfig::new();
         config.ignore_chars(self.ignored_chars.iter().copied());
         config
     }
diff --git a/sqlx-core/src/migrate/migrate.rs b/sqlx-core/src/migrate/migrate.rs
index 841f775966..b2c36bc1d0 100644
--- a/sqlx-core/src/migrate/migrate.rs
+++ b/sqlx-core/src/migrate/migrate.rs
@@ -33,23 +33,14 @@ pub trait Migrate {
     // ensure migrations table exists
     // will create or migrate it if needed
-    fn ensure_migrations_table<'e>(
-        &'e mut self,
-        table_name: &'e str,
-    ) -> BoxFuture<'e, Result<(), MigrateError>>;
+    fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>>;
 
     // Return the version on which the database is dirty or None otherwise.
     // "dirty" means there is a partially applied migration that failed.
-    fn dirty_version<'e>(
-        &'e mut self,
-        table_name: &'e str,
-    ) -> BoxFuture<'e, Result<Option<i64>, MigrateError>>;
+    fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<Option<i64>, MigrateError>>;
 
     // Return the ordered list of applied migrations
-    fn list_applied_migrations<'e>(
-        &'e mut self,
-        table_name: &'e str,
-    ) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>>;
+    fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>>;
 
     // Should acquire a database lock so that only one migration process
     // can run at a time.
[`Migrate`] will call this function before applying diff --git a/sqlx-core/src/migrate/migrator.rs b/sqlx-core/src/migrate/migrator.rs index 554073b918..bdb18aa6de 100644 --- a/sqlx-core/src/migrate/migrator.rs +++ b/sqlx-core/src/migrate/migrator.rs @@ -27,10 +27,6 @@ pub struct Migrator { pub table_name: Cow<'static, str>, } - #[doc(hidden)] - pub create_schemas: Cow<'static, [Cow<'static, str>]>, -} - impl Migrator { #[doc(hidden)] pub const DEFAULT: Migrator = Migrator { @@ -190,7 +186,7 @@ impl Migrator { // Target version reached break; } - + if migration.migration_type.is_down_migration() { continue; } diff --git a/sqlx-mysql/src/migrate.rs b/sqlx-mysql/src/migrate.rs index 45ca7d98ef..83e823dcf1 100644 --- a/sqlx-mysql/src/migrate.rs +++ b/sqlx-mysql/src/migrate.rs @@ -2,6 +2,8 @@ use std::str::FromStr; use std::time::Duration; use std::time::Instant; +use futures_core::future::BoxFuture; +pub(crate) use sqlx_core::migrate::*; use crate::connection::{ConnectOptions, Connection}; use crate::error::Error; use crate::executor::Executor; @@ -74,27 +76,11 @@ impl MigrateDatabase for MySql { } impl Migrate for MySqlConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // language=SQL - self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) - .await?; - - Ok(()) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=MySQL - self.execute(&*format!( - r#" + self.execute( + &*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -103,23 +89,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) + "#), + ) .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as(&format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -127,17 +110,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = query_as(&format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = + query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -208,12 +187,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later modify the flag. 
// // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?, ?, FALSE, ?, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -226,13 +205,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET success = TRUE WHERE version = ? - "# - )) + "#), + ) .bind(migration.version) .execute(&mut *tx) .await?; @@ -246,13 +225,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( let elapsed = start.elapsed(); #[allow(clippy::cast_possible_truncation)] - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET execution_time = ? WHERE version = ? - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -280,13 +259,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later remove the migration altogether. // // language=MySQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET success = FALSE WHERE version = ? - "# - )) + "#), + ) .bind(migration.version) .execute(&mut *tx) .await?; diff --git a/sqlx-postgres/src/migrate.rs b/sqlx-postgres/src/migrate.rs index 90ebd49a73..2646466399 100644 --- a/sqlx-postgres/src/migrate.rs +++ b/sqlx-postgres/src/migrate.rs @@ -111,27 +111,11 @@ impl MigrateDatabase for Postgres { } impl Migrate for PgConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // language=SQL - self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) - .await?; - - Ok(()) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQL - self.execute(&*format!( - r#" + self.execute( + &*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -140,23 +124,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BYTEA NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) + "#), + ) .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as(&*format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &*format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -164,17 +145,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = query_as(&*format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: 
Vec<(i64, Vec)> = + query_as(&*format!("SELECT version, checksum FROM {table_name} ORDER BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -253,13 +230,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query(&*format!( - r#" + let _ = query( + &*format!(r#" UPDATE {table_name} SET execution_time = $1 WHERE version = $2 - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -306,12 +283,12 @@ async fn execute_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&*format!( - r#" + let _ = query( + &*format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( $1, $2, TRUE, $3, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) diff --git a/sqlx-sqlite/src/migrate.rs b/sqlx-sqlite/src/migrate.rs index e475f70308..bd339c7b51 100644 --- a/sqlx-sqlite/src/migrate.rs +++ b/sqlx-sqlite/src/migrate.rs @@ -65,35 +65,10 @@ impl MigrateDatabase for Sqlite { } impl Migrate for SqliteConnection { - fn create_schema_if_not_exists<'e>( - &'e mut self, - schema_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async move { - // Check if the schema already exists; if so, don't error. - let schema_version: Option = - query_scalar(&format!("PRAGMA {schema_name}.schema_version")) - .fetch_optional(&mut *self) - .await?; - - if schema_version.is_some() { - return Ok(()); - } - - Err(MigrateError::CreateSchemasNotSupported( - format!("cannot create new schema {schema_name}; creation of additional schemas in SQLite requires attaching extra database files"), - )) - }) - } - - fn ensure_migrations_table<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result<(), MigrateError>> { + fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQLite - self.execute(&*format!( - r#" + self.execute(&*format!(r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -102,23 +77,20 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "# - )) - .await?; + "#), + ) + .await?; Ok(()) }) } - fn dirty_version<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let row: Option<(i64,)> = query_as(&format!( - "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" - )) + let row: Option<(i64,)> = query_as( + &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), + ) .fetch_optional(self) .await?; @@ -126,17 +98,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>( - &'e mut self, - table_name: &'e str, - ) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let rows: Vec<(i64, Vec)> = query_as(&format!( - "SELECT version, checksum FROM {table_name} ORDER BY version" - )) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = + query_as(&format!("SELECT version, checksum FROM {table_name} ORDER 
BY version")) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -178,12 +146,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?1, ?2, TRUE, ?3, -1 ) - "# - )) + "#), + ) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -200,13 +168,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query(&format!( - r#" + let _ = query( + &format!(r#" UPDATE {table_name} SET execution_time = ?1 WHERE version = ?2 - "# - )) + "#), + ) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) From b43a957c9e7e28afd969fbc6581633093caba58a Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 22 Jan 2025 15:32:50 -0800 Subject: [PATCH 36/78] feat: introduce `migrate.create-schemas` --- sqlx-cli/src/database.rs | 8 ++- sqlx-cli/src/lib.rs | 24 ++++--- sqlx-cli/src/migrate.rs | 68 +++++++++++++----- sqlx-cli/src/opt.rs | 55 +++++++-------- sqlx-cli/tests/common/mod.rs | 1 - sqlx-core/src/any/migrate.rs | 38 ++++++++-- sqlx-core/src/config/migrate.rs | 20 +++++- sqlx-core/src/migrate/migrate.rs | 15 +++- sqlx-core/src/migrate/migrator.rs | 19 ++++- sqlx-macros-core/src/migrate.rs | 32 ++++----- sqlx-macros-core/src/test_attr.rs | 3 +- sqlx-mysql/src/migrate.rs | 85 ++++++++++++++--------- sqlx-postgres/src/connection/describe.rs | 24 +++---- sqlx-postgres/src/connection/establish.rs | 3 +- sqlx-postgres/src/migrate.rs | 67 ++++++++++++------ sqlx-sqlite/src/migrate.rs | 76 ++++++++++++++------ 16 files changed, 358 insertions(+), 180 deletions(-) diff --git a/sqlx-cli/src/database.rs b/sqlx-cli/src/database.rs index 3561477fac..eaba46eed9 100644 --- a/sqlx-cli/src/database.rs +++ b/sqlx-cli/src/database.rs @@ -1,5 +1,5 @@ -use crate::{migrate, Config}; use crate::opt::{ConnectOpts, MigrationSourceOpt}; +use crate::{migrate, Config}; use console::{style, Term}; use dialoguer::Confirm; use sqlx::any::Any; @@ -56,7 +56,11 @@ pub async fn reset( setup(config, migration_source, connect_opts).await } -pub async fn setup(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { +pub async fn setup( + config: &Config, + migration_source: &MigrationSourceOpt, + connect_opts: &ConnectOpts, +) -> anyhow::Result<()> { create(connect_opts).await?; migrate::run(config, migration_source, connect_opts, false, false, None).await } diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index d1cfcb95eb..52fb232b5a 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,6 +1,6 @@ use std::future::Future; use std::io; -use std::path::{PathBuf}; +use std::path::PathBuf; use std::time::Duration; use anyhow::{Context, Result}; @@ -56,7 +56,7 @@ async fn do_run(opt: Opt) -> Result<()> { match opt.command { Command::Migrate(migrate) => match migrate.command { - MigrateCommand::Add(opts)=> migrate::add(config, opts).await?, + MigrateCommand::Add(opts) => migrate::add(config, opts).await?, MigrateCommand::Run { source, config, @@ -104,15 +104,17 @@ async fn do_run(opt: Opt) -> Result<()> { connect_opts.populate_db_url(config)?; migrate::info(config, &source, &connect_opts).await? 
- }, - MigrateCommand::BuildScript { source, force } => migrate::build_script(config, &source, force)?, + } + MigrateCommand::BuildScript { source, force } => { + migrate::build_script(config, &source, force)? + } }, Command::Database(database) => match database.command { DatabaseCommand::Create { mut connect_opts } => { connect_opts.populate_db_url(config)?; database::create(&connect_opts).await? - }, + } DatabaseCommand::Drop { confirmation, mut connect_opts, @@ -120,7 +122,7 @@ async fn do_run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; database::drop(&connect_opts, !confirmation.yes, force).await? - }, + } DatabaseCommand::Reset { confirmation, source, @@ -129,14 +131,14 @@ async fn do_run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? - }, + } DatabaseCommand::Setup { source, mut connect_opts, } => { connect_opts.populate_db_url(config)?; database::setup(config, &source, &connect_opts).await? - }, + } }, Command::Prepare { @@ -148,7 +150,7 @@ async fn do_run(opt: Opt) -> Result<()> { } => { connect_opts.populate_db_url(config)?; prepare::run(check, all, workspace, connect_opts, args).await? - }, + } #[cfg(feature = "completions")] Command::Completions { shell } => completions::run(shell), @@ -213,6 +215,6 @@ async fn config_from_current_dir() -> anyhow::Result<&'static Config> { Config::read_with_or_default(move || Ok(path)) }) - .await - .context("unexpected error loading config") + .await + .context("unexpected error loading config") } diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 9e0119682e..3618fbe7a3 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -1,7 +1,10 @@ +use crate::config::Config; use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; use anyhow::{bail, Context}; use console::style; -use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith}; +use sqlx::migrate::{ + AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith, +}; use sqlx::Connection; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; @@ -9,14 +12,10 @@ use std::fmt::Write; use std::fs::{self, File}; use std::path::Path; use std::time::Duration; -use crate::config::Config; -pub async fn add( - config: &Config, - opts: AddMigrationOpts, -) -> anyhow::Result<()> { +pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> { let source = opts.source.resolve(config); - + fs::create_dir_all(source).context("Unable to create migrations directory")?; let migrator = Migrator::new(Path::new(source)).await?; @@ -124,13 +123,27 @@ fn short_checksum(checksum: &[u8]) -> String { s } -pub async fn info(config: &Config, migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts) -> anyhow::Result<()> { +pub async fn info( + config: &Config, + migration_source: &MigrationSourceOpt, + connect_opts: &ConnectOpts, +) -> anyhow::Result<()> { let source = migration_source.resolve(config); - - let migrator = Migrator::new(ResolveWith(Path::new(source), config.migrate.to_resolve_config())).await?; + + let migrator = Migrator::new(ResolveWith( + Path::new(source), + config.migrate.to_resolve_config(), + )) + .await?; let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + // FIXME: we shouldn't actually be creating anything here + for schema_name in 
&config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let applied_migrations: HashMap<_, _> = conn .list_applied_migrations(config.migrate.table_name()) @@ -214,7 +227,7 @@ pub async fn run( target_version: Option, ) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if !migrator.version_exists(target_version) { @@ -224,14 +237,21 @@ pub async fn run( let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + for schema_name in &config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; + let applied_migrations = conn + .list_applied_migrations(config.migrate.table_name()) + .await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -319,14 +339,22 @@ pub async fn revert( let mut conn = crate::connect(connect_opts).await?; - conn.ensure_migrations_table(config.migrate.table_name()).await?; + // FIXME: we should not be creating anything here if it doesn't exist + for schema_name in &config.migrate.create_schemas { + conn.create_schema_if_not_exists(schema_name).await?; + } + + conn.ensure_migrations_table(config.migrate.table_name()) + .await?; let version = conn.dirty_version(config.migrate.table_name()).await?; if let Some(version) = version { bail!(MigrateError::Dirty(version)); } - let applied_migrations = conn.list_applied_migrations(config.migrate.table_name()).await?; + let applied_migrations = conn + .list_applied_migrations(config.migrate.table_name()) + .await?; validate_applied_migrations(&applied_migrations, &migrator, ignore_missing)?; let latest_version = applied_migrations @@ -397,9 +425,13 @@ pub async fn revert( Ok(()) } -pub fn build_script(config: &Config, migration_source: &MigrationSourceOpt, force: bool) -> anyhow::Result<()> { +pub fn build_script( + config: &Config, + migration_source: &MigrationSourceOpt, + force: bool, +) -> anyhow::Result<()> { let source = migration_source.resolve(config); - + anyhow::ensure!( Path::new("Cargo.toml").exists(), "must be run in a Cargo project root" diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index e1c9361f2c..c42dd82e8c 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -1,5 +1,5 @@ -use std::env; -use std::ops::{Deref, Not}; +use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; +use crate::config::Config; use anyhow::Context; use chrono::Utc; use clap::{ @@ -8,9 +8,9 @@ use clap::{ }; #[cfg(feature = "completions")] use clap_complete::Shell; -use crate::config::Config; use sqlx::migrate::Migrator; -use crate::config::migrate::{DefaultMigrationType, DefaultVersioning}; +use std::env; +use std::ops::{Deref, Not}; const HELP_STYLES: Styles = Styles::styled() .header(AnsiColor::Blue.on_default().bold()) @@ -313,7 +313,7 @@ pub struct AddMigrationOpts { #[derive(Args, Debug)] pub struct MigrationSourceOpt { /// Path to folder containing migrations. 
- /// + /// /// Defaults to `migrations/` if not specified, but a different default may be set by `sqlx.toml`. #[clap(long)] pub source: Option, @@ -324,7 +324,7 @@ impl MigrationSourceOpt { if let Some(source) = &self.source { return source; } - + config.migrate.migrations_dir() } } @@ -385,7 +385,9 @@ impl ConnectOpts { /// Require a database URL to be provided, otherwise /// return an error. pub fn expect_db_url(&self) -> anyhow::Result<&str> { - self.database_url.as_deref().context("BUG: database_url not populated") + self.database_url + .as_deref() + .context("BUG: database_url not populated") } /// Populate `database_url` from the environment, if not set. @@ -409,7 +411,7 @@ impl ConnectOpts { } self.database_url = Some(url) - }, + } Err(env::VarError::NotPresent) => { anyhow::bail!("`--database-url` or `{var}`{context} must be set") } @@ -457,22 +459,20 @@ impl Not for IgnoreMissing { impl AddMigrationOpts { pub fn reversible(&self, config: &Config, migrator: &Migrator) -> bool { - if self.reversible { return true; } - if self.simple { return false; } + if self.reversible { + return true; + } + if self.simple { + return false; + } match config.migrate.defaults.migration_type { - DefaultMigrationType::Inferred => { - migrator - .iter() - .last() - .is_some_and(|m| m.migration_type.is_reversible()) - } - DefaultMigrationType::Simple => { - false - } - DefaultMigrationType::Reversible => { - true - } + DefaultMigrationType::Inferred => migrator + .iter() + .last() + .is_some_and(|m| m.migration_type.is_reversible()), + DefaultMigrationType::Simple => false, + DefaultMigrationType::Reversible => true, } } @@ -484,8 +484,7 @@ impl AddMigrationOpts { } if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) { - return next_sequential(migrator) - .unwrap_or_else(|| fmt_sequential(1)); + return next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)); } next_sequential(migrator).unwrap_or_else(next_timestamp) @@ -505,13 +504,11 @@ fn next_sequential(migrator: &Migrator) -> Option { match migrations { [previous, latest] => { // If the latest two versions differ by 1, infer sequential. - (latest.version - previous.version == 1) - .then_some(latest.version + 1) - }, + (latest.version - previous.version == 1).then_some(latest.version + 1) + } [latest] => { // If only one migration exists and its version is 0 or 1, infer sequential - matches!(latest.version, 0 | 1) - .then_some(latest.version + 1) + matches!(latest.version, 0 | 1).then_some(latest.version + 1) } _ => unreachable!(), } diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index 0514ca721f..b4a70b7bec 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -6,7 +6,6 @@ use std::{ env, fs, path::{Path, PathBuf}, }; -use sqlx::_unstable::config::Config; pub struct TestDatabase { file_path: PathBuf, diff --git a/sqlx-core/src/any/migrate.rs b/sqlx-core/src/any/migrate.rs index b287ec45e5..69b5bf6ab6 100644 --- a/sqlx-core/src/any/migrate.rs +++ b/sqlx-core/src/any/migrate.rs @@ -44,16 +44,44 @@ impl MigrateDatabase for Any { } impl Migrate for AnyConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { - Box::pin(async { self.get_migrate()?.ensure_migrations_table(table_name).await }) + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async { + self.get_migrate()? 
+                .create_schema_if_not_exists(schema_name)
+                .await
+        })
+    }
+
+    fn ensure_migrations_table<'e>(
+        &'e mut self,
+        table_name: &'e str,
+    ) -> BoxFuture<'e, Result<(), MigrateError>> {
+        Box::pin(async {
+            self.get_migrate()?
+                .ensure_migrations_table(table_name)
+                .await
+        })
     }
 
-    fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<Option<i64>, MigrateError>> {
+    fn dirty_version<'e>(
+        &'e mut self,
+        table_name: &'e str,
+    ) -> BoxFuture<'e, Result<Option<i64>, MigrateError>> {
         Box::pin(async { self.get_migrate()?.dirty_version(table_name).await })
     }
 
-    fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>> {
-        Box::pin(async { self.get_migrate()?.list_applied_migrations(table_name).await })
+    fn list_applied_migrations<'e>(
+        &'e mut self,
+        table_name: &'e str,
+    ) -> BoxFuture<'e, Result<Vec<AppliedMigration>, MigrateError>> {
+        Box::pin(async {
+            self.get_migrate()?
+                .list_applied_migrations(table_name)
+                .await
+        })
     }
 
     fn lock(&mut self) -> BoxFuture<'_, Result<(), MigrateError>> {
diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs
index a70938b209..4865e24c76 100644
--- a/sqlx-core/src/config/migrate.rs
+++ b/sqlx-core/src/config/migrate.rs
@@ -19,6 +19,20 @@ use std::collections::BTreeSet;
     serde(default, rename_all = "kebab-case")
 )]
 pub struct Config {
+    /// Specify the names of schemas to create if they don't already exist.
+    ///
+    /// This is done before checking the existence of the migrations table
+    /// (`_sqlx_migrations` or overridden `table_name` below) so that it may be placed in
+    /// one of these schemas.
+    ///
+    /// ### Example
+    /// `sqlx.toml`:
+    /// ```toml
+    /// [migrate]
+    /// create-schemas = ["foo"]
+    /// ```
+    pub create_schemas: BTreeSet<Box<str>>,
+
     /// Override the name of the table used to track executed migrations.
     ///
     /// May be schema-qualified and/or contain quotes. Defaults to `_sqlx_migrations`.
@@ -185,14 +199,14 @@ impl Config {
     pub fn migrations_dir(&self) -> &str {
         self.migrations_dir.as_deref().unwrap_or("migrations")
     }
-    
+
     pub fn table_name(&self) -> &str {
         self.table_name.as_deref().unwrap_or("_sqlx_migrations")
     }
-    
+
     pub fn to_resolve_config(&self) -> crate::migrate::ResolveConfig {
         let mut config = crate::migrate::ResolveConfig::new();
         config.ignore_chars(self.ignored_chars.iter().copied());
         config
     }
-}
\ No newline at end of file
+}
diff --git a/sqlx-core/src/migrate/migrate.rs b/sqlx-core/src/migrate/migrate.rs
index b2c36bc1d0..841f775966 100644
--- a/sqlx-core/src/migrate/migrate.rs
+++ b/sqlx-core/src/migrate/migrate.rs
@@ -33,14 +33,23 @@ pub trait Migrate {
     // ensure migrations table exists
     // will create or migrate it if needed
-    fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>>;
+    fn ensure_migrations_table<'e>(
+        &'e mut self,
+        table_name: &'e str,
+    ) -> BoxFuture<'e, Result<(), MigrateError>>;
 
     // Return the version on which the database is dirty or None otherwise.
     // "dirty" means there is a partially applied migration that failed.
- fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>>; // Return the ordered list of applied migrations - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>>; + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>>; // Should acquire a database lock so that only one migration process // can run at a time. [`Migrate`] will call this function before applying diff --git a/sqlx-core/src/migrate/migrator.rs b/sqlx-core/src/migrate/migrator.rs index bdb18aa6de..1ae4813106 100644 --- a/sqlx-core/src/migrate/migrator.rs +++ b/sqlx-core/src/migrate/migrator.rs @@ -25,6 +25,9 @@ pub struct Migrator { pub no_tx: bool, #[doc(hidden)] pub table_name: Cow<'static, str>, + + #[doc(hidden)] + pub create_schemas: Cow<'static, [Cow<'static, str>]>, } impl Migrator { @@ -35,6 +38,7 @@ impl Migrator { no_tx: false, locking: true, table_name: Cow::Borrowed("_sqlx_migrations"), + create_schemas: Cow::Borrowed(&[]), }; /// Creates a new instance with the given source. @@ -84,6 +88,19 @@ impl Migrator { self } + /// Add a schema name to be created if it does not already exist. + /// + /// May be used with [`Self::dangerous_set_table_name()`] to place the migrations table + /// in a new schema without requiring it to exist first. + /// + /// ### Note: Support Depends on Database + /// SQLite cannot create new schemas without attaching them to a database file, + /// the path of which must be specified separately in an [`ATTACH DATABASE`](https://www.sqlite.org/lang_attach.html) command. + pub fn create_schema(&mut self, schema_name: impl Into>) -> &Self { + self.create_schemas.to_mut().push(schema_name.into()); + self + } + /// Specify whether applied migrations that are missing from the resolved migrations should be ignored. pub fn set_ignore_missing(&mut self, ignore_missing: bool) -> &mut Self { self.ignore_missing = ignore_missing; @@ -186,7 +203,7 @@ impl Migrator { // Target version reached break; } - + if migration.migration_type.is_down_migration() { continue; } diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 2b5f17bc97..5342f8861e 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -9,9 +9,6 @@ use sqlx_core::config::Config; use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; use syn::spanned::Spanned; use syn::LitStr; -use syn::spanned::Spanned; -use sqlx_core::config::Config; -use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; pub const DEFAULT_PATH: &str = "./migrations"; @@ -88,7 +85,9 @@ impl ToTokens for QuoteMigration { } pub fn default_path(config: &Config) -> &str { - config.migrate.migrations_dir + config + .migrate + .migrations_dir .as_deref() .unwrap_or(DEFAULT_PATH) } @@ -96,12 +95,10 @@ pub fn default_path(config: &Config) -> &str { pub fn expand(path_arg: Option) -> crate::Result { let config = Config::from_crate(); - let path = match path_arg { - Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, - None => { - crate::common::resolve_path(default_path(config), Span::call_site()) - }? 
- }; + let path = match path_arg { + Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, + None => { crate::common::resolve_path(default_path(config), Span::call_site()) }?, + }; expand_with_path(config, &path) } @@ -139,18 +136,21 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result crate::Result { let path = crate::migrate::default_path(config); - let resolved_path = - crate::common::resolve_path(path, proc_macro2::Span::call_site())?; + let resolved_path = crate::common::resolve_path(path, proc_macro2::Span::call_site())?; if resolved_path.is_dir() { let migrator = crate::migrate::expand_with_path(config, &resolved_path)?; diff --git a/sqlx-mysql/src/migrate.rs b/sqlx-mysql/src/migrate.rs index 83e823dcf1..45ca7d98ef 100644 --- a/sqlx-mysql/src/migrate.rs +++ b/sqlx-mysql/src/migrate.rs @@ -2,8 +2,6 @@ use std::str::FromStr; use std::time::Duration; use std::time::Instant; -use futures_core::future::BoxFuture; -pub(crate) use sqlx_core::migrate::*; use crate::connection::{ConnectOptions, Connection}; use crate::error::Error; use crate::executor::Executor; @@ -76,11 +74,27 @@ impl MigrateDatabase for MySql { } impl Migrate for MySqlConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // language=SQL + self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) + .await?; + + Ok(()) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=MySQL - self.execute( - &*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -89,20 +103,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) + "# + )) .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as( - &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -110,13 +127,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = - query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -187,12 +208,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later modify the flag. 
// // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?, ?, FALSE, ?, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -205,13 +226,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET success = TRUE WHERE version = ? - "#), - ) + "# + )) .bind(migration.version) .execute(&mut *tx) .await?; @@ -225,13 +246,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( let elapsed = start.elapsed(); #[allow(clippy::cast_possible_truncation)] - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET execution_time = ? WHERE version = ? - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -259,13 +280,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // `success=FALSE` and later remove the migration altogether. // // language=MySQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET success = FALSE WHERE version = ? - "#), - ) + "# + )) .bind(migration.version) .execute(&mut *tx) .await?; diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index f8d2b6d6c6..aae941b623 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -227,18 +227,18 @@ impl PgConnection { attribute_no: i16, should_fetch: bool, ) -> Result { - if let Some(origin) = - self.inner - .cache_table_to_column_names - .get(&relation_id) - .and_then(|table_columns| { - let column_name = table_columns.columns.get(&attribute_no).cloned()?; - - Some(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: column_name, - })) - }) + if let Some(origin) = self + .inner + .cache_table_to_column_names + .get(&relation_id) + .and_then(|table_columns| { + let column_name = table_columns.columns.get(&attribute_no).cloned()?; + + Some(ColumnOrigin::Table(TableColumn { + table: table_columns.table_name.clone(), + name: column_name, + })) + }) { return Ok(origin); } diff --git a/sqlx-postgres/src/connection/establish.rs b/sqlx-postgres/src/connection/establish.rs index 684bf26599..634b71de4b 100644 --- a/sqlx-postgres/src/connection/establish.rs +++ b/sqlx-postgres/src/connection/establish.rs @@ -149,7 +149,8 @@ impl PgConnection { cache_type_info: HashMap::new(), cache_elem_type_to_array: HashMap::new(), cache_table_to_column_names: HashMap::new(), - log_settings: options.log_settings.clone(),}), + log_settings: options.log_settings.clone(), + }), }) } } diff --git a/sqlx-postgres/src/migrate.rs b/sqlx-postgres/src/migrate.rs index 2646466399..90ebd49a73 100644 --- a/sqlx-postgres/src/migrate.rs +++ b/sqlx-postgres/src/migrate.rs @@ -111,11 +111,27 @@ impl MigrateDatabase for Postgres { } impl Migrate for PgConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // language=SQL + self.execute(&*format!(r#"CREATE SCHEMA IF NOT EXISTS {schema_name};"#)) + .await?; + + Ok(()) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> 
BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQL - self.execute( - &*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -124,20 +140,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BYTEA NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) + "# + )) .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as( - &*format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&*format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -145,13 +164,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = - query_as(&*format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&*format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -230,13 +253,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query( - &*format!(r#" + let _ = query(&*format!( + r#" UPDATE {table_name} SET execution_time = $1 WHERE version = $2 - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) @@ -283,12 +306,12 @@ async fn execute_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query( - &*format!(r#" + let _ = query(&*format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( $1, $2, TRUE, $3, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) diff --git a/sqlx-sqlite/src/migrate.rs b/sqlx-sqlite/src/migrate.rs index bd339c7b51..e475f70308 100644 --- a/sqlx-sqlite/src/migrate.rs +++ b/sqlx-sqlite/src/migrate.rs @@ -65,10 +65,35 @@ impl MigrateDatabase for Sqlite { } impl Migrate for SqliteConnection { - fn ensure_migrations_table<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result<(), MigrateError>> { + fn create_schema_if_not_exists<'e>( + &'e mut self, + schema_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { + Box::pin(async move { + // Check if the schema already exists; if so, don't error. 
+ let schema_version: Option = + query_scalar(&format!("PRAGMA {schema_name}.schema_version")) + .fetch_optional(&mut *self) + .await?; + + if schema_version.is_some() { + return Ok(()); + } + + Err(MigrateError::CreateSchemasNotSupported( + format!("cannot create new schema {schema_name}; creation of additional schemas in SQLite requires attaching extra database files"), + )) + }) + } + + fn ensure_migrations_table<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result<(), MigrateError>> { Box::pin(async move { // language=SQLite - self.execute(&*format!(r#" + self.execute(&*format!( + r#" CREATE TABLE IF NOT EXISTS {table_name} ( version BIGINT PRIMARY KEY, description TEXT NOT NULL, @@ -77,20 +102,23 @@ CREATE TABLE IF NOT EXISTS {table_name} ( checksum BLOB NOT NULL, execution_time BIGINT NOT NULL ); - "#), - ) - .await?; + "# + )) + .await?; Ok(()) }) } - fn dirty_version<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn dirty_version<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let row: Option<(i64,)> = query_as( - &format!("SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1"), - ) + let row: Option<(i64,)> = query_as(&format!( + "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" + )) .fetch_optional(self) .await?; @@ -98,13 +126,17 @@ CREATE TABLE IF NOT EXISTS {table_name} ( }) } - fn list_applied_migrations<'e>(&'e mut self, table_name: &'e str) -> BoxFuture<'e, Result, MigrateError>> { + fn list_applied_migrations<'e>( + &'e mut self, + table_name: &'e str, + ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQLite - let rows: Vec<(i64, Vec)> = - query_as(&format!("SELECT version, checksum FROM {table_name} ORDER BY version")) - .fetch_all(self) - .await?; + let rows: Vec<(i64, Vec)> = query_as(&format!( + "SELECT version, checksum FROM {table_name} ORDER BY version" + )) + .fetch_all(self) + .await?; let migrations = rows .into_iter() @@ -146,12 +178,12 @@ CREATE TABLE IF NOT EXISTS {table_name} ( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( ?1, ?2, TRUE, ?3, -1 ) - "#), - ) + "# + )) .bind(migration.version) .bind(&*migration.description) .bind(&*migration.checksum) @@ -168,13 +200,13 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query( - &format!(r#" + let _ = query(&format!( + r#" UPDATE {table_name} SET execution_time = ?1 WHERE version = ?2 - "#), - ) + "# + )) .bind(elapsed.as_nanos() as i64) .bind(migration.version) .execute(self) From 396b23af27165a4bc7346854268137c60da16114 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 1 Feb 2025 23:42:51 -0800 Subject: [PATCH 37/78] fix(postgres): don't fetch `ColumnOrigin` for transparently-prepared statements --- sqlx-postgres/src/connection/describe.rs | 2 +- sqlx-postgres/src/connection/executor.rs | 18 +++--------------- 2 files changed, 4 insertions(+), 16 deletions(-) diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index aae941b623..0c2f5ff154 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -128,7 +128,7 @@ impl PgConnection { 
let origin = if let (Some(relation_oid), Some(attribute_no)) = (field.relation_id, field.relation_attribute_no) { - self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch) + self.maybe_fetch_column_origin(relation_oid, attribute_no, fetch_column_description) .await? } else { ColumnOrigin::Expression diff --git a/sqlx-postgres/src/connection/executor.rs b/sqlx-postgres/src/connection/executor.rs index 93cf4ec6bc..f8dbfe7ec6 100644 --- a/sqlx-postgres/src/connection/executor.rs +++ b/sqlx-postgres/src/connection/executor.rs @@ -86,9 +86,7 @@ async fn prepare( let parameters = conn.handle_parameter_description(parameters).await?; - let (columns, column_names) = conn - .handle_row_description(rows, true, fetch_column_origin) - .await?; + let (columns, column_names) = conn.handle_row_description(rows, true, fetch_column_origin).await?; // ensure that if we did fetch custom data, we wait until we are fully ready before // continuing @@ -182,15 +180,7 @@ impl PgConnection { return Ok((*statement).clone()); } - let statement = prepare( - self, - sql, - parameters, - metadata, - persistent, - fetch_column_origin, - ) - .await?; + let statement = prepare(self, sql, parameters, metadata, persistent, fetch_column_origin).await?; if persistent && self.inner.cache_statement.is_enabled() { if let Some((id, _)) = self.inner.cache_statement.insert(sql, statement.clone()) { @@ -465,9 +455,7 @@ impl<'c> Executor<'c> for &'c mut PgConnection { Box::pin(async move { self.wait_until_ready().await?; - let (_, metadata) = self - .get_or_prepare(sql, parameters, true, None, true) - .await?; + let (_, metadata) = self.get_or_prepare(sql, parameters, true, None, true).await?; Ok(PgStatement { sql: Cow::Borrowed(sql), From 59cd28823361fab5a6628635e0df69675074e16e Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 1 Feb 2025 23:53:23 -0800 Subject: [PATCH 38/78] feat: progress on axum-multi-tenant example --- .../postgres/axum-multi-tenant/Cargo.toml | 2 +- .../axum-multi-tenant/accounts/Cargo.toml | 6 +- .../accounts/migrations/02_account.sql | 2 +- .../axum-multi-tenant/accounts/sqlx.toml | 1 + .../axum-multi-tenant/accounts/src/lib.rs | 151 +++++++++++++----- .../axum-multi-tenant/payments/Cargo.toml | 2 +- 6 files changed, 117 insertions(+), 47 deletions(-) diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml index 1be607c5b8..5d3b7167c3 100644 --- a/examples/postgres/axum-multi-tenant/Cargo.toml +++ b/examples/postgres/axum-multi-tenant/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "multi-tenant" +name = "axum-multi-tenant" version.workspace = true license.workspace = true edition.workspace = true diff --git a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml index 485ba8eb73..bc414e0b33 100644 --- a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml +++ b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml @@ -4,10 +4,12 @@ version = "0.1.0" edition = "2021" [dependencies] -sqlx = { workspace = true, features = ["postgres", "time"] } -argon2 = { version = "0.5.3", features = ["password-hash"] } +sqlx = { workspace = true, features = ["postgres", "time", "uuid"] } tokio = { version = "1", features = ["rt", "sync"] } +argon2 = { version = "0.5.3", features = ["password-hash"] } +password-hash = { version = "0.5", features = ["std"] } + uuid = "1" thiserror = "1" rand = "0.8" diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql 
b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql index 91b9cf82e0..ea9b8b9531 100644 --- a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql +++ b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql @@ -1,4 +1,4 @@ -create table account +create table accounts.account ( account_id uuid primary key default gen_random_uuid(), email text unique not null, diff --git a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml index 45042f1333..8ce3f3f5e0 100644 --- a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml +++ b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml @@ -4,3 +4,4 @@ migrations-table = "accounts._sqlx_migrations" [macros.table-overrides.'accounts.account'] 'account_id' = "crate::AccountId" +'password_hash' = "sqlx::types::Text" diff --git a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs index f015af3d40..5535564e0c 100644 --- a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs +++ b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs @@ -1,5 +1,6 @@ +use argon2::{password_hash, Argon2, PasswordHasher, PasswordVerifier}; use std::error::Error; -use argon2::{password_hash, Argon2, PasswordHash, PasswordHasher, PasswordVerifier}; +use std::sync::Arc; use password_hash::PasswordHashString; @@ -10,21 +11,24 @@ use uuid::Uuid; use tokio::sync::Semaphore; -#[derive(sqlx::Type)] +#[derive(sqlx::Type, Debug)] #[sqlx(transparent)] pub struct AccountId(pub Uuid); - pub struct AccountsManager { - hashing_semaphore: Semaphore, + hashing_semaphore: Arc, } #[derive(Debug, thiserror::Error)] pub enum CreateError { - #[error("email in-use")] + #[error("error creating account: email in-use")] EmailInUse, - General(#[source] - #[from] GeneralError), + #[error("error creating account")] + General( + #[source] + #[from] + GeneralError, + ), } #[derive(Debug, thiserror::Error)] @@ -33,50 +37,95 @@ pub enum AuthenticateError { UnknownEmail, #[error("invalid password")] InvalidPassword, - General(#[source] - #[from] GeneralError), + #[error("authentication error")] + General( + #[source] + #[from] + GeneralError, + ), } #[derive(Debug, thiserror::Error)] pub enum GeneralError { - Sqlx(#[source] - #[from] sqlx::Error), - PasswordHash(#[source] #[from] argon2::password_hash::Error), - Task(#[source] - #[from] tokio::task::JoinError), + #[error("database error")] + Sqlx( + #[source] + #[from] + sqlx::Error, + ), + #[error("error hashing password")] + PasswordHash( + #[source] + #[from] + argon2::password_hash::Error, + ), + #[error("task panicked")] + Task( + #[source] + #[from] + tokio::task::JoinError, + ), } impl AccountsManager { - pub async fn new(conn: &mut PgConnection, max_hashing_threads: usize) -> Result { - sqlx::migrate!().run(conn).await?; + pub async fn new( + conn: &mut PgConnection, + max_hashing_threads: usize, + ) -> Result { + sqlx::migrate!() + .run(conn) + .await + .map_err(sqlx::Error::from)?; - AccountsManager { - hashing_semaphore: Semaphore::new(max_hashing_threads) - } + Ok(AccountsManager { + hashing_semaphore: Semaphore::new(max_hashing_threads).into(), + }) } - async fn hash_password(&self, password: String) -> Result { - let guard = self.hashing_semaphore.acquire().await + async fn hash_password(&self, password: String) -> Result { + let guard = self + .hashing_semaphore + .clone() + .acquire_owned() + .await .expect("BUG: this semaphore should not be closed"); // We 
transfer ownership to the blocking task and back to ensure Tokio doesn't spawn // excess threads. let (_guard, res) = tokio::task::spawn_blocking(move || { let salt = argon2::password_hash::SaltString::generate(rand::thread_rng()); - (guard, Argon2::default().hash_password(password.as_bytes(), &salt)) + ( + guard, + Argon2::default() + .hash_password(password.as_bytes(), &salt) + .map(|hash| hash.serialize()), + ) }) - .await?; + .await?; Ok(res?) } - async fn verify_password(&self, password: String, hash: PasswordHashString) -> Result<(), AuthenticateError> { - let guard = self.hashing_semaphore.acquire().await + async fn verify_password( + &self, + password: String, + hash: PasswordHashString, + ) -> Result<(), AuthenticateError> { + let guard = self + .hashing_semaphore + .clone() + .acquire_owned() + .await .expect("BUG: this semaphore should not be closed"); let (_guard, res) = tokio::task::spawn_blocking(move || { - (guard, Argon2::default().verify_password(password.as_bytes(), &hash.password_hash())) - }).await.map_err(GeneralError::from)?; + ( + guard, + Argon2::default().verify_password(password.as_bytes(), &hash.password_hash()), + ) + }) + .await + .map_err(GeneralError::from)?; if let Err(password_hash::Error::Password) = res { return Err(AuthenticateError::InvalidPassword); @@ -87,46 +136,64 @@ impl AccountsManager { Ok(()) } - pub async fn create(&self, txn: &mut PgTransaction, email: &str, password: String) -> Result { + pub async fn create( + &self, + txn: &mut PgTransaction<'_>, + email: &str, + password: String, + ) -> Result { // Hash password whether the account exists or not to make it harder // to tell the difference in the timing. let hash = self.hash_password(password).await?; + // Thanks to `sqlx.toml`, `account_id` maps to `AccountId` // language=PostgreSQL - sqlx::query!( + sqlx::query_scalar!( "insert into accounts.account(email, password_hash) \ values ($1, $2) \ returning account_id", email, - Text(hash) as Text>, + hash.as_str(), ) - .fetch_one(&mut *txn) - .await - .map_err(|e| if e.constraint() == Some("account_account_id_key") { + .fetch_one(&mut **txn) + .await + .map_err(|e| { + if e.as_database_error().and_then(|dbe| dbe.constraint()) == Some("account_account_id_key") { CreateError::EmailInUse } else { GeneralError::from(e).into() - }) + } + }) } - pub async fn authenticate(&self, conn: &mut PgConnection, email: &str, password: String) -> Result { + pub async fn authenticate( + &self, + conn: &mut PgConnection, + email: &str, + password: String, + ) -> Result { + // Thanks to `sqlx.toml`: + // * `account_id` maps to `AccountId` + // * `password_hash` maps to `Text` let maybe_account = sqlx::query!( - "select account_id, password_hash as \"password_hash: Text\" \ + "select account_id, password_hash \ from accounts.account \ - where email_id = $1", + where email = $1", email ) - .fetch_optional(&mut *conn) - .await - .map_err(GeneralError::from)?; + .fetch_optional(&mut *conn) + .await + .map_err(GeneralError::from)?; let Some(account) = maybe_account else { // Hash the password whether the account exists or not to hide the difference in timing. 
- self.hash_password(password).await.map_err(GeneralError::from)?; + self.hash_password(password) + .await + .map_err(GeneralError::from)?; return Err(AuthenticateError::UnknownEmail); }; - self.verify_password(password, account.password_hash.into())?; + self.verify_password(password, account.password_hash.into_inner()).await?; Ok(account.account_id) } diff --git a/examples/postgres/axum-multi-tenant/payments/Cargo.toml b/examples/postgres/axum-multi-tenant/payments/Cargo.toml index 0a2485955b..d7dc430553 100644 --- a/examples/postgres/axum-multi-tenant/payments/Cargo.toml +++ b/examples/postgres/axum-multi-tenant/payments/Cargo.toml @@ -4,4 +4,4 @@ version = "0.1.0" edition = "2021" [dependencies] -sqlx = { workspace = true, features = ["postgres", "time"] } +sqlx = { workspace = true, features = ["postgres", "time", "uuid"] } From 08b23648757df5d957a3d9d0ee09e5225291d27d Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 21 Feb 2025 15:52:27 -0800 Subject: [PATCH 39/78] feat(config): better errors for mislabeled fields --- sqlx-core/src/config/common.rs | 2 +- sqlx-core/src/config/macros.rs | 2 +- sqlx-core/src/config/migrate.rs | 2 +- sqlx-core/src/config/mod.rs | 5 ++++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 7f88c223a7..45d3e689ee 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -3,7 +3,7 @@ #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct Config { /// Override the database URL environment variable. diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 19e5f42fa0..9acabf2d6a 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -5,7 +5,7 @@ use std::collections::BTreeMap; #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct Config { /// Specify which crates' types to use when types from multiple crates apply. diff --git a/sqlx-core/src/config/migrate.rs b/sqlx-core/src/config/migrate.rs index 4865e24c76..0dd6cc2257 100644 --- a/sqlx-core/src/config/migrate.rs +++ b/sqlx-core/src/config/migrate.rs @@ -16,7 +16,7 @@ use std::collections::BTreeSet; #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct Config { /// Specify the names of schemas to create if they don't already exist. diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 02bde20f73..5801af888c 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -48,7 +48,7 @@ mod tests; #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, rename_all = "kebab-case") + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct Config { /// Configuration shared by multiple components. @@ -210,6 +210,9 @@ impl Config { // Only returned if the file exists but the feature is not enabled. 
panic!("{e}") } + Err(ConfigError::Parse { error, path }) => { + panic!("error parsing sqlx config {path:?}: {error}") + } Err(e) => { panic!("failed to read sqlx config: {e}") } From bda547ceb44092278d3692b3916fe887d15ae863 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Wed, 26 Feb 2025 13:36:53 -0800 Subject: [PATCH 40/78] WIP feat: filling out axum-multi-tenant example --- .../postgres/axum-multi-tenant/Cargo.toml | 10 +++ examples/postgres/axum-multi-tenant/README.md | 19 ++++-- .../axum-multi-tenant/accounts/Cargo.toml | 7 +- .../accounts/migrations/01_setup.sql | 30 +++++++++ .../accounts/migrations/02_account.sql | 12 ++-- .../axum-multi-tenant/accounts/sqlx.toml | 2 +- .../axum-multi-tenant/accounts/src/lib.rs | 53 +++++++++++---- .../axum-multi-tenant/payments/Cargo.toml | 12 +++- .../payments/migrations/01_setup.sql | 30 +++++++++ .../payments/migrations/02_payment.sql | 58 +++++++++++++++++ .../axum-multi-tenant/payments/sqlx.toml | 10 +++ .../axum-multi-tenant/payments/src/lib.rs | 40 +++++++++--- .../axum-multi-tenant/src/http/mod.rs | 7 ++ .../postgres/axum-multi-tenant/src/main.rs | 65 ++++++++++++++++++- sqlx-macros-core/src/migrate.rs | 11 +--- 15 files changed, 320 insertions(+), 46 deletions(-) create mode 100644 examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql create mode 100644 examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql create mode 100644 examples/postgres/axum-multi-tenant/payments/sqlx.toml create mode 100644 examples/postgres/axum-multi-tenant/src/http/mod.rs diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml index 5d3b7167c3..7ea32bbc43 100644 --- a/examples/postgres/axum-multi-tenant/Cargo.toml +++ b/examples/postgres/axum-multi-tenant/Cargo.toml @@ -12,7 +12,17 @@ authors.workspace = true accounts = { path = "accounts" } payments = { path = "payments" } +tokio = { version = "1", features = ["rt-multi-thread", "macros"] } + sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } +axum = "0.8.1" + +clap = { version = "4.5.30", features = ["derive", "env"] } +color-eyre = "0.6.3" +dotenvy = "0.15.7" +tracing-subscriber = "0.3.19" + + [lints] workspace = true diff --git a/examples/postgres/axum-multi-tenant/README.md b/examples/postgres/axum-multi-tenant/README.md index d38f7f3ea5..aae3a6f1fe 100644 --- a/examples/postgres/axum-multi-tenant/README.md +++ b/examples/postgres/axum-multi-tenant/README.md @@ -3,9 +3,20 @@ This example project involves three crates, each owning a different schema in one database, with their own set of migrations. -* The main crate, an Axum app. - * Owns the `public` schema (tables are referenced unqualified). +* The main crate, an Axum app. + * Owns the `public` schema (tables are referenced unqualified). * `accounts`: a subcrate simulating a reusable account-management crate. - * Owns schema `accounts`. + * Owns schema `accounts`. * `payments`: a subcrate simulating a wrapper for a payments API. - * Owns schema `payments`. + * Owns schema `payments`. + +## Note: Schema-Qualified Names + +This example uses schema-qualified names everywhere for clarity. + +It can be tempting to change the `search_path` of the connection (MySQL, Postgres) to eliminate the need for schema +prefixes, but this can cause some really confusing issues when names conflict. 
+
+This example will generate a `_sqlx_migrations` table in three different schemas, and if `search_path` is set
+to `public,accounts,payments` and the migrator for the main application attempts to reference the table unqualified,
+it would throw an error.
diff --git a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml
index bc414e0b33..dd95a890af 100644
--- a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml
+++ b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-sqlx = { workspace = true, features = ["postgres", "time", "uuid"] }
+sqlx = { workspace = true, features = ["postgres", "time", "uuid", "macros", "sqlx-toml"] }
 tokio = { version = "1", features = ["rt", "sync"] }
 
 argon2 = { version = "0.5.3", features = ["password-hash"] }
@@ -13,3 +13,8 @@ password-hash = { version = "0.5", features = ["std"] }
 uuid = "1"
 thiserror = "1"
 rand = "0.8"
+
+time = "0.3.37"
+
+[dev-dependencies]
+sqlx = { workspace = true, features = ["runtime-tokio"] }
diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql
index e69de29bb2..5aa8fa23cf 100644
--- a/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql
+++ b/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql
@@ -0,0 +1,30 @@
+-- We try to ensure every table has `created_at` and `updated_at` columns, which can help immensely with debugging
+-- and auditing.
+--
+-- While `created_at` can just be `default now()`, setting `updated_at` on update requires a trigger which
+-- is a lot of boilerplate. These two functions save us from writing that every time as instead we can just do
+--
+-- select accounts.trigger_updated_at('<table name>');
+--
+-- after a `CREATE TABLE`.
+create or replace function accounts.set_updated_at() + returns trigger as +$$ +begin + NEW.updated_at = now(); +return NEW; +end; +$$ language plpgsql; + +create or replace function accounts.trigger_updated_at(tablename regclass) + returns void as +$$ +begin +execute format('CREATE TRIGGER set_updated_at + BEFORE UPDATE + ON %s + FOR EACH ROW + WHEN (OLD is distinct from NEW) + EXECUTE FUNCTION accounts.set_updated_at();', tablename); +end; +$$ language plpgsql; diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql index ea9b8b9531..a75814bd09 100644 --- a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql +++ b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql @@ -1,8 +1,10 @@ create table accounts.account ( - account_id uuid primary key default gen_random_uuid(), - email text unique not null, - password_hash text not null, - created_at timestamptz not null default now(), - updated_at timestamptz + account_id uuid primary key default gen_random_uuid(), + email text unique not null, + password_hash text not null, + created_at timestamptz not null default now(), + updated_at timestamptz ); + +select accounts.trigger_updated_at('accounts.account'); diff --git a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml index 8ce3f3f5e0..1d02130c2d 100644 --- a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml +++ b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml @@ -1,6 +1,6 @@ [migrate] create-schemas = ["accounts"] -migrations-table = "accounts._sqlx_migrations" +table-name = "accounts._sqlx_migrations" [macros.table-overrides.'accounts.account'] 'account_id' = "crate::AccountId" diff --git a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs index 5535564e0c..3037463e4c 100644 --- a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs +++ b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs @@ -1,11 +1,9 @@ use argon2::{password_hash, Argon2, PasswordHasher, PasswordVerifier}; -use std::error::Error; use std::sync::Arc; use password_hash::PasswordHashString; -use sqlx::{PgConnection, PgTransaction}; -use sqlx::types::Text; +use sqlx::{PgConnection, PgPool, PgTransaction}; use uuid::Uuid; @@ -16,6 +14,37 @@ use tokio::sync::Semaphore; pub struct AccountId(pub Uuid); pub struct AccountsManager { + /// Controls how many blocking tasks are allowed to run concurrently for Argon2 hashing. + /// + /// ### Motivation + /// Tokio blocking tasks are generally not designed for CPU-bound work. + /// + /// If no threads are idle, Tokio will automatically spawn new ones to handle + /// new blocking tasks up to a very high limit--512 by default. + /// + /// This is because blocking tasks are expected to spend their time *blocked*, e.g. on + /// blocking I/O, and thus not consume CPU resources or require a lot of context switching. + /// + /// This strategy is not the most efficient way to use threads for CPU-bound work, which + /// should schedule work to a fixed number of threads to minimize context switching + /// and memory usage (each new thread needs significant space allocated for its stack). 
+ /// + /// We can work around this by using a purpose-designed thread-pool, like Rayon, + /// but we still have the problem that those APIs usually are not designed to support `async`, + /// so we end up needing blocking tasks anyway, or implementing our own work queue using + /// channels. Rayon also does not shut down idle worker threads. + /// + /// `block_in_place` is not a silver bullet, either, as it simply uses `spawn_blocking` + /// internally to take over from the current thread while it is executing blocking work. + /// This also prevents futures from being polled concurrently in the current task. + /// + /// We can lower the limit for blocking threads when creating the runtime, but this risks + /// starving other blocking tasks that are being created by the application or the Tokio + /// runtime itself + /// (which are used for `tokio::fs`, stdio, resolving of hostnames by `ToSocketAddrs`, etc.). + /// + /// Instead, we can just use a Semaphore to limit how many blocking tasks are spawned at once, + /// emulating the behavior of a thread pool like Rayon without needing any additional crates. hashing_semaphore: Arc, } @@ -57,7 +86,7 @@ pub enum GeneralError { PasswordHash( #[source] #[from] - argon2::password_hash::Error, + password_hash::Error, ), #[error("task panicked")] Task( @@ -68,12 +97,9 @@ pub enum GeneralError { } impl AccountsManager { - pub async fn new( - conn: &mut PgConnection, - max_hashing_threads: usize, - ) -> Result { + pub async fn setup(pool: &PgPool, max_hashing_threads: usize) -> Result { sqlx::migrate!() - .run(conn) + .run(pool) .await .map_err(sqlx::Error::from)?; @@ -147,8 +173,8 @@ impl AccountsManager { let hash = self.hash_password(password).await?; // Thanks to `sqlx.toml`, `account_id` maps to `AccountId` - // language=PostgreSQL sqlx::query_scalar!( + // language=PostgreSQL "insert into accounts.account(email, password_hash) \ values ($1, $2) \ returning account_id", @@ -158,7 +184,9 @@ impl AccountsManager { .fetch_one(&mut **txn) .await .map_err(|e| { - if e.as_database_error().and_then(|dbe| dbe.constraint()) == Some("account_account_id_key") { + if e.as_database_error().and_then(|dbe| dbe.constraint()) + == Some("account_account_id_key") + { CreateError::EmailInUse } else { GeneralError::from(e).into() @@ -193,7 +221,8 @@ impl AccountsManager { return Err(AuthenticateError::UnknownEmail); }; - self.verify_password(password, account.password_hash.into_inner()).await?; + self.verify_password(password, account.password_hash.into_inner()) + .await?; Ok(account.account_id) } diff --git a/examples/postgres/axum-multi-tenant/payments/Cargo.toml b/examples/postgres/axum-multi-tenant/payments/Cargo.toml index d7dc430553..6a0e4d2672 100644 --- a/examples/postgres/axum-multi-tenant/payments/Cargo.toml +++ b/examples/postgres/axum-multi-tenant/payments/Cargo.toml @@ -4,4 +4,14 @@ version = "0.1.0" edition = "2021" [dependencies] -sqlx = { workspace = true, features = ["postgres", "time", "uuid"] } +accounts = { path = "../accounts" } + +sqlx = { workspace = true, features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml"] } + +rust_decimal = "1.36.0" + +time = "0.3.37" +uuid = "1.12.1" + +[dev-dependencies] +sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql b/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql new file mode 100644 index 0000000000..4935a63705 --- /dev/null +++ 
b/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql
@@ -0,0 +1,30 @@
+-- We try to ensure every table has `created_at` and `updated_at` columns, which can help immensely with debugging
+-- and auditing.
+--
+-- While `created_at` can just be `default now()`, setting `updated_at` on update requires a trigger which
+-- is a lot of boilerplate. These two functions save us from writing that every time as instead we can just do
+--
+-- select payments.trigger_updated_at('<table name>
'); +-- +-- after a `CREATE TABLE`. +create or replace function payments.set_updated_at() + returns trigger as +$$ +begin + NEW.updated_at = now(); +return NEW; +end; +$$ language plpgsql; + +create or replace function payments.trigger_updated_at(tablename regclass) + returns void as +$$ +begin +execute format('CREATE TRIGGER set_updated_at + BEFORE UPDATE + ON %s + FOR EACH ROW + WHEN (OLD is distinct from NEW) + EXECUTE FUNCTION payments.set_updated_at();', tablename); +end; +$$ language plpgsql; diff --git a/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql b/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql new file mode 100644 index 0000000000..cc372f01b7 --- /dev/null +++ b/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql @@ -0,0 +1,58 @@ +-- `payments::PaymentStatus` +-- +-- Historically at LaunchBadge we preferred not to define enums on the database side because it can be annoying +-- and error-prone to keep them in-sync with the application. +-- Instead, we let the application define the enum and just have the database store a compact representation of it. +-- This is mostly a matter of taste, however. +-- +-- For the purposes of this example, we're using an in-database enum because this is a common use-case +-- for needing type overrides. +create type payments.payment_status as enum ( + 'pending', + 'success', + 'failed' + ); + +create table payments.payment +( + payment_id uuid primary key default gen_random_uuid(), + -- This cross-schema reference means migrations for the `accounts` crate should be run first. + account_id uuid not null references accounts.account (account_id), + + status payments.payment_status NOT NULL, + + -- ISO 4217 currency code (https://en.wikipedia.org/wiki/ISO_4217#List_of_ISO_4217_currency_codes) + -- + -- This *could* be an ENUM of currency codes, but constraining this to a set of known values in the database + -- would be annoying to keep up to date as support for more currencies is added. + -- + -- Consider also if support for cryptocurrencies is desired; those are not covered by ISO 4217. + -- + -- Though ISO 4217 is a three-character code, `TEXT`, `VARCHAR` and `CHAR(N)` + -- all use the same storage format in Postgres. Any constraint against the length of this field + -- would purely be a sanity check. + currency text NOT NULL, + -- There's an endless debate about what type should be used to represent currency amounts. + -- + -- Postgres has the `MONEY` type, but the fractional precision depends on a C locale setting and the type is mostly + -- optimized for storing USD, or other currencies with a minimum fraction of 1 cent. + -- + -- NEVER use `FLOAT` or `DOUBLE`. IEEE-754 rounding point has round-off and precision errors that make it wholly + -- unsuitable for representing real money amounts. + -- + -- `NUMERIC`, being an arbitrary-precision decimal format, is a safe default choice that can support any currency, + -- and so is what we've chosen here. + amount NUMERIC NOT NULL, + + -- Payments almost always take place through a third-party vendor (e.g. PayPal, Stripe, etc.), + -- so imagine this is an identifier string for this payment in such a vendor's systems. + -- + -- For privacy and security reasons, payment and personally-identifying information + -- (e.g. credit card numbers, bank account numbers, billing addresses) should only be stored with the vendor + -- unless there is a good reason otherwise. 
+ external_payment_id TEXT NOT NULL UNIQUE, + created_at timestamptz default now(), + updated_at timestamptz +); + +select payments.trigger_updated_at('payments.payment'); diff --git a/examples/postgres/axum-multi-tenant/payments/sqlx.toml b/examples/postgres/axum-multi-tenant/payments/sqlx.toml new file mode 100644 index 0000000000..1a4a27dc6a --- /dev/null +++ b/examples/postgres/axum-multi-tenant/payments/sqlx.toml @@ -0,0 +1,10 @@ +[migrate] +create-schemas = ["payments"] +table-name = "payments._sqlx_migrations" + +[macros.table-overrides.'payments.payment'] +'payment_id' = "crate::PaymentId" +'account_id' = "accounts::AccountId" + +[macros.type-overrides] +'payments.payment_status' = "crate::PaymentStatus" diff --git a/examples/postgres/axum-multi-tenant/payments/src/lib.rs b/examples/postgres/axum-multi-tenant/payments/src/lib.rs index 7d12d9af81..b0efcfe17f 100644 --- a/examples/postgres/axum-multi-tenant/payments/src/lib.rs +++ b/examples/postgres/axum-multi-tenant/payments/src/lib.rs @@ -1,14 +1,34 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right +use accounts::AccountId; +use sqlx::PgPool; +use time::OffsetDateTime; +use uuid::Uuid; + +#[derive(sqlx::Type, Debug)] +#[sqlx(transparent)] +pub struct PaymentId(pub Uuid); + +#[derive(sqlx::Type, Debug)] +#[sqlx(type_name = "payments.payment_status")] +#[sqlx(rename_all = "snake_case")] +pub enum PaymentStatus { + Pending, + Successful, } -#[cfg(test)] -mod tests { - use super::*; +#[derive(Debug)] +pub struct Payment { + pub payment_id: PaymentId, + pub account_id: AccountId, + pub status: PaymentStatus, + pub currency: String, + // `rust_decimal::Decimal` has more than enough precision for any real-world amount of money. + pub amount: rust_decimal::Decimal, + pub external_payment_id: String, + pub created_at: OffsetDateTime, + pub updated_at: Option, +} - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } +pub async fn migrate(pool: &PgPool) -> sqlx::Result<()> { + sqlx::migrate!().run(pool).await?; + Ok(()) } diff --git a/examples/postgres/axum-multi-tenant/src/http/mod.rs b/examples/postgres/axum-multi-tenant/src/http/mod.rs new file mode 100644 index 0000000000..9197a2042f --- /dev/null +++ b/examples/postgres/axum-multi-tenant/src/http/mod.rs @@ -0,0 +1,7 @@ +use accounts::AccountsManager; +use color_eyre::eyre; +use sqlx::PgPool; + +pub async fn run(pool: PgPool, accounts: AccountsManager) -> eyre::Result<()> { + axum::serve +} diff --git a/examples/postgres/axum-multi-tenant/src/main.rs b/examples/postgres/axum-multi-tenant/src/main.rs index e7a11a969c..3d4b0cba64 100644 --- a/examples/postgres/axum-multi-tenant/src/main.rs +++ b/examples/postgres/axum-multi-tenant/src/main.rs @@ -1,3 +1,64 @@ -fn main() { - println!("Hello, world!"); +mod http; + +use accounts::AccountsManager; +use color_eyre::eyre; +use color_eyre::eyre::Context; + +#[derive(clap::Parser)] +struct Args { + #[clap(long, env)] + database_url: String, + + #[clap(long, env, default_value_t = 0)] + max_hashing_threads: usize, +} + +#[tokio::main] +async fn main() -> eyre::Result<()> { + color_eyre::install()?; + let _ = dotenvy::dotenv(); + + // (@abonander) I prefer to keep `clap::Parser` fully qualified here because it makes it clear + // what crate the derive macro is coming from. Otherwise, it requires contextual knowledge + // to understand that this is parsing CLI arguments. 
+ let args: Args = clap::Parser::parse(); + + tracing_subscriber::fmt::init(); + + let pool = sqlx::PgPool::connect( + // `env::var()` doesn't include the variable name for context like it should. + &dotenvy::var("DATABASE_URL").wrap_err("DATABASE_URL must be set")?, + ) + .await + .wrap_err("could not connect to database")?; + + let max_hashing_threads = if args.max_hashing_threads == 0 { + std::thread::available_parallelism() + // We could just default to 1 but that would be a silent pessimization, + // which would be hard to debug. + .wrap_err("unable to determine number of available CPU cores; set `--max-hashing-threads` to a nonzero amount")? + .get() + } else { + args.max_hashing_threads + }; + + // Runs migration for `accounts` internally. + let accounts = AccountsManager::setup(&pool, max_hashing_threads) + .await + .wrap_err("error initializing AccountsManager")?; + + payments::migrate(&pool) + .await + .wrap_err("error running payments migrations")?; + + // `main()` doesn't actually run from a Tokio worker thread, + // so spawned tasks hit the global injection queue first and communication with the driver + // core is always cross-thread. + // + // The recommendation is to spawn the `axum::serve` future as a task so it executes directly + // on a worker thread. + + let http_task = tokio::spawn(http::run(pool, accounts)); + + Ok(()) } diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 5342f8861e..729d61ce91 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -136,22 +136,13 @@ pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result Date: Thu, 27 Feb 2025 16:20:09 -0800 Subject: [PATCH 41/78] feat: multi-tenant example No longer Axum-based because filling out the request routes would have distracted from the purpose of the example. 
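For orientation, the migrations in this example have to run in dependency order, since `payments.payment` has a foreign key to `accounts.account`. A minimal sketch of that wiring, assuming the `AccountsManager::setup` and `payments::migrate` functions introduced earlier in this series; the helper name `run_all_migrations` is illustrative and not part of the example:

```rust
use sqlx::PgPool;

// Sketch only: run each crate's migrations in dependency order.
async fn run_all_migrations(
    database_url: &str,
    max_hashing_threads: usize,
) -> Result<accounts::AccountsManager, Box<dyn std::error::Error>> {
    let pool = PgPool::connect(database_url).await?;

    // Runs the `accounts` crate's migrations internally
    // (its `sqlx.toml` creates the `accounts` schema and uses `accounts._sqlx_migrations`).
    let accounts = accounts::AccountsManager::setup(&pool, max_hashing_threads).await?;

    // Then the `payments` migrations, which reference `accounts.account`.
    payments::migrate(&pool).await?;

    Ok(accounts)
}
```

The main crate's own migrations (for the `public` schema) run separately, for example via `sqlx migrate run` as the CI workflow in the next patch does.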
--- Cargo.toml | 2 +- .../postgres/axum-multi-tenant/Cargo.toml | 28 --- examples/postgres/axum-multi-tenant/README.md | 22 -- .../axum-multi-tenant/accounts/Cargo.toml | 20 -- .../accounts/migrations/01_setup.sql | 30 --- .../accounts/migrations/02_account.sql | 10 - .../axum-multi-tenant/accounts/sqlx.toml | 7 - .../axum-multi-tenant/accounts/src/lib.rs | 229 ------------------ .../axum-multi-tenant/payments/Cargo.toml | 17 -- .../payments/migrations/01_setup.sql | 30 --- .../payments/migrations/02_payment.sql | 58 ----- .../axum-multi-tenant/payments/sqlx.toml | 10 - .../axum-multi-tenant/payments/src/lib.rs | 34 --- .../axum-multi-tenant/src/http/mod.rs | 7 - .../postgres/axum-multi-tenant/src/main.rs | 64 ----- examples/postgres/multi-tenant/Cargo.toml | 22 +- examples/postgres/multi-tenant/README.md | 24 +- .../postgres/multi-tenant/accounts/Cargo.toml | 8 +- .../postgres/multi-tenant/payments/Cargo.toml | 14 +- examples/postgres/multi-tenant/src/main.rs | 5 +- 20 files changed, 22 insertions(+), 619 deletions(-) delete mode 100644 examples/postgres/axum-multi-tenant/Cargo.toml delete mode 100644 examples/postgres/axum-multi-tenant/README.md delete mode 100644 examples/postgres/axum-multi-tenant/accounts/Cargo.toml delete mode 100644 examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql delete mode 100644 examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql delete mode 100644 examples/postgres/axum-multi-tenant/accounts/sqlx.toml delete mode 100644 examples/postgres/axum-multi-tenant/accounts/src/lib.rs delete mode 100644 examples/postgres/axum-multi-tenant/payments/Cargo.toml delete mode 100644 examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql delete mode 100644 examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql delete mode 100644 examples/postgres/axum-multi-tenant/payments/sqlx.toml delete mode 100644 examples/postgres/axum-multi-tenant/payments/src/lib.rs delete mode 100644 examples/postgres/axum-multi-tenant/src/http/mod.rs delete mode 100644 examples/postgres/axum-multi-tenant/src/main.rs diff --git a/Cargo.toml b/Cargo.toml index ca799fb03b..cafb89c08f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,7 @@ members = [ "sqlx-postgres", "sqlx-sqlite", "examples/mysql/todos", - "examples/postgres/axum-multi-tenant", + "examples/postgres/multi-tenant", "examples/postgres/axum-social-with-tests", "examples/postgres/chat", "examples/postgres/files", diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml deleted file mode 100644 index 7ea32bbc43..0000000000 --- a/examples/postgres/axum-multi-tenant/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "axum-multi-tenant" -version.workspace = true -license.workspace = true -edition.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -authors.workspace = true - -[dependencies] -accounts = { path = "accounts" } -payments = { path = "payments" } - -tokio = { version = "1", features = ["rt-multi-thread", "macros"] } - -sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } - -axum = "0.8.1" - -clap = { version = "4.5.30", features = ["derive", "env"] } -color-eyre = "0.6.3" -dotenvy = "0.15.7" -tracing-subscriber = "0.3.19" - - -[lints] -workspace = true diff --git a/examples/postgres/axum-multi-tenant/README.md b/examples/postgres/axum-multi-tenant/README.md deleted file mode 100644 index aae3a6f1fe..0000000000 --- 
a/examples/postgres/axum-multi-tenant/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# Axum App with Multi-tenant Database - -This example project involves three crates, each owning a different schema in one database, -with their own set of migrations. - -* The main crate, an Axum app. - * Owns the `public` schema (tables are referenced unqualified). -* `accounts`: a subcrate simulating a reusable account-management crate. - * Owns schema `accounts`. -* `payments`: a subcrate simulating a wrapper for a payments API. - * Owns schema `payments`. - -## Note: Schema-Qualified Names - -This example uses schema-qualified names everywhere for clarity. - -It can be tempting to change the `search_path` of the connection (MySQL, Postgres) to eliminate the need for schema -prefixes, but this can cause some really confusing issues when names conflict. - -This example will generate a `_sqlx_migrations` table in three different schemas, and if `search_path` is set -to `public,accounts,payments` and the migrator for the main application attempts to reference the table unqualified, -it would throw an error. diff --git a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml b/examples/postgres/axum-multi-tenant/accounts/Cargo.toml deleted file mode 100644 index dd95a890af..0000000000 --- a/examples/postgres/axum-multi-tenant/accounts/Cargo.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "accounts" -version = "0.1.0" -edition = "2021" - -[dependencies] -sqlx = { workspace = true, features = ["postgres", "time", "uuid", "macros", "sqlx-toml"] } -tokio = { version = "1", features = ["rt", "sync"] } - -argon2 = { version = "0.5.3", features = ["password-hash"] } -password-hash = { version = "0.5", features = ["std"] } - -uuid = "1" -thiserror = "1" -rand = "0.8" - -time = "0.3.37" - -[dev-dependencies] -sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql deleted file mode 100644 index 5aa8fa23cf..0000000000 --- a/examples/postgres/axum-multi-tenant/accounts/migrations/01_setup.sql +++ /dev/null @@ -1,30 +0,0 @@ --- We try to ensure every table has `created_at` and `updated_at` columns, which can help immensely with debugging --- and auditing. --- --- While `created_at` can just be `default now()`, setting `updated_at` on update requires a trigger which --- is a lot of boilerplate. These two functions save us from writing that every time as instead we can just do --- --- select accounts.trigger_updated_at('
'); --- --- after a `CREATE TABLE`. -create or replace function accounts.set_updated_at() - returns trigger as -$$ -begin - NEW.updated_at = now(); -return NEW; -end; -$$ language plpgsql; - -create or replace function accounts.trigger_updated_at(tablename regclass) - returns void as -$$ -begin -execute format('CREATE TRIGGER set_updated_at - BEFORE UPDATE - ON %s - FOR EACH ROW - WHEN (OLD is distinct from NEW) - EXECUTE FUNCTION accounts.set_updated_at();', tablename); -end; -$$ language plpgsql; diff --git a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql b/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql deleted file mode 100644 index a75814bd09..0000000000 --- a/examples/postgres/axum-multi-tenant/accounts/migrations/02_account.sql +++ /dev/null @@ -1,10 +0,0 @@ -create table accounts.account -( - account_id uuid primary key default gen_random_uuid(), - email text unique not null, - password_hash text not null, - created_at timestamptz not null default now(), - updated_at timestamptz -); - -select accounts.trigger_updated_at('accounts.account'); diff --git a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml b/examples/postgres/axum-multi-tenant/accounts/sqlx.toml deleted file mode 100644 index 1d02130c2d..0000000000 --- a/examples/postgres/axum-multi-tenant/accounts/sqlx.toml +++ /dev/null @@ -1,7 +0,0 @@ -[migrate] -create-schemas = ["accounts"] -table-name = "accounts._sqlx_migrations" - -[macros.table-overrides.'accounts.account'] -'account_id' = "crate::AccountId" -'password_hash' = "sqlx::types::Text" diff --git a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs b/examples/postgres/axum-multi-tenant/accounts/src/lib.rs deleted file mode 100644 index 3037463e4c..0000000000 --- a/examples/postgres/axum-multi-tenant/accounts/src/lib.rs +++ /dev/null @@ -1,229 +0,0 @@ -use argon2::{password_hash, Argon2, PasswordHasher, PasswordVerifier}; -use std::sync::Arc; - -use password_hash::PasswordHashString; - -use sqlx::{PgConnection, PgPool, PgTransaction}; - -use uuid::Uuid; - -use tokio::sync::Semaphore; - -#[derive(sqlx::Type, Debug)] -#[sqlx(transparent)] -pub struct AccountId(pub Uuid); - -pub struct AccountsManager { - /// Controls how many blocking tasks are allowed to run concurrently for Argon2 hashing. - /// - /// ### Motivation - /// Tokio blocking tasks are generally not designed for CPU-bound work. - /// - /// If no threads are idle, Tokio will automatically spawn new ones to handle - /// new blocking tasks up to a very high limit--512 by default. - /// - /// This is because blocking tasks are expected to spend their time *blocked*, e.g. on - /// blocking I/O, and thus not consume CPU resources or require a lot of context switching. - /// - /// This strategy is not the most efficient way to use threads for CPU-bound work, which - /// should schedule work to a fixed number of threads to minimize context switching - /// and memory usage (each new thread needs significant space allocated for its stack). - /// - /// We can work around this by using a purpose-designed thread-pool, like Rayon, - /// but we still have the problem that those APIs usually are not designed to support `async`, - /// so we end up needing blocking tasks anyway, or implementing our own work queue using - /// channels. Rayon also does not shut down idle worker threads. - /// - /// `block_in_place` is not a silver bullet, either, as it simply uses `spawn_blocking` - /// internally to take over from the current thread while it is executing blocking work. 
- /// This also prevents futures from being polled concurrently in the current task. - /// - /// We can lower the limit for blocking threads when creating the runtime, but this risks - /// starving other blocking tasks that are being created by the application or the Tokio - /// runtime itself - /// (which are used for `tokio::fs`, stdio, resolving of hostnames by `ToSocketAddrs`, etc.). - /// - /// Instead, we can just use a Semaphore to limit how many blocking tasks are spawned at once, - /// emulating the behavior of a thread pool like Rayon without needing any additional crates. - hashing_semaphore: Arc, -} - -#[derive(Debug, thiserror::Error)] -pub enum CreateError { - #[error("error creating account: email in-use")] - EmailInUse, - #[error("error creating account")] - General( - #[source] - #[from] - GeneralError, - ), -} - -#[derive(Debug, thiserror::Error)] -pub enum AuthenticateError { - #[error("unknown email")] - UnknownEmail, - #[error("invalid password")] - InvalidPassword, - #[error("authentication error")] - General( - #[source] - #[from] - GeneralError, - ), -} - -#[derive(Debug, thiserror::Error)] -pub enum GeneralError { - #[error("database error")] - Sqlx( - #[source] - #[from] - sqlx::Error, - ), - #[error("error hashing password")] - PasswordHash( - #[source] - #[from] - password_hash::Error, - ), - #[error("task panicked")] - Task( - #[source] - #[from] - tokio::task::JoinError, - ), -} - -impl AccountsManager { - pub async fn setup(pool: &PgPool, max_hashing_threads: usize) -> Result { - sqlx::migrate!() - .run(pool) - .await - .map_err(sqlx::Error::from)?; - - Ok(AccountsManager { - hashing_semaphore: Semaphore::new(max_hashing_threads).into(), - }) - } - - async fn hash_password(&self, password: String) -> Result { - let guard = self - .hashing_semaphore - .clone() - .acquire_owned() - .await - .expect("BUG: this semaphore should not be closed"); - - // We transfer ownership to the blocking task and back to ensure Tokio doesn't spawn - // excess threads. - let (_guard, res) = tokio::task::spawn_blocking(move || { - let salt = argon2::password_hash::SaltString::generate(rand::thread_rng()); - ( - guard, - Argon2::default() - .hash_password(password.as_bytes(), &salt) - .map(|hash| hash.serialize()), - ) - }) - .await?; - - Ok(res?) - } - - async fn verify_password( - &self, - password: String, - hash: PasswordHashString, - ) -> Result<(), AuthenticateError> { - let guard = self - .hashing_semaphore - .clone() - .acquire_owned() - .await - .expect("BUG: this semaphore should not be closed"); - - let (_guard, res) = tokio::task::spawn_blocking(move || { - ( - guard, - Argon2::default().verify_password(password.as_bytes(), &hash.password_hash()), - ) - }) - .await - .map_err(GeneralError::from)?; - - if let Err(password_hash::Error::Password) = res { - return Err(AuthenticateError::InvalidPassword); - } - - res.map_err(GeneralError::from)?; - - Ok(()) - } - - pub async fn create( - &self, - txn: &mut PgTransaction<'_>, - email: &str, - password: String, - ) -> Result { - // Hash password whether the account exists or not to make it harder - // to tell the difference in the timing. 
- let hash = self.hash_password(password).await?; - - // Thanks to `sqlx.toml`, `account_id` maps to `AccountId` - sqlx::query_scalar!( - // language=PostgreSQL - "insert into accounts.account(email, password_hash) \ - values ($1, $2) \ - returning account_id", - email, - hash.as_str(), - ) - .fetch_one(&mut **txn) - .await - .map_err(|e| { - if e.as_database_error().and_then(|dbe| dbe.constraint()) - == Some("account_account_id_key") - { - CreateError::EmailInUse - } else { - GeneralError::from(e).into() - } - }) - } - - pub async fn authenticate( - &self, - conn: &mut PgConnection, - email: &str, - password: String, - ) -> Result { - // Thanks to `sqlx.toml`: - // * `account_id` maps to `AccountId` - // * `password_hash` maps to `Text` - let maybe_account = sqlx::query!( - "select account_id, password_hash \ - from accounts.account \ - where email = $1", - email - ) - .fetch_optional(&mut *conn) - .await - .map_err(GeneralError::from)?; - - let Some(account) = maybe_account else { - // Hash the password whether the account exists or not to hide the difference in timing. - self.hash_password(password) - .await - .map_err(GeneralError::from)?; - return Err(AuthenticateError::UnknownEmail); - }; - - self.verify_password(password, account.password_hash.into_inner()) - .await?; - - Ok(account.account_id) - } -} diff --git a/examples/postgres/axum-multi-tenant/payments/Cargo.toml b/examples/postgres/axum-multi-tenant/payments/Cargo.toml deleted file mode 100644 index 6a0e4d2672..0000000000 --- a/examples/postgres/axum-multi-tenant/payments/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "payments" -version = "0.1.0" -edition = "2021" - -[dependencies] -accounts = { path = "../accounts" } - -sqlx = { workspace = true, features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml"] } - -rust_decimal = "1.36.0" - -time = "0.3.37" -uuid = "1.12.1" - -[dev-dependencies] -sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql b/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql deleted file mode 100644 index 4935a63705..0000000000 --- a/examples/postgres/axum-multi-tenant/payments/migrations/01_setup.sql +++ /dev/null @@ -1,30 +0,0 @@ --- We try to ensure every table has `created_at` and `updated_at` columns, which can help immensely with debugging --- and auditing. --- --- While `created_at` can just be `default now()`, setting `updated_at` on update requires a trigger which --- is a lot of boilerplate. These two functions save us from writing that every time as instead we can just do --- --- select payments.trigger_updated_at('
'); --- --- after a `CREATE TABLE`. -create or replace function payments.set_updated_at() - returns trigger as -$$ -begin - NEW.updated_at = now(); -return NEW; -end; -$$ language plpgsql; - -create or replace function payments.trigger_updated_at(tablename regclass) - returns void as -$$ -begin -execute format('CREATE TRIGGER set_updated_at - BEFORE UPDATE - ON %s - FOR EACH ROW - WHEN (OLD is distinct from NEW) - EXECUTE FUNCTION payments.set_updated_at();', tablename); -end; -$$ language plpgsql; diff --git a/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql b/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql deleted file mode 100644 index cc372f01b7..0000000000 --- a/examples/postgres/axum-multi-tenant/payments/migrations/02_payment.sql +++ /dev/null @@ -1,58 +0,0 @@ --- `payments::PaymentStatus` --- --- Historically at LaunchBadge we preferred not to define enums on the database side because it can be annoying --- and error-prone to keep them in-sync with the application. --- Instead, we let the application define the enum and just have the database store a compact representation of it. --- This is mostly a matter of taste, however. --- --- For the purposes of this example, we're using an in-database enum because this is a common use-case --- for needing type overrides. -create type payments.payment_status as enum ( - 'pending', - 'success', - 'failed' - ); - -create table payments.payment -( - payment_id uuid primary key default gen_random_uuid(), - -- This cross-schema reference means migrations for the `accounts` crate should be run first. - account_id uuid not null references accounts.account (account_id), - - status payments.payment_status NOT NULL, - - -- ISO 4217 currency code (https://en.wikipedia.org/wiki/ISO_4217#List_of_ISO_4217_currency_codes) - -- - -- This *could* be an ENUM of currency codes, but constraining this to a set of known values in the database - -- would be annoying to keep up to date as support for more currencies is added. - -- - -- Consider also if support for cryptocurrencies is desired; those are not covered by ISO 4217. - -- - -- Though ISO 4217 is a three-character code, `TEXT`, `VARCHAR` and `CHAR(N)` - -- all use the same storage format in Postgres. Any constraint against the length of this field - -- would purely be a sanity check. - currency text NOT NULL, - -- There's an endless debate about what type should be used to represent currency amounts. - -- - -- Postgres has the `MONEY` type, but the fractional precision depends on a C locale setting and the type is mostly - -- optimized for storing USD, or other currencies with a minimum fraction of 1 cent. - -- - -- NEVER use `FLOAT` or `DOUBLE`. IEEE-754 rounding point has round-off and precision errors that make it wholly - -- unsuitable for representing real money amounts. - -- - -- `NUMERIC`, being an arbitrary-precision decimal format, is a safe default choice that can support any currency, - -- and so is what we've chosen here. - amount NUMERIC NOT NULL, - - -- Payments almost always take place through a third-party vendor (e.g. PayPal, Stripe, etc.), - -- so imagine this is an identifier string for this payment in such a vendor's systems. - -- - -- For privacy and security reasons, payment and personally-identifying information - -- (e.g. credit card numbers, bank account numbers, billing addresses) should only be stored with the vendor - -- unless there is a good reason otherwise. 
- external_payment_id TEXT NOT NULL UNIQUE, - created_at timestamptz default now(), - updated_at timestamptz -); - -select payments.trigger_updated_at('payments.payment'); diff --git a/examples/postgres/axum-multi-tenant/payments/sqlx.toml b/examples/postgres/axum-multi-tenant/payments/sqlx.toml deleted file mode 100644 index 1a4a27dc6a..0000000000 --- a/examples/postgres/axum-multi-tenant/payments/sqlx.toml +++ /dev/null @@ -1,10 +0,0 @@ -[migrate] -create-schemas = ["payments"] -table-name = "payments._sqlx_migrations" - -[macros.table-overrides.'payments.payment'] -'payment_id' = "crate::PaymentId" -'account_id' = "accounts::AccountId" - -[macros.type-overrides] -'payments.payment_status' = "crate::PaymentStatus" diff --git a/examples/postgres/axum-multi-tenant/payments/src/lib.rs b/examples/postgres/axum-multi-tenant/payments/src/lib.rs deleted file mode 100644 index b0efcfe17f..0000000000 --- a/examples/postgres/axum-multi-tenant/payments/src/lib.rs +++ /dev/null @@ -1,34 +0,0 @@ -use accounts::AccountId; -use sqlx::PgPool; -use time::OffsetDateTime; -use uuid::Uuid; - -#[derive(sqlx::Type, Debug)] -#[sqlx(transparent)] -pub struct PaymentId(pub Uuid); - -#[derive(sqlx::Type, Debug)] -#[sqlx(type_name = "payments.payment_status")] -#[sqlx(rename_all = "snake_case")] -pub enum PaymentStatus { - Pending, - Successful, -} - -#[derive(Debug)] -pub struct Payment { - pub payment_id: PaymentId, - pub account_id: AccountId, - pub status: PaymentStatus, - pub currency: String, - // `rust_decimal::Decimal` has more than enough precision for any real-world amount of money. - pub amount: rust_decimal::Decimal, - pub external_payment_id: String, - pub created_at: OffsetDateTime, - pub updated_at: Option, -} - -pub async fn migrate(pool: &PgPool) -> sqlx::Result<()> { - sqlx::migrate!().run(pool).await?; - Ok(()) -} diff --git a/examples/postgres/axum-multi-tenant/src/http/mod.rs b/examples/postgres/axum-multi-tenant/src/http/mod.rs deleted file mode 100644 index 9197a2042f..0000000000 --- a/examples/postgres/axum-multi-tenant/src/http/mod.rs +++ /dev/null @@ -1,7 +0,0 @@ -use accounts::AccountsManager; -use color_eyre::eyre; -use sqlx::PgPool; - -pub async fn run(pool: PgPool, accounts: AccountsManager) -> eyre::Result<()> { - axum::serve -} diff --git a/examples/postgres/axum-multi-tenant/src/main.rs b/examples/postgres/axum-multi-tenant/src/main.rs deleted file mode 100644 index 3d4b0cba64..0000000000 --- a/examples/postgres/axum-multi-tenant/src/main.rs +++ /dev/null @@ -1,64 +0,0 @@ -mod http; - -use accounts::AccountsManager; -use color_eyre::eyre; -use color_eyre::eyre::Context; - -#[derive(clap::Parser)] -struct Args { - #[clap(long, env)] - database_url: String, - - #[clap(long, env, default_value_t = 0)] - max_hashing_threads: usize, -} - -#[tokio::main] -async fn main() -> eyre::Result<()> { - color_eyre::install()?; - let _ = dotenvy::dotenv(); - - // (@abonander) I prefer to keep `clap::Parser` fully qualified here because it makes it clear - // what crate the derive macro is coming from. Otherwise, it requires contextual knowledge - // to understand that this is parsing CLI arguments. - let args: Args = clap::Parser::parse(); - - tracing_subscriber::fmt::init(); - - let pool = sqlx::PgPool::connect( - // `env::var()` doesn't include the variable name for context like it should. 
- &dotenvy::var("DATABASE_URL").wrap_err("DATABASE_URL must be set")?, - ) - .await - .wrap_err("could not connect to database")?; - - let max_hashing_threads = if args.max_hashing_threads == 0 { - std::thread::available_parallelism() - // We could just default to 1 but that would be a silent pessimization, - // which would be hard to debug. - .wrap_err("unable to determine number of available CPU cores; set `--max-hashing-threads` to a nonzero amount")? - .get() - } else { - args.max_hashing_threads - }; - - // Runs migration for `accounts` internally. - let accounts = AccountsManager::setup(&pool, max_hashing_threads) - .await - .wrap_err("error initializing AccountsManager")?; - - payments::migrate(&pool) - .await - .wrap_err("error running payments migrations")?; - - // `main()` doesn't actually run from a Tokio worker thread, - // so spawned tasks hit the global injection queue first and communication with the driver - // core is always cross-thread. - // - // The recommendation is to spawn the `axum::serve` future as a task so it executes directly - // on a worker thread. - - let http_task = tokio::spawn(http::run(pool, accounts)); - - Ok(()) -} diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml index a219cce2b8..f7dca28855 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "sqlx-example-postgres-multi-tenant" +name = "axum-multi-tenant" version.workspace = true license.workspace = true edition.workspace = true @@ -9,8 +9,15 @@ categories.workspace = true authors.workspace = true [dependencies] +accounts = { path = "accounts" } +payments = { path = "payments" } + tokio = { version = "1", features = ["rt-multi-thread", "macros"] } +sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } + +axum = { version = "0.8.1", features = ["macros"] } + color-eyre = "0.6.3" dotenvy = "0.15.7" tracing-subscriber = "0.3.19" @@ -19,18 +26,5 @@ rust_decimal = "1.36.0" rand = "0.8.5" -[dependencies.sqlx] -# version = "0.9.0" -workspace = true -features = ["runtime-tokio", "postgres", "migrate", "sqlx-toml"] - -[dependencies.accounts] -path = "accounts" -package = "sqlx-example-postgres-multi-tenant-accounts" - -[dependencies.payments] -path = "payments" -package = "sqlx-example-postgres-multi-tenant-payments" - [lints] workspace = true diff --git a/examples/postgres/multi-tenant/README.md b/examples/postgres/multi-tenant/README.md index 01848a3f83..9f96ff72f1 100644 --- a/examples/postgres/multi-tenant/README.md +++ b/examples/postgres/multi-tenant/README.md @@ -1,9 +1,9 @@ -# Multi-tenant Databases with `sqlx.toml` +# Axum App with Multi-tenant Database This example project involves three crates, each owning a different schema in one database, with their own set of migrations. -* The main crate, a simple binary simulating the action of a REST API. +* The main crate, an Axum app. * Owns the `public` schema (tables are referenced unqualified). * Migrations are moved to `src/migrations` using config key `migrate.migrations-dir` to visually separate them from the subcrate folders. @@ -19,7 +19,7 @@ This example uses schema-qualified names everywhere for clarity. It can be tempting to change the `search_path` of the connection (MySQL, Postgres) to eliminate the need for schema prefixes, but this can cause some really confusing issues when names conflict. 
-This example will generate a `_sqlx_migrations` table in three different schemas; if `search_path` is set +This example will generate a `_sqlx_migrations` table in three different schemas, and if `search_path` is set to `public,accounts,payments` and the migrator for the main application attempts to reference the table unqualified, it would throw an error. @@ -27,23 +27,11 @@ it would throw an error. This example requires running three different sets of migrations. -Ensure `sqlx-cli` is installed with Postgres and `sqlx.toml` support: +Ensure `sqlx-cli` is installed with Postgres support. -``` -cargo install sqlx-cli --features postgres,sqlx-toml -``` - -Start a Postgres server (shown here using Docker, `run` command also works with `podman`): +Start a Postgres server. -``` -docker run -d -e POSTGRES_PASSWORD=password -p 5432:5432 --name postgres postgres:latest -``` - -Create `.env` with `DATABASE_URL` or set the variable in your shell environment; - -``` -DATABASE_URL=postgres://postgres:password@localhost/example-multi-tenant -``` +Create `.env` with `DATABASE_URL` or set it in your shell environment. Run the following commands: diff --git a/examples/postgres/multi-tenant/accounts/Cargo.toml b/examples/postgres/multi-tenant/accounts/Cargo.toml index 40c365c607..0295dcec8a 100644 --- a/examples/postgres/multi-tenant/accounts/Cargo.toml +++ b/examples/postgres/multi-tenant/accounts/Cargo.toml @@ -1,9 +1,10 @@ [package] -name = "sqlx-example-postgres-multi-tenant-accounts" +name = "accounts" version = "0.1.0" edition = "2021" [dependencies] +sqlx = { workspace = true, features = ["postgres", "time", "uuid", "macros", "sqlx-toml"] } tokio = { version = "1", features = ["rt", "sync"] } argon2 = { version = "0.5.3", features = ["password-hash"] } @@ -17,10 +18,5 @@ time = { version = "0.3.37", features = ["serde"] } serde = { version = "1.0.218", features = ["derive"] } -[dependencies.sqlx] -# version = "0.9.0" -workspace = true -features = ["postgres", "time", "uuid", "macros", "sqlx-toml", "migrate"] - [dev-dependencies] sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/multi-tenant/payments/Cargo.toml b/examples/postgres/multi-tenant/payments/Cargo.toml index de15b21828..6a0e4d2672 100644 --- a/examples/postgres/multi-tenant/payments/Cargo.toml +++ b/examples/postgres/multi-tenant/payments/Cargo.toml @@ -1,23 +1,17 @@ [package] -name = "sqlx-example-postgres-multi-tenant-payments" +name = "payments" version = "0.1.0" edition = "2021" [dependencies] +accounts = { path = "../accounts" } + +sqlx = { workspace = true, features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml"] } rust_decimal = "1.36.0" time = "0.3.37" uuid = "1.12.1" -[dependencies.sqlx] -# version = "0.9.0" -workspace = true -features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml", "migrate"] - -[dependencies.accounts] -path = "../accounts" -package = "sqlx-example-postgres-multi-tenant-accounts" - [dev-dependencies] sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/multi-tenant/src/main.rs b/examples/postgres/multi-tenant/src/main.rs index 94a96fcf2b..4aa1b9c5a8 100644 --- a/examples/postgres/multi-tenant/src/main.rs +++ b/examples/postgres/multi-tenant/src/main.rs @@ -45,10 +45,7 @@ async fn main() -> eyre::Result<()> { txn.commit().await?; - println!( - "created account ID: {}, email: {user_email:?}, password: {user_password:?}", - account_id.0 - ); + println!("created account ID: {}, email: {user_email:?}, password: 
{user_password:?}", account_id.0); // POST /session // Log the user in. From 85b4507d3e59feb2475b6afc1a1fd644b9983aa1 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 17:00:37 -0800 Subject: [PATCH 42/78] chore(ci): test multi-tenant example --- .github/workflows/examples.yml | 37 ++----------------- Cargo.toml | 3 -- examples/postgres/multi-tenant/Cargo.toml | 13 +++++-- .../postgres/multi-tenant/accounts/Cargo.toml | 2 +- .../postgres/multi-tenant/payments/Cargo.toml | 2 +- examples/postgres/multi-tenant/src/main.rs | 5 ++- 6 files changed, 19 insertions(+), 43 deletions(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index d1b8ff4634..c630c79287 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -175,48 +175,19 @@ jobs: DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos run: cargo run -p sqlx-example-postgres-mockable-todos - - name: Multi-Database (Setup) - working-directory: examples/postgres/multi-database - env: - DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database - ACCOUNTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-accounts - PAYMENTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-payments - run: | - (cd accounts && sqlx db setup) - (cd payments && sqlx db setup) - sqlx db setup - - - name: Multi-Database (Run) - env: - DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database - ACCOUNTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-accounts - PAYMENTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-payments - run: cargo run -p sqlx-example-postgres-multi-database - - name: Multi-Tenant (Setup) working-directory: examples/postgres/multi-tenant env: - DATABASE_URL: postgres://postgres:password@localhost:5432/multi-tenant + DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos run: | (cd accounts && sqlx db setup) (cd payments && sqlx migrate run) sqlx migrate run - - name: Multi-Tenant (Run) - env: - DATABASE_URL: postgres://postgres:password@localhost:5432/multi-tenant - run: cargo run -p sqlx-example-postgres-multi-tenant - - - name: Preferred-Crates (Setup) - working-directory: examples/postgres/preferred-crates - env: - DATABASE_URL: postgres://postgres:password@localhost:5432/preferred-crates - run: sqlx db setup - - - name: Multi-Tenant (Run) + - name: Mockable TODOs (Run) env: - DATABASE_URL: postgres://postgres:password@localhost:5432/preferred-crates - run: cargo run -p sqlx-example-postgres-preferred-crates + DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos + run: cargo run -p sqlx-example-postgres-mockable-todos - name: TODOs (Setup) working-directory: examples/postgres/todos diff --git a/Cargo.toml b/Cargo.toml index cafb89c08f..e12b16c5bb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,16 +11,13 @@ members = [ "sqlx-postgres", "sqlx-sqlite", "examples/mysql/todos", - "examples/postgres/multi-tenant", "examples/postgres/axum-social-with-tests", "examples/postgres/chat", "examples/postgres/files", "examples/postgres/json", "examples/postgres/listen", "examples/postgres/mockable-todos", - "examples/postgres/multi-database", "examples/postgres/multi-tenant", - "examples/postgres/preferred-crates", "examples/postgres/todos", "examples/postgres/transaction", "examples/sqlite/todos", diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml 
index f7dca28855..f93c91747a 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "axum-multi-tenant" +name = "sqlx-example-postgres-multi-tenant" version.workspace = true license.workspace = true edition.workspace = true @@ -9,9 +9,6 @@ categories.workspace = true authors.workspace = true [dependencies] -accounts = { path = "accounts" } -payments = { path = "payments" } - tokio = { version = "1", features = ["rt-multi-thread", "macros"] } sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } @@ -26,5 +23,13 @@ rust_decimal = "1.36.0" rand = "0.8.5" +[dependencies.accounts] +package = "sqlx-example-postgres-multi-tenant-accounts" +path = "accounts" + +[dependencies.payments] +package = "sqlx-example-postgres-multi-tenant-accounts" +path = "payments" + [lints] workspace = true diff --git a/examples/postgres/multi-tenant/accounts/Cargo.toml b/examples/postgres/multi-tenant/accounts/Cargo.toml index 0295dcec8a..33b185912c 100644 --- a/examples/postgres/multi-tenant/accounts/Cargo.toml +++ b/examples/postgres/multi-tenant/accounts/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "accounts" +name = "sqlx-example-postgres-multi-tenant-accounts" version = "0.1.0" edition = "2021" diff --git a/examples/postgres/multi-tenant/payments/Cargo.toml b/examples/postgres/multi-tenant/payments/Cargo.toml index 6a0e4d2672..1c6d31868b 100644 --- a/examples/postgres/multi-tenant/payments/Cargo.toml +++ b/examples/postgres/multi-tenant/payments/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "payments" +name = "sqlx-example-postgres-multi-tenant-payments" version = "0.1.0" edition = "2021" diff --git a/examples/postgres/multi-tenant/src/main.rs b/examples/postgres/multi-tenant/src/main.rs index 4aa1b9c5a8..94a96fcf2b 100644 --- a/examples/postgres/multi-tenant/src/main.rs +++ b/examples/postgres/multi-tenant/src/main.rs @@ -45,7 +45,10 @@ async fn main() -> eyre::Result<()> { txn.commit().await?; - println!("created account ID: {}, email: {user_email:?}, password: {user_password:?}", account_id.0); + println!( + "created account ID: {}, email: {user_email:?}, password: {user_password:?}", + account_id.0 + ); // POST /session // Log the user in. 
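The package renames above lean on Cargo's `package` key: the workspace gets globally unique crate names (`sqlx-example-postgres-multi-tenant-*`) while the example code keeps referring to the dependencies by their short names (`accounts`, `payments`). A minimal sketch of the resulting dependency table, assuming the corrected `payments` package name from the follow-up fixup commit:

```toml
# examples/postgres/multi-tenant/Cargo.toml (sketch of the dependency-rename pattern)
# Rust code continues to import `accounts::...` and `payments::...`;
# Cargo resolves those short names to the uniquely named workspace members.
[dependencies.accounts]
path = "accounts"
package = "sqlx-example-postgres-multi-tenant-accounts"

[dependencies.payments]
path = "payments"
package = "sqlx-example-postgres-multi-tenant-payments"
```

Because the rename happens entirely in the manifest, no import paths in the example's source need to change.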
From 2b1a27193e1f2ac68327722d42f7a6584e720091 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 17:10:23 -0800 Subject: [PATCH 43/78] fixup after merge --- Cargo.lock | 288 ++++++++++++------ examples/postgres/multi-tenant/Cargo.toml | 4 +- .../postgres/multi-tenant/payments/Cargo.toml | 5 +- sqlx-postgres/src/connection/executor.rs | 8 +- 4 files changed, 205 insertions(+), 100 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ae4f589783..2838e39c51 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -392,16 +392,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43" dependencies = [ "async-trait", - "axum-core", - "axum-macros", + "axum-core 0.2.9", + "axum-macros 0.2.3", "bitflags 1.3.2", "bytes", "futures-util", - "http", - "http-body", - "hyper", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.32", "itoa", - "matchit", + "matchit 0.5.0", "memchr", "mime", "percent-encoding", @@ -409,14 +409,49 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "tokio", - "tower", + "tower 0.4.13", "tower-http", "tower-layer", "tower-service", ] +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core 0.5.0", + "axum-macros 0.5.0", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "itoa", + "matchit 0.8.4", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "axum-core" version = "0.2.9" @@ -426,11 +461,31 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", + "mime", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -445,6 +500,17 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "axum-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + [[package]] name = "backoff" version = "0.4.0" @@ -1727,6 +1793,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http-body" version = "0.4.6" @@ -1734,7 +1811,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http", + "http 0.2.12", + "pin-project-lite", +] + +[[package]] +name 
= "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.2.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -1772,8 +1872,8 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", @@ -1785,6 +1885,41 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.6.0", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "iana-time-zone" version = "0.1.61" @@ -2237,6 +2372,12 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "md-5" version = "0.10.6" @@ -3318,6 +3459,16 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_path_to_error" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" +dependencies = [ + "itoa", + "serde", +] + [[package]] name = "serde_spanned" version = "0.6.8" @@ -3632,7 +3783,7 @@ version = "0.1.0" dependencies = [ "anyhow", "argon2 0.4.1", - "axum", + "axum 0.5.17", "dotenvy", "rand", "regex", @@ -3643,7 +3794,7 @@ dependencies = [ "thiserror 2.0.11", "time", "tokio", - "tower", + "tower 0.4.13", "tracing", "uuid", "validator", @@ -3705,51 +3856,11 @@ dependencies = [ "tokio", ] -[[package]] -name = "sqlx-example-postgres-multi-database" -version = "0.9.0-alpha.1" -dependencies = [ - "color-eyre", - "dotenvy", - "rand", - "rust_decimal", - "sqlx", - "sqlx-example-postgres-multi-database-accounts", - "sqlx-example-postgres-multi-database-payments", - "tokio", - "tracing-subscriber", -] - -[[package]] -name = "sqlx-example-postgres-multi-database-accounts" -version = "0.1.0" -dependencies = [ - "argon2 0.5.3", - "password-hash 0.5.0", - "rand", - "serde", - "sqlx", - "thiserror 1.0.69", - "time", - "tokio", - "uuid", -] - -[[package]] -name = "sqlx-example-postgres-multi-database-payments" -version = "0.1.0" -dependencies = [ - "rust_decimal", - "sqlx", - "sqlx-example-postgres-multi-database-accounts", - "time", - "uuid", -] - [[package]] name = "sqlx-example-postgres-multi-tenant" -version = "0.9.0-alpha.1" +version = "0.8.3" 
dependencies = [ + "axum 0.8.1", "color-eyre", "dotenvy", "rand", @@ -3787,41 +3898,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "sqlx-example-postgres-preferred-crates" -version = "0.9.0-alpha.1" -dependencies = [ - "anyhow", - "chrono", - "dotenvy", - "serde", - "sqlx", - "sqlx-example-postgres-preferred-crates-uses-rust-decimal", - "sqlx-example-postgres-preferred-crates-uses-time", - "tokio", - "uuid", -] - -[[package]] -name = "sqlx-example-postgres-preferred-crates-uses-rust-decimal" -version = "0.9.0-alpha.1" -dependencies = [ - "chrono", - "rust_decimal", - "sqlx", - "uuid", -] - -[[package]] -name = "sqlx-example-postgres-preferred-crates-uses-time" -version = "0.9.0-alpha.1" -dependencies = [ - "serde", - "sqlx", - "time", - "uuid", -] - [[package]] name = "sqlx-example-postgres-todos" version = "0.1.0" @@ -4185,6 +4261,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" + [[package]] name = "synstructure" version = "0.13.1" @@ -4464,6 +4546,22 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper 1.0.2", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower-http" version = "0.3.5" @@ -4474,11 +4572,11 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", - "http-body", + "http 0.2.12", + "http-body 0.4.6", "http-range-header", "pin-project-lite", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", ] diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml index f93c91747a..200fcfd2e8 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -24,12 +24,12 @@ rust_decimal = "1.36.0" rand = "0.8.5" [dependencies.accounts] -package = "sqlx-example-postgres-multi-tenant-accounts" path = "accounts" +package = "sqlx-example-postgres-multi-tenant-accounts" [dependencies.payments] -package = "sqlx-example-postgres-multi-tenant-accounts" path = "payments" +package = "sqlx-example-postgres-multi-tenant-payments" [lints] workspace = true diff --git a/examples/postgres/multi-tenant/payments/Cargo.toml b/examples/postgres/multi-tenant/payments/Cargo.toml index 1c6d31868b..1f7d7c3f75 100644 --- a/examples/postgres/multi-tenant/payments/Cargo.toml +++ b/examples/postgres/multi-tenant/payments/Cargo.toml @@ -4,7 +4,6 @@ version = "0.1.0" edition = "2021" [dependencies] -accounts = { path = "../accounts" } sqlx = { workspace = true, features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml"] } @@ -13,5 +12,9 @@ rust_decimal = "1.36.0" time = "0.3.37" uuid = "1.12.1" +[dependencies.accounts] +path = "../accounts" +package = "sqlx-example-postgres-multi-tenant-accounts" + [dev-dependencies] sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/sqlx-postgres/src/connection/executor.rs b/sqlx-postgres/src/connection/executor.rs index f8dbfe7ec6..b6e972c4ee 100644 --- a/sqlx-postgres/src/connection/executor.rs +++ 
b/sqlx-postgres/src/connection/executor.rs @@ -86,7 +86,9 @@ async fn prepare( let parameters = conn.handle_parameter_description(parameters).await?; - let (columns, column_names) = conn.handle_row_description(rows, true, fetch_column_origin).await?; + let (columns, column_names) = conn + .handle_row_description(rows, true, fetch_column_origin) + .await?; // ensure that if we did fetch custom data, we wait until we are fully ready before // continuing @@ -455,7 +457,9 @@ impl<'c> Executor<'c> for &'c mut PgConnection { Box::pin(async move { self.wait_until_ready().await?; - let (_, metadata) = self.get_or_prepare(sql, parameters, true, None, true).await?; + let (_, metadata) = self + .get_or_prepare(sql, parameters, true, None, true) + .await?; Ok(PgStatement { sql: Cow::Borrowed(sql), From d998be1c1314f51787c3a6d15aa029a77ac3fbd5 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 17:27:57 -0800 Subject: [PATCH 44/78] fix: CI, README for `multi-tenant` --- .github/workflows/examples.yml | 8 ++++---- examples/postgres/multi-tenant/README.md | 20 ++++++++++++++++---- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index c630c79287..5b87183473 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -178,16 +178,16 @@ jobs: - name: Multi-Tenant (Setup) working-directory: examples/postgres/multi-tenant env: - DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos + DATABASE_URL: postgres://postgres:password@localhost:5432/multi-tenant run: | (cd accounts && sqlx db setup) (cd payments && sqlx migrate run) sqlx migrate run - - name: Mockable TODOs (Run) + - name: Multi-Tenant (Run) env: - DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos - run: cargo run -p sqlx-example-postgres-mockable-todos + DATABASE_URL: postgres://postgres:password@localhost:5432/multi-tenant + run: cargo run -p sqlx-example-postgres-multi-tenant - name: TODOs (Setup) working-directory: examples/postgres/todos diff --git a/examples/postgres/multi-tenant/README.md b/examples/postgres/multi-tenant/README.md index 9f96ff72f1..8122d852a7 100644 --- a/examples/postgres/multi-tenant/README.md +++ b/examples/postgres/multi-tenant/README.md @@ -19,7 +19,7 @@ This example uses schema-qualified names everywhere for clarity. It can be tempting to change the `search_path` of the connection (MySQL, Postgres) to eliminate the need for schema prefixes, but this can cause some really confusing issues when names conflict. -This example will generate a `_sqlx_migrations` table in three different schemas, and if `search_path` is set +This example will generate a `_sqlx_migrations` table in three different schemas; if `search_path` is set to `public,accounts,payments` and the migrator for the main application attempts to reference the table unqualified, it would throw an error. @@ -27,11 +27,23 @@ it would throw an error. This example requires running three different sets of migrations. -Ensure `sqlx-cli` is installed with Postgres support. +Ensure `sqlx-cli` is installed with Postgres and `sqlx.toml` support: -Start a Postgres server. +``` +cargo install sqlx-cli --features postgres,sqlx-toml +``` + +Start a Postgres server (shown here using Docker, `run` command also works with `podman`): -Create `.env` with `DATABASE_URL` or set it in your shell environment. 
+``` +docker run -d -e POSTGRES_PASSWORD=password -p 5432:5432 --name postgres postgres:latest +``` + +Create `.env` with `DATABASE_URL` or set the variable in your shell environment; + +``` +DATABASE_URL=postgres://postgres:password@localhost/example-multi-tenant +``` Run the following commands: From 7ab599c26fde6b5e420c5330b86d4ea426a64787 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 18:16:09 -0800 Subject: [PATCH 45/78] fix: clippy warnings --- sqlx-postgres/src/migrate.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sqlx-postgres/src/migrate.rs b/sqlx-postgres/src/migrate.rs index 90ebd49a73..8275bda188 100644 --- a/sqlx-postgres/src/migrate.rs +++ b/sqlx-postgres/src/migrate.rs @@ -154,7 +154,7 @@ CREATE TABLE IF NOT EXISTS {table_name} ( ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let row: Option<(i64,)> = query_as(&*format!( + let row: Option<(i64,)> = query_as(&format!( "SELECT version FROM {table_name} WHERE success = false ORDER BY version LIMIT 1" )) .fetch_optional(self) @@ -170,7 +170,7 @@ CREATE TABLE IF NOT EXISTS {table_name} ( ) -> BoxFuture<'e, Result, MigrateError>> { Box::pin(async move { // language=SQL - let rows: Vec<(i64, Vec)> = query_as(&*format!( + let rows: Vec<(i64, Vec)> = query_as(&format!( "SELECT version, checksum FROM {table_name} ORDER BY version" )) .fetch_all(self) @@ -253,7 +253,7 @@ CREATE TABLE IF NOT EXISTS {table_name} ( // language=SQL #[allow(clippy::cast_possible_truncation)] - let _ = query(&*format!( + let _ = query(&format!( r#" UPDATE {table_name} SET execution_time = $1 @@ -306,7 +306,7 @@ async fn execute_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&*format!( + let _ = query(&format!( r#" INSERT INTO {table_name} ( version, description, success, checksum, execution_time ) VALUES ( $1, $2, TRUE, $3, -1 ) @@ -332,7 +332,7 @@ async fn revert_migration( .map_err(|e| MigrateError::ExecuteMigration(e, migration.version))?; // language=SQL - let _ = query(&*format!(r#"DELETE FROM {table_name} WHERE version = $1"#)) + let _ = query(&format!(r#"DELETE FROM {table_name} WHERE version = $1"#)) .bind(migration.version) .execute(conn) .await?; From 06940268dd67befd2c6d56e5890fa80528cb0b58 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 18:17:40 -0800 Subject: [PATCH 46/78] fix: multi-tenant README --- examples/postgres/multi-tenant/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/postgres/multi-tenant/README.md b/examples/postgres/multi-tenant/README.md index 8122d852a7..3688202690 100644 --- a/examples/postgres/multi-tenant/README.md +++ b/examples/postgres/multi-tenant/README.md @@ -3,7 +3,7 @@ This example project involves three crates, each owning a different schema in one database, with their own set of migrations. -* The main crate, an Axum app. +* The main crate, a simple binary simulating the action of a REST API. * Owns the `public` schema (tables are referenced unqualified). * Migrations are moved to `src/migrations` using config key `migrate.migrations-dir` to visually separate them from the subcrate folders. 
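The README bullet above names the `migrate.migrations-dir` config key without showing it. As an illustrative sketch (the example's actual `sqlx.toml` is not reproduced in this diff), the main crate's config would contain roughly:

```toml
# sqlx.toml of the main example crate (sketch; key name taken from the README above)
[migrate]
migrations-dir = "src/migrations"
```

Each of the three crates carries its own migration set, which is why the CI step and the README run `sqlx db setup` / `sqlx migrate run` once per crate directory rather than once for the whole workspace.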
From ec20110414fb0256ab156636b79a0013052eb33a Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 18:30:43 -0800 Subject: [PATCH 47/78] fix: sequential versioning inference for migrations --- sqlx-cli/src/opt.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index c42dd82e8c..26ddb43101 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -498,8 +498,8 @@ fn next_timestamp() -> String { fn next_sequential(migrator: &Migrator) -> Option { let next_version = migrator .migrations - .windows(2) - .last() + .rchunks(2) + .next() .and_then(|migrations| { match migrations { [previous, latest] => { From 18933361fa44d4178412e90f1bcad3ff843093f9 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 27 Feb 2025 18:36:50 -0800 Subject: [PATCH 48/78] fix: migration versioning with explicit overrides --- sqlx-cli/src/opt.rs | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 26ddb43101..99f8ff95f6 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -479,15 +479,16 @@ impl AddMigrationOpts { pub fn version_prefix(&self, config: &Config, migrator: &Migrator) -> String { let default_versioning = &config.migrate.defaults.migration_versioning; - if self.timestamp || matches!(default_versioning, DefaultVersioning::Timestamp) { - return next_timestamp(); - } - - if self.sequential || matches!(default_versioning, DefaultVersioning::Sequential) { - return next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)); + match (self.timestamp, self.sequential, default_versioning) { + (true, false, _) | (false, false, DefaultVersioning::Timestamp) => next_timestamp(), + (false, true, _) | (false, false, DefaultVersioning::Sequential) => { + next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)) + } + (false, false, DefaultVersioning::Inferred) => { + next_sequential(migrator).unwrap_or_else(next_timestamp) + } + (true, true, _) => unreachable!("BUG: Clap should have rejected this case"), } - - next_sequential(migrator).unwrap_or_else(next_timestamp) } } From 52103a0bf754a175d3df132fd35a72eb481e1618 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 30 Mar 2025 03:42:28 -0700 Subject: [PATCH 49/78] fix: only warn on ambiguous crates if the invocation relies on it --- sqlx-core/src/type_checking.rs | 70 ++++++++++++++++++++++++++++ sqlx-macros-core/src/query/args.rs | 14 +++++- sqlx-macros-core/src/query/mod.rs | 30 +++++++----- sqlx-macros-core/src/query/output.rs | 28 ++++++++--- 4 files changed, 123 insertions(+), 19 deletions(-) diff --git a/sqlx-core/src/type_checking.rs b/sqlx-core/src/type_checking.rs index c92a3816b2..3d6ab35c94 100644 --- a/sqlx-core/src/type_checking.rs +++ b/sqlx-core/src/type_checking.rs @@ -60,6 +60,10 @@ pub enum Error { DateTimeCrateFeatureNotEnabled, #[error("Cargo feature for configured `macros.preferred-crates.numeric` not enabled")] NumericCrateFeatureNotEnabled, + #[error("multiple date-time types are possible; falling back to `{fallback}`")] + AmbiguousDateTimeType { fallback: &'static str }, + #[error("multiple numeric types are possible; falling back to `{fallback}`")] + AmbiguousNumericType { fallback: &'static str }, } /// An adapter for [`Value`] which attempts to decode the value and format it when printed using [`Debug`]. @@ -195,12 +199,24 @@ macro_rules! 
impl_type_checking { if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) { $( if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info { + if cfg!(feature = "chrono") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: $crate::select_input_type!($time_ty $(, $time_input)?), + }); + } + return Ok($crate::select_input_type!($time_ty $(, $time_input)?)); } )* $( if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) { + if cfg!(feature = "chrono") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: $crate::select_input_type!($time_ty $(, $time_input)?), + }); + } + return Ok($crate::select_input_type!($time_ty $(, $time_input)?)); } )* @@ -240,12 +256,24 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info { + if cfg!(feature = "rust_decimal") { + return Err($crate::type_checking::Error::AmbiguousNumericType { + fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?), + }); + } + return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?)); } )* $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) { + if cfg!(feature = "rust_decimal") { + return Err($crate::type_checking::Error::AmbiguousNumericType { + fallback: $crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?), + }); + } + return Ok($crate::select_input_type!($bigdecimal_ty $(, $bigdecimal_input)?)); } )* @@ -311,12 +339,24 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.date_time, DateTimeCrate::Time | DateTimeCrate::Inferred) { $( if <$time_ty as sqlx_core::types::Type<$database>>::type_info() == *info { + if cfg!(feature = "chrono") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: stringify!($time_ty), + }); + } + return Ok(stringify!($time_ty)); } )* $( if <$time_ty as sqlx_core::types::Type<$database>>::compatible(info) { + if cfg!(feature = "chrono") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: stringify!($time_ty), + }); + } + return Ok(stringify!($time_ty)); } )* @@ -356,12 +396,24 @@ macro_rules! impl_type_checking { if matches!(preferred_crates.numeric, NumericCrate::BigDecimal | NumericCrate::Inferred) { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info { + if cfg!(feature = "rust_decimal") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: stringify!($bigdecimal_ty), + }); + } + return Ok(stringify!($bigdecimal_ty)); } )* $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) { + if cfg!(feature = "rust_decimal") { + return Err($crate::type_checking::Error::AmbiguousDateTimeType { + fallback: stringify!($bigdecimal_ty), + }); + } + return Ok(stringify!($bigdecimal_ty)); } )* @@ -438,6 +490,24 @@ macro_rules! impl_type_checking { )* } + #[cfg(feature = "bigdecimal")] + { + $( + if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) { + return $crate::type_checking::FmtValue::debug::<$bigdecimal_ty>(value); + } + )* + } + + #[cfg(feature = "rust_decimal")] + { + $( + if <$rust_decimal_ty as sqlx_core::types::Type<$database>>::compatible(&info) { + return $crate::type_checking::FmtValue::debug::<$rust_decimal_ty>(value); + } + )* + } + $( $(#[$meta])? 
if <$ty as sqlx_core::types::Type<$database>>::compatible(&info) { diff --git a/sqlx-macros-core/src/query/args.rs b/sqlx-macros-core/src/query/args.rs index e546702e64..6195ee6bbc 100644 --- a/sqlx-macros-core/src/query/args.rs +++ b/sqlx-macros-core/src/query/args.rs @@ -6,6 +6,7 @@ use quote::{format_ident, quote, quote_spanned}; use sqlx_core::config::Config; use sqlx_core::describe::Describe; use sqlx_core::type_checking; +use sqlx_core::type_checking::Error; use sqlx_core::type_info::TypeInfo; use syn::spanned::Spanned; use syn::{Expr, ExprCast, ExprGroup, Type}; @@ -15,6 +16,7 @@ use syn::{Expr, ExprCast, ExprGroup, Type}; pub fn quote_args( input: &QueryMacroInput, config: &Config, + warnings: &mut Warnings, info: &Describe, ) -> crate::Result { let db_path = DB::db_path(); @@ -59,7 +61,7 @@ pub fn quote_args( return Ok(quote!()); } - let param_ty = get_param_type::(param_ty, config, i)?; + let param_ty = get_param_type::(param_ty, config, warnings, i)?; Ok(quote_spanned!(expr.span() => // this shouldn't actually run @@ -107,6 +109,7 @@ pub fn quote_args( fn get_param_type( param_ty: &DB::TypeInfo, config: &Config, + warnings: &mut Warnings, i: usize, ) -> crate::Result { if let Some(type_override) = config.macros.type_override(param_ty.name()) { @@ -156,6 +159,15 @@ fn get_param_type( (configured by `macros.preferred-crates.numeric` in sqlx.toml)", ) } + + Error::AmbiguousDateTimeType { fallback } => { + warnings.ambiguous_datetime = true; + return Ok(fallback.parse()?); + } + Error::AmbiguousNumericType { fallback } => { + warnings.ambiguous_numeric = true; + return Ok(fallback.parse()?); + } }; Err(message.into()) diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index cb2d8fc6a7..70ccd828b1 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -282,7 +282,9 @@ where } } - let args_tokens = args::quote_args(&input, config, &data.describe)?; + let mut warnings = Warnings::default(); + + let args_tokens = args::quote_args(&input, config, &mut warnings, &data.describe)?; let query_args = format_ident!("query_args"); @@ -301,7 +303,7 @@ where } else { match input.record_type { RecordType::Generated => { - let columns = output::columns_to_rust::(&data.describe, config)?; + let columns = output::columns_to_rust::(&data.describe, config, &mut warnings)?; let record_name: Type = syn::parse_str("Record").unwrap(); @@ -337,28 +339,32 @@ where record_tokens } RecordType::Given(ref out_ty) => { - let columns = output::columns_to_rust::(&data.describe, config)?; + let columns = output::columns_to_rust::(&data.describe, config, &mut warnings)?; output::quote_query_as::(&input, out_ty, &query_args, &columns) } - RecordType::Scalar => { - output::quote_query_scalar::(&input, config, &query_args, &data.describe)? - } + RecordType::Scalar => output::quote_query_scalar::( + &input, + config, + &mut warnings, + &query_args, + &data.describe, + )?, } }; - let mut warnings = TokenStream::new(); + let mut warnings_out = TokenStream::new(); - if config.macros.preferred_crates.date_time.is_inferred() { + if warnings.ambiguous_datetime { // Warns if the date-time crate is inferred but both `chrono` and `time` are enabled - warnings.extend(quote! { + warnings_out.extend(quote! 
{ ::sqlx::warn_on_ambiguous_inferred_date_time_crate(); }); } - if config.macros.preferred_crates.numeric.is_inferred() { + if warnings.ambiguous_numeric { // Warns if the numeric crate is inferred but both `bigdecimal` and `rust_decimal` are enabled - warnings.extend(quote! { + warnings_out.extend(quote! { ::sqlx::warn_on_ambiguous_inferred_numeric_crate(); }); } @@ -369,7 +375,7 @@ where { use ::sqlx::Arguments as _; - #warnings + #warnings_out #args_tokens diff --git a/sqlx-macros-core/src/query/output.rs b/sqlx-macros-core/src/query/output.rs index 1a145e3a75..987dcaa3cb 100644 --- a/sqlx-macros-core/src/query/output.rs +++ b/sqlx-macros-core/src/query/output.rs @@ -7,7 +7,7 @@ use sqlx_core::describe::Describe; use crate::database::DatabaseExt; -use crate::query::QueryMacroInput; +use crate::query::{QueryMacroInput, Warnings}; use sqlx_core::config::Config; use sqlx_core::type_checking; use sqlx_core::type_checking::TypeChecking; @@ -82,15 +82,17 @@ impl Display for DisplayColumn<'_> { pub fn columns_to_rust( describe: &Describe, config: &Config, + warnings: &mut Warnings, ) -> crate::Result> { (0..describe.columns().len()) - .map(|i| column_to_rust(describe, config, i)) + .map(|i| column_to_rust(describe, config, warnings, i)) .collect::>>() } fn column_to_rust( describe: &Describe, config: &Config, + warnings: &mut Warnings, i: usize, ) -> crate::Result { let column = &describe.columns()[i]; @@ -116,7 +118,7 @@ fn column_to_rust( (ColumnTypeOverride::Wildcard, true) => ColumnType::OptWildcard, (ColumnTypeOverride::None, _) => { - let type_ = get_column_type::(config, i, column); + let type_ = get_column_type::(config, warnings, i, column); if !nullable { ColumnType::Exact(type_) } else { @@ -204,6 +206,7 @@ pub fn quote_query_as( pub fn quote_query_scalar( input: &QueryMacroInput, config: &Config, + warnings: &mut Warnings, bind_args: &Ident, describe: &Describe, ) -> crate::Result { @@ -218,10 +221,10 @@ pub fn quote_query_scalar( } // attempt to parse a column override, otherwise fall back to the inferred type of the column - let ty = if let Ok(rust_col) = column_to_rust(describe, config, 0) { + let ty = if let Ok(rust_col) = column_to_rust(describe, config, warnings, 0) { rust_col.type_.to_token_stream() } else if input.checked { - let ty = get_column_type::(config, 0, &columns[0]); + let ty = get_column_type::(config, warnings, 0, &columns[0]); if describe.nullable(0).unwrap_or(true) { quote! 
{ ::std::option::Option<#ty> } } else { @@ -239,7 +242,12 @@ pub fn quote_query_scalar( }) } -fn get_column_type(config: &Config, i: usize, column: &DB::Column) -> TokenStream { +fn get_column_type( + config: &Config, + warnings: &mut Warnings, + i: usize, + column: &DB::Column, +) -> TokenStream { if let ColumnOrigin::Table(origin) = column.origin() { if let Some(column_override) = config.macros.column_override(&origin.table, &origin.name) { return column_override.parse().unwrap(); @@ -322,6 +330,14 @@ fn get_column_type(config: &Config, i: usize, column: &DB::Colu } ) } + type_checking::Error::AmbiguousDateTimeType { fallback } => { + warnings.ambiguous_datetime = true; + return fallback.parse().unwrap(); + } + type_checking::Error::AmbiguousNumericType { fallback } => { + warnings.ambiguous_numeric = true; + return fallback.parse().unwrap(); + } }; syn::Error::new(Span::call_site(), message).to_compile_error() From 0342c6fc77e58252384f529b9106e91d95ec7c04 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 30 Mar 2025 14:35:57 -0700 Subject: [PATCH 50/78] fix: remove unused imports --- sqlx-macros-core/src/migrate.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index 729d61ce91..cfc3394757 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -6,8 +6,7 @@ use std::path::{Path, PathBuf}; use proc_macro2::{Span, TokenStream}; use quote::{quote, ToTokens, TokenStreamExt}; use sqlx_core::config::Config; -use sqlx_core::migrate::{Migration, MigrationType, ResolveConfig}; -use syn::spanned::Spanned; +use sqlx_core::migrate::{Migration, MigrationType}; use syn::LitStr; pub const DEFAULT_PATH: &str = "./migrations"; From 8f1a8b0ba3dbb5a834dfd252387ad024ad23c964 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 30 Mar 2025 23:33:07 -0700 Subject: [PATCH 51/78] fix: `sqlx mig add` behavior and tests --- sqlx-cli/src/opt.rs | 51 ++++++++++++++++++++++----------------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 99f8ff95f6..04acfbdc69 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -481,11 +481,32 @@ impl AddMigrationOpts { match (self.timestamp, self.sequential, default_versioning) { (true, false, _) | (false, false, DefaultVersioning::Timestamp) => next_timestamp(), - (false, true, _) | (false, false, DefaultVersioning::Sequential) => { - next_sequential(migrator).unwrap_or_else(|| fmt_sequential(1)) - } + (false, true, _) | (false, false, DefaultVersioning::Sequential) => fmt_sequential( + migrator + .migrations + .last() + .map_or(1, |migration| migration.version + 1), + ), (false, false, DefaultVersioning::Inferred) => { - next_sequential(migrator).unwrap_or_else(next_timestamp) + migrator + .migrations + .rchunks(2) + .next() + .and_then(|migrations| { + match migrations { + [previous, latest] => { + // If the latest two versions differ by 1, infer sequential. 
+ (latest.version - previous.version == 1) + .then_some(latest.version + 1) + } + [latest] => { + // If only one migration exists and its version is 0 or 1, infer sequential + matches!(latest.version, 0 | 1).then_some(latest.version + 1) + } + _ => unreachable!(), + } + }) + .map_or_else(next_timestamp, fmt_sequential) } (true, true, _) => unreachable!("BUG: Clap should have rejected this case"), } @@ -496,28 +517,6 @@ fn next_timestamp() -> String { Utc::now().format("%Y%m%d%H%M%S").to_string() } -fn next_sequential(migrator: &Migrator) -> Option { - let next_version = migrator - .migrations - .rchunks(2) - .next() - .and_then(|migrations| { - match migrations { - [previous, latest] => { - // If the latest two versions differ by 1, infer sequential. - (latest.version - previous.version == 1).then_some(latest.version + 1) - } - [latest] => { - // If only one migration exists and its version is 0 or 1, infer sequential - matches!(latest.version, 0 | 1).then_some(latest.version + 1) - } - _ => unreachable!(), - } - }); - - next_version.map(fmt_sequential) -} - fn fmt_sequential(version: i64) -> String { format!("{version:04}") } From 41df655118f205180f613551d03e18bc98a0cb30 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 31 Mar 2025 01:53:25 -0700 Subject: [PATCH 52/78] fix: restore original type-checking order --- sqlx-core/src/type_checking.rs | 58 +++++++++++++++++----------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/sqlx-core/src/type_checking.rs b/sqlx-core/src/type_checking.rs index 3d6ab35c94..155bc4bcd1 100644 --- a/sqlx-core/src/type_checking.rs +++ b/sqlx-core/src/type_checking.rs @@ -190,6 +190,22 @@ macro_rules! impl_type_checking { use $crate::config::macros::{DateTimeCrate, NumericCrate}; use $crate::type_checking::Error; + // Check non-special types + // --------------------- + $( + $(#[$meta])? + if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { + return Ok($crate::select_input_type!($ty $(, $input)?)); + } + )* + + $( + $(#[$meta])? + if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { + return Ok($crate::select_input_type!($ty $(, $input)?)); + } + )* + // Check `macros.preferred-crates.date-time` // // Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled. @@ -304,32 +320,32 @@ macro_rules! impl_type_checking { return Err(Error::NumericCrateFeatureNotEnabled); } - // Check all other types + Err(Error::NoMappingFound) + } + + fn return_type_for_id( + info: &Self::TypeInfo, + preferred_crates: &$crate::config::macros::PreferredCrates, + ) -> Result<&'static str, $crate::type_checking::Error> { + use $crate::config::macros::{DateTimeCrate, NumericCrate}; + use $crate::type_checking::Error; + + // Check non-special types // --------------------- $( $(#[$meta])? if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { - return Ok($crate::select_input_type!($ty $(, $input)?)); + return Ok(stringify!($ty)); } )* $( $(#[$meta])? 
if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { - return Ok($crate::select_input_type!($ty $(, $input)?)); + return Ok(stringify!($ty)); } )* - Err(Error::NoMappingFound) - } - - fn return_type_for_id( - info: &Self::TypeInfo, - preferred_crates: &$crate::config::macros::PreferredCrates, - ) -> Result<&'static str, $crate::type_checking::Error> { - use $crate::config::macros::{DateTimeCrate, NumericCrate}; - use $crate::type_checking::Error; - // Check `macros.preferred-crates.date-time` // // Due to legacy reasons, `time` takes precedent over `chrono` if both are enabled. @@ -444,22 +460,6 @@ macro_rules! impl_type_checking { return Err(Error::NumericCrateFeatureNotEnabled); } - // Check all other types - // --------------------- - $( - $(#[$meta])? - if <$ty as sqlx_core::types::Type<$database>>::type_info() == *info { - return Ok(stringify!($ty)); - } - )* - - $( - $(#[$meta])? - if <$ty as sqlx_core::types::Type<$database>>::compatible(info) { - return Ok(stringify!($ty)); - } - )* - Err(Error::NoMappingFound) } From 57b711fbb99303eb9a247f74a319ed4719e215c2 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 31 Mar 2025 03:09:47 -0700 Subject: [PATCH 53/78] fix: deprecation warning in `tests/postgres/macros.rs` --- sqlx-core/src/type_checking.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sqlx-core/src/type_checking.rs b/sqlx-core/src/type_checking.rs index 155bc4bcd1..a3ded72abb 100644 --- a/sqlx-core/src/type_checking.rs +++ b/sqlx-core/src/type_checking.rs @@ -413,7 +413,7 @@ macro_rules! impl_type_checking { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::type_info() == *info { if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { + return Err($crate::type_checking::Error::AmbiguousNumericType { fallback: stringify!($bigdecimal_ty), }); } @@ -425,7 +425,7 @@ macro_rules! 
impl_type_checking { $( if <$bigdecimal_ty as sqlx_core::types::Type<$database>>::compatible(info) { if cfg!(feature = "rust_decimal") { - return Err($crate::type_checking::Error::AmbiguousDateTimeType { + return Err($crate::type_checking::Error::AmbiguousNumericType { fallback: stringify!($bigdecimal_ty), }); } From 0d8774926b132327205e27e6fca71282cafa01b7 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 10 Apr 2025 19:27:45 -0700 Subject: [PATCH 54/78] feat: create postgres/multi-database example --- .github/workflows/examples.yml | 18 ++++++++ Cargo.lock | 42 +++++++++++++++++++ Cargo.toml | 1 + examples/postgres/multi-database/Cargo.toml | 9 ++-- examples/postgres/multi-database/README.md | 2 +- .../multi-database/accounts/src/lib.rs | 6 +-- .../multi-database/payments/src/lib.rs | 40 ++++++++++-------- examples/postgres/multi-database/src/main.rs | 34 +++++---------- 8 files changed, 103 insertions(+), 49 deletions(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 5b87183473..a7ff576515 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -175,6 +175,24 @@ jobs: DATABASE_URL: postgres://postgres:password@localhost:5432/mockable-todos run: cargo run -p sqlx-example-postgres-mockable-todos + - name: Multi-Database (Setup) + working-directory: examples/postgres/multi-database + env: + DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database + ACCOUNTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-accounts + PAYMENTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-payments + run: | + (cd accounts && sqlx db setup) + (cd payments && sqlx db setup) + sqlx db setup + + - name: Multi-Database (Run) + env: + DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database + ACCOUNTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-accounts + PAYMENTS_DATABASE_URL: postgres://postgres:password@localhost:5432/multi-database-payments + run: cargo run -p sqlx-example-postgres-multi-database + - name: Multi-Tenant (Setup) working-directory: examples/postgres/multi-tenant env: diff --git a/Cargo.lock b/Cargo.lock index 2838e39c51..ae1e9b216f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3856,6 +3856,48 @@ dependencies = [ "tokio", ] +[[package]] +name = "sqlx-example-postgres-multi-database" +version = "0.8.3" +dependencies = [ + "axum 0.8.1", + "color-eyre", + "dotenvy", + "rand", + "rust_decimal", + "sqlx", + "sqlx-example-postgres-multi-database-accounts", + "sqlx-example-postgres-multi-database-payments", + "tokio", + "tracing-subscriber", +] + +[[package]] +name = "sqlx-example-postgres-multi-database-accounts" +version = "0.1.0" +dependencies = [ + "argon2 0.5.3", + "password-hash 0.5.0", + "rand", + "serde", + "sqlx", + "thiserror 1.0.69", + "time", + "tokio", + "uuid", +] + +[[package]] +name = "sqlx-example-postgres-multi-database-payments" +version = "0.1.0" +dependencies = [ + "rust_decimal", + "sqlx", + "sqlx-example-postgres-multi-database-accounts", + "time", + "uuid", +] + [[package]] name = "sqlx-example-postgres-multi-tenant" version = "0.8.3" diff --git a/Cargo.toml b/Cargo.toml index e12b16c5bb..054020a253 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,6 +17,7 @@ members = [ "examples/postgres/json", "examples/postgres/listen", "examples/postgres/mockable-todos", + "examples/postgres/multi-database", "examples/postgres/multi-tenant", "examples/postgres/todos", "examples/postgres/transaction", diff --git 
a/examples/postgres/multi-database/Cargo.toml b/examples/postgres/multi-database/Cargo.toml index c5e01621b8..1d1279c7e5 100644 --- a/examples/postgres/multi-database/Cargo.toml +++ b/examples/postgres/multi-database/Cargo.toml @@ -11,6 +11,10 @@ authors.workspace = true [dependencies] tokio = { version = "1", features = ["rt-multi-thread", "macros"] } +sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } + +axum = { version = "0.8.1", features = ["macros"] } + color-eyre = "0.6.3" dotenvy = "0.15.7" tracing-subscriber = "0.3.19" @@ -19,11 +23,6 @@ rust_decimal = "1.36.0" rand = "0.8.5" -[dependencies.sqlx] -# version = "0.9.0" -workspace = true -features = ["runtime-tokio", "postgres", "migrate", "sqlx-toml"] - [dependencies.accounts] path = "accounts" package = "sqlx-example-postgres-multi-database-accounts" diff --git a/examples/postgres/multi-database/README.md b/examples/postgres/multi-database/README.md index c7804f90d1..126565e952 100644 --- a/examples/postgres/multi-database/README.md +++ b/examples/postgres/multi-database/README.md @@ -1,4 +1,4 @@ -# Using Multiple Databases with `sqlx.toml` +# Axum App with multi-database Database This example project involves three crates, each owning a different schema in one database, with their own set of migrations. diff --git a/examples/postgres/multi-database/accounts/src/lib.rs b/examples/postgres/multi-database/accounts/src/lib.rs index a543d2fd45..0339865621 100644 --- a/examples/postgres/multi-database/accounts/src/lib.rs +++ b/examples/postgres/multi-database/accounts/src/lib.rs @@ -1,12 +1,12 @@ use argon2::{password_hash, Argon2, PasswordHasher, PasswordVerifier}; use password_hash::PasswordHashString; use rand::distributions::{Alphanumeric, DistString}; -use sqlx::PgPool; +use sqlx::{Acquire, Executor, PgPool, PgTransaction, Postgres}; use std::sync::Arc; use uuid::Uuid; -use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; use tokio::sync::Semaphore; +use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; #[derive(sqlx::Type, Copy, Clone, Debug, serde::Deserialize, serde::Serialize)] #[sqlx(transparent)] @@ -118,7 +118,7 @@ impl AccountsManager { .max_connections(5) .connect_with(opts) .await?; - + sqlx::migrate!() .run(&pool) .await diff --git a/examples/postgres/multi-database/payments/src/lib.rs b/examples/postgres/multi-database/payments/src/lib.rs index 356d173a5f..d194e1ceb0 100644 --- a/examples/postgres/multi-database/payments/src/lib.rs +++ b/examples/postgres/multi-database/payments/src/lib.rs @@ -1,8 +1,8 @@ use accounts::{AccountId, AccountsManager}; -use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; use sqlx::{Acquire, PgConnection, PgPool, Postgres}; use time::OffsetDateTime; use uuid::Uuid; +use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; #[derive(sqlx::Type, Copy, Clone, Debug)] #[sqlx(transparent)] @@ -41,13 +41,17 @@ pub struct PaymentsManager { } impl PaymentsManager { - pub async fn setup(opts: PgConnectOptions) -> sqlx::Result { + pub async fn setup( + opts: PgConnectOptions, + ) -> sqlx::Result { let pool = PgPoolOptions::new() .max_connections(5) .connect_with(opts) .await?; - sqlx::migrate!().run(&pool).await?; + sqlx::migrate!() + .run(&pool) + .await?; Ok(Self { pool }) } @@ -57,8 +61,8 @@ impl PaymentsManager { pub async fn create( &self, account_id: AccountId, - currency: &str, - amount: rust_decimal::Decimal, + currency: &str, + amount: rust_decimal::Decimal, ) -> sqlx::Result { // Check-out a connection to avoid paying the overhead of acquiring one 
for each call. let mut conn = self.pool.acquire().await?; @@ -87,8 +91,8 @@ impl PaymentsManager { currency, amount, ) - .fetch_one(&mut *conn) - .await?; + .fetch_one(&mut *conn) + .await?; // We then create the record with the payment vendor... let external_payment_id = "foobar1234"; @@ -99,17 +103,17 @@ impl PaymentsManager { // the order of columns gets baked into the binary, so if it changes between compile time and // run-time, you may run into errors. let payment = sqlx::query_as!( - Payment, - "update payment \ + Payment, + "update payment \ set status = $1, external_payment_id = $2 \ where payment_id = $3 \ returning *", - PaymentStatus::Created, - external_payment_id, - payment_id.0, - ) - .fetch_one(&mut *conn) - .await?; + PaymentStatus::Created, + external_payment_id, + payment_id.0, + ) + .fetch_one(&mut *conn) + .await?; Ok(payment) } @@ -121,7 +125,9 @@ impl PaymentsManager { "select * from payment where payment_id = $1", payment_id.0 ) - .fetch_optional(&self.pool) - .await + .fetch_optional(&self.pool) + .await } } + + diff --git a/examples/postgres/multi-database/src/main.rs b/examples/postgres/multi-database/src/main.rs index 263eff8e50..94a96fcf2b 100644 --- a/examples/postgres/multi-database/src/main.rs +++ b/examples/postgres/multi-database/src/main.rs @@ -1,7 +1,6 @@ use accounts::AccountsManager; use color_eyre::eyre; use color_eyre::eyre::{Context, OptionExt}; -use payments::PaymentsManager; use rand::distributions::{Alphanumeric, DistString}; use sqlx::Connection; @@ -18,24 +17,14 @@ async fn main() -> eyre::Result<()> { .await .wrap_err("could not connect to database")?; - let accounts = AccountsManager::setup( - dotenvy::var("ACCOUNTS_DATABASE_URL") - .wrap_err("ACCOUNTS_DATABASE_URL must be set")? - .parse() - .wrap_err("error parsing ACCOUNTS_DATABASE_URL")?, - 1, - ) - .await - .wrap_err("error initializing AccountsManager")?; + // Runs migration for `accounts` internally. + let accounts = AccountsManager::setup(&mut conn, 1) + .await + .wrap_err("error initializing AccountsManager")?; - let payments = PaymentsManager::setup( - dotenvy::var("PAYMENTS_DATABASE_URL") - .wrap_err("PAYMENTS_DATABASE_URL must be set")? - .parse() - .wrap_err("error parsing PAYMENTS_DATABASE_URL")?, - ) - .await - .wrap_err("error initializing PaymentsManager")?; + payments::migrate(&mut conn) + .await + .wrap_err("error running payments migrations")?; // For simplicity's sake, imagine each of these might be invoked by different request routes // in a web application. @@ -50,7 +39,7 @@ async fn main() -> eyre::Result<()> { let account_id = accounts // Takes ownership of the password string because it's sent to another thread for hashing. - .create(&user_email, user_password.clone()) + .create(&mut txn, &user_email, user_password.clone()) .await .wrap_err("error creating account")?; @@ -64,7 +53,7 @@ async fn main() -> eyre::Result<()> { // POST /session // Log the user in. let session = accounts - .create_session(&user_email, user_password.clone()) + .create_session(&mut conn, &user_email, user_password.clone()) .await .wrap_err("error creating session")?; @@ -87,7 +76,7 @@ async fn main() -> eyre::Result<()> { // may be easier for the frontend. By setting the cookie with `HttpOnly: true`, // it's impossible for malicious Javascript on the client to access and steal the session token. let account_id = accounts - .auth_session(&session.session_token.0) + .auth_session(&mut conn, &session.session_token.0) .await .wrap_err("error authenticating session")? 
.ok_or_eyre("session does not exist")?; @@ -95,8 +84,7 @@ async fn main() -> eyre::Result<()> { let purchase_amount: rust_decimal::Decimal = "12.34".parse().unwrap(); // Then, because the user is making a purchase, we record a payment. - let payment = payments - .create(account_id, "USD", purchase_amount) + let payment = payments::create(&mut conn, account_id, "USD", purchase_amount) .await .wrap_err("error creating payment")?; From 19d1e4afef669d1f0584e36c5204ed4ff96418b3 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 12 Apr 2025 20:10:09 -0700 Subject: [PATCH 55/78] fix: examples/postgres/multi-database --- examples/postgres/multi-database/src/main.rs | 28 +++++++++++++------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/examples/postgres/multi-database/src/main.rs b/examples/postgres/multi-database/src/main.rs index 94a96fcf2b..d94253dd42 100644 --- a/examples/postgres/multi-database/src/main.rs +++ b/examples/postgres/multi-database/src/main.rs @@ -2,6 +2,7 @@ use accounts::AccountsManager; use color_eyre::eyre; use color_eyre::eyre::{Context, OptionExt}; use rand::distributions::{Alphanumeric, DistString}; +use payments::PaymentsManager; use sqlx::Connection; #[tokio::main] @@ -17,14 +18,23 @@ async fn main() -> eyre::Result<()> { .await .wrap_err("could not connect to database")?; - // Runs migration for `accounts` internally. - let accounts = AccountsManager::setup(&mut conn, 1) + let accounts = AccountsManager::setup( + dotenvy::var("ACCOUNTS_DATABASE_URL") + .wrap_err("ACCOUNTS_DATABASE_URL must be set")? + .parse() + .wrap_err("error parsing ACCOUNTS_DATABASE_URL")?, + 1) .await .wrap_err("error initializing AccountsManager")?; - - payments::migrate(&mut conn) + + let payments = PaymentsManager::setup( + dotenvy::var("PAYMENTS_DATABASE_URL") + .wrap_err("PAYMENTS_DATABASE_URL must be set")? + .parse() + .wrap_err("error parsing PAYMENTS_DATABASE_URL")?, + ) .await - .wrap_err("error running payments migrations")?; + .wrap_err("error initializing PaymentsManager")?; // For simplicity's sake, imagine each of these might be invoked by different request routes // in a web application. @@ -39,7 +49,7 @@ async fn main() -> eyre::Result<()> { let account_id = accounts // Takes ownership of the password string because it's sent to another thread for hashing. - .create(&mut txn, &user_email, user_password.clone()) + .create(&user_email, user_password.clone()) .await .wrap_err("error creating account")?; @@ -53,7 +63,7 @@ async fn main() -> eyre::Result<()> { // POST /session // Log the user in. let session = accounts - .create_session(&mut conn, &user_email, user_password.clone()) + .create_session(&user_email, user_password.clone()) .await .wrap_err("error creating session")?; @@ -76,7 +86,7 @@ async fn main() -> eyre::Result<()> { // may be easier for the frontend. By setting the cookie with `HttpOnly: true`, // it's impossible for malicious Javascript on the client to access and steal the session token. let account_id = accounts - .auth_session(&mut conn, &session.session_token.0) + .auth_session(&session.session_token.0) .await .wrap_err("error authenticating session")? .ok_or_eyre("session does not exist")?; @@ -84,7 +94,7 @@ async fn main() -> eyre::Result<()> { let purchase_amount: rust_decimal::Decimal = "12.34".parse().unwrap(); // Then, because the user is making a purchase, we record a payment. 
- let payment = payments::create(&mut conn, account_id, "USD", purchase_amount) + let payment = payments.create(account_id, "USD", purchase_amount) .await .wrap_err("error creating payment")?; From 9b60b10ed15110bc651a99c38f20a30c4f76bb60 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 12 Apr 2025 20:29:23 -0700 Subject: [PATCH 56/78] fix: cargo fmt --- .../multi-database/accounts/src/lib.rs | 4 +- .../multi-database/payments/src/lib.rs | 40 ++++++++----------- examples/postgres/multi-database/src/main.rs | 20 +++++----- 3 files changed, 30 insertions(+), 34 deletions(-) diff --git a/examples/postgres/multi-database/accounts/src/lib.rs b/examples/postgres/multi-database/accounts/src/lib.rs index 0339865621..79c1199fbf 100644 --- a/examples/postgres/multi-database/accounts/src/lib.rs +++ b/examples/postgres/multi-database/accounts/src/lib.rs @@ -5,8 +5,8 @@ use sqlx::{Acquire, Executor, PgPool, PgTransaction, Postgres}; use std::sync::Arc; use uuid::Uuid; -use tokio::sync::Semaphore; use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; +use tokio::sync::Semaphore; #[derive(sqlx::Type, Copy, Clone, Debug, serde::Deserialize, serde::Serialize)] #[sqlx(transparent)] @@ -118,7 +118,7 @@ impl AccountsManager { .max_connections(5) .connect_with(opts) .await?; - + sqlx::migrate!() .run(&pool) .await diff --git a/examples/postgres/multi-database/payments/src/lib.rs b/examples/postgres/multi-database/payments/src/lib.rs index d194e1ceb0..356d173a5f 100644 --- a/examples/postgres/multi-database/payments/src/lib.rs +++ b/examples/postgres/multi-database/payments/src/lib.rs @@ -1,8 +1,8 @@ use accounts::{AccountId, AccountsManager}; +use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; use sqlx::{Acquire, PgConnection, PgPool, Postgres}; use time::OffsetDateTime; use uuid::Uuid; -use sqlx::postgres::{PgConnectOptions, PgPoolOptions}; #[derive(sqlx::Type, Copy, Clone, Debug)] #[sqlx(transparent)] @@ -41,17 +41,13 @@ pub struct PaymentsManager { } impl PaymentsManager { - pub async fn setup( - opts: PgConnectOptions, - ) -> sqlx::Result { + pub async fn setup(opts: PgConnectOptions) -> sqlx::Result { let pool = PgPoolOptions::new() .max_connections(5) .connect_with(opts) .await?; - sqlx::migrate!() - .run(&pool) - .await?; + sqlx::migrate!().run(&pool).await?; Ok(Self { pool }) } @@ -61,8 +57,8 @@ impl PaymentsManager { pub async fn create( &self, account_id: AccountId, - currency: &str, - amount: rust_decimal::Decimal, + currency: &str, + amount: rust_decimal::Decimal, ) -> sqlx::Result { // Check-out a connection to avoid paying the overhead of acquiring one for each call. let mut conn = self.pool.acquire().await?; @@ -91,8 +87,8 @@ impl PaymentsManager { currency, amount, ) - .fetch_one(&mut *conn) - .await?; + .fetch_one(&mut *conn) + .await?; // We then create the record with the payment vendor... let external_payment_id = "foobar1234"; @@ -103,17 +99,17 @@ impl PaymentsManager { // the order of columns gets baked into the binary, so if it changes between compile time and // run-time, you may run into errors. 
let payment = sqlx::query_as!( - Payment, - "update payment \ + Payment, + "update payment \ set status = $1, external_payment_id = $2 \ where payment_id = $3 \ returning *", - PaymentStatus::Created, - external_payment_id, - payment_id.0, - ) - .fetch_one(&mut *conn) - .await?; + PaymentStatus::Created, + external_payment_id, + payment_id.0, + ) + .fetch_one(&mut *conn) + .await?; Ok(payment) } @@ -125,9 +121,7 @@ impl PaymentsManager { "select * from payment where payment_id = $1", payment_id.0 ) - .fetch_optional(&self.pool) - .await + .fetch_optional(&self.pool) + .await } } - - diff --git a/examples/postgres/multi-database/src/main.rs b/examples/postgres/multi-database/src/main.rs index d94253dd42..263eff8e50 100644 --- a/examples/postgres/multi-database/src/main.rs +++ b/examples/postgres/multi-database/src/main.rs @@ -1,8 +1,8 @@ use accounts::AccountsManager; use color_eyre::eyre; use color_eyre::eyre::{Context, OptionExt}; -use rand::distributions::{Alphanumeric, DistString}; use payments::PaymentsManager; +use rand::distributions::{Alphanumeric, DistString}; use sqlx::Connection; #[tokio::main] @@ -18,23 +18,24 @@ async fn main() -> eyre::Result<()> { .await .wrap_err("could not connect to database")?; - let accounts = AccountsManager::setup( + let accounts = AccountsManager::setup( dotenvy::var("ACCOUNTS_DATABASE_URL") .wrap_err("ACCOUNTS_DATABASE_URL must be set")? .parse() .wrap_err("error parsing ACCOUNTS_DATABASE_URL")?, - 1) - .await - .wrap_err("error initializing AccountsManager")?; - + 1, + ) + .await + .wrap_err("error initializing AccountsManager")?; + let payments = PaymentsManager::setup( dotenvy::var("PAYMENTS_DATABASE_URL") .wrap_err("PAYMENTS_DATABASE_URL must be set")? .parse() .wrap_err("error parsing PAYMENTS_DATABASE_URL")?, ) - .await - .wrap_err("error initializing PaymentsManager")?; + .await + .wrap_err("error initializing PaymentsManager")?; // For simplicity's sake, imagine each of these might be invoked by different request routes // in a web application. @@ -94,7 +95,8 @@ async fn main() -> eyre::Result<()> { let purchase_amount: rust_decimal::Decimal = "12.34".parse().unwrap(); // Then, because the user is making a purchase, we record a payment. 
- let payment = payments.create(account_id, "USD", purchase_amount) + let payment = payments + .create(account_id, "USD", purchase_amount) .await .wrap_err("error creating payment")?; From d903aef792217069fe1566ff93f95844e3004101 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Fri, 2 May 2025 18:02:47 -0700 Subject: [PATCH 57/78] chore: add tests for config `migrate.defaults` --- sqlx-cli/tests/add.rs | 46 +++++++++++++----------------------- sqlx-cli/tests/common/mod.rs | 7 +++--- 2 files changed, 21 insertions(+), 32 deletions(-) diff --git a/sqlx-cli/tests/add.rs b/sqlx-cli/tests/add.rs index cebbb51d53..ad1f61e75b 100644 --- a/sqlx-cli/tests/add.rs +++ b/sqlx-cli/tests/add.rs @@ -82,31 +82,22 @@ impl Index for AddMigrationsResult { } } -struct AddMigrations { +struct AddMigrations<'a> { tempdir: TempDir, - config_arg: Option, + config: Option<&'a str>, } -impl AddMigrations { +impl<'a> AddMigrations<'a> { fn new() -> anyhow::Result { anyhow::Ok(Self { tempdir: TempDir::new()?, - config_arg: None, + config: None, }) } - fn with_config(mut self, filename: &str) -> anyhow::Result { - let path = format!("./tests/assets/{filename}"); - - let path = std::fs::canonicalize(&path) - .with_context(|| format!("error canonicalizing path {path:?}"))?; - - let path = path - .to_str() - .with_context(|| format!("canonicalized version of path {path:?} is not UTF-8"))?; - - self.config_arg = Some(format!("--config={path}")); - Ok(self) + fn with_config(mut self, config: &'a str) -> Self { + self.config = Some(config); + self } fn run( @@ -122,7 +113,9 @@ impl AddMigrations { .args( [ vec!["sqlx", "migrate", "add", description], - self.config_arg.as_deref().map_or(vec![], |arg| vec![arg]), + self.config + .map(|path| vec!["--config", path]) + .unwrap_or_default(), match revesible { true => vec!["-r"], false => vec![], @@ -138,7 +131,6 @@ impl AddMigrations { ] .concat(), ) - .env("RUST_BACKTRACE", "1") .assert(); if expect_success { cmd_result.success(); @@ -321,24 +313,19 @@ fn add_migration_timestamp_reversible() -> anyhow::Result<()> { #[test] fn add_migration_config_default_type_reversible() -> anyhow::Result<()> { let files = AddMigrations::new()? - .with_config("config_default_type_reversible.toml")? + .with_config("sqlx-cli/tests/assets/config_default_type_reversible.toml") // Type should default to reversible without any flags .run("hello world", false, false, false, true)? .run("hello world2", false, false, false, true)? .run("hello world3", false, false, false, true)? .fs_output()?; - assert_eq!(files.len(), 6); - files.assert_is_reversible(); + assert_eq!(files.len(), 3); + files.assert_is_not_reversible(); files[0].assert_is_timestamp(); - assert_eq!(files[1].id, files[0].id); - + files[1].assert_is_timestamp(); files[2].assert_is_timestamp(); - assert_eq!(files[3].id, files[2].id); - - files[4].assert_is_timestamp(); - assert_eq!(files[5].id, files[4].id); Ok(()) } @@ -346,7 +333,7 @@ fn add_migration_config_default_type_reversible() -> anyhow::Result<()> { #[test] fn add_migration_config_default_versioning_sequential() -> anyhow::Result<()> { let files = AddMigrations::new()? - .with_config("config_default_versioning_sequential.toml")? + .with_config("sqlx-cli/tests/assets/config_default_versioning_sequential.toml") // Versioning should default to timestamp without any flags .run("hello world", false, false, false, true)? .run("hello world2", false, false, false, true)? 
@@ -383,7 +370,8 @@ fn add_migration_config_default_versioning_timestamp() -> anyhow::Result<()> { assert_eq!(files[2].id, 3); // Now set a config that uses `default-versioning = "timestamp"` - let migrations = migrations.with_config("config_default_versioning_timestamp.toml")?; + let migrations = + migrations.with_config("sqlx-cli/tests/assets/config_default_versioning_timestamp.toml"); // Now the default should be a timestamp migrations diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index b4a70b7bec..07bb830cb5 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -10,7 +10,6 @@ use std::{ pub struct TestDatabase { file_path: PathBuf, migrations: String, - config: &'static Config, } impl TestDatabase { @@ -33,7 +32,6 @@ impl TestDatabase { let this = Self { file_path, migrations: String::from(migrations_path.to_str().unwrap()), - config: Config::from_crate(), }; Command::cargo_bin("cargo-sqlx") @@ -92,7 +90,10 @@ impl TestDatabase { let mut conn = SqliteConnection::connect(&self.connection_string()) .await .unwrap(); - conn.list_applied_migrations(self.config.migrate.table_name()) + + let config = Config::default(); + + conn.list_applied_migrations(config.migrate.table_name()) .await .unwrap() .iter() From 502d2b3a40aff3094fb0467a95303736f6bead56 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 3 May 2025 01:13:44 -0700 Subject: [PATCH 58/78] fix: sqlx-cli/tests/add.rs --- Cargo.lock | 4 ++-- sqlx-cli/tests/add.rs | 16 +++++++--------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ae1e9b216f..cbfab34433 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3858,7 +3858,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-multi-database" -version = "0.8.3" +version = "0.8.5" dependencies = [ "axum 0.8.1", "color-eyre", @@ -3900,7 +3900,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-multi-tenant" -version = "0.8.3" +version = "0.8.5" dependencies = [ "axum 0.8.1", "color-eyre", diff --git a/sqlx-cli/tests/add.rs b/sqlx-cli/tests/add.rs index ad1f61e75b..8df18271fc 100644 --- a/sqlx-cli/tests/add.rs +++ b/sqlx-cli/tests/add.rs @@ -82,21 +82,21 @@ impl Index for AddMigrationsResult { } } -struct AddMigrations<'a> { +struct AddMigrations { tempdir: TempDir, - config: Option<&'a str>, + config_arg: Option, } -impl<'a> AddMigrations<'a> { +impl AddMigrations { fn new() -> anyhow::Result { anyhow::Ok(Self { tempdir: TempDir::new()?, - config: None, + config_arg: None, }) } - fn with_config(mut self, config: &'a str) -> Self { - self.config = Some(config); + fn with_config(mut self, path: &str) -> Self { + self.config_arg = Some(format!("--config={path}")); self } @@ -113,9 +113,7 @@ impl<'a> AddMigrations<'a> { .args( [ vec!["sqlx", "migrate", "add", description], - self.config - .map(|path| vec!["--config", path]) - .unwrap_or_default(), + self.config_arg.as_deref().map_or(vec![], |arg| vec![arg]), match revesible { true => vec!["-r"], false => vec![], From 21bd2420c9d2097b8444f2c4afbb10d75e162994 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 3 May 2025 01:37:30 -0700 Subject: [PATCH 59/78] feat(cli): add `--config` override to all relevant commands --- sqlx-cli/src/lib.rs | 63 +++++++++++++++++++++++++---------------- sqlx-cli/src/migrate.rs | 4 ++- sqlx-cli/src/opt.rs | 46 ++++++++++++++++++++++++++++++ 3 files changed, 87 insertions(+), 26 deletions(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 52fb232b5a..189b4df80e 100644 --- 
a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -1,9 +1,7 @@ use std::future::Future; use std::io; -use std::path::PathBuf; use std::time::Duration; -use anyhow::{Context, Result}; use futures::{Future, TryFutureExt}; use sqlx::{AnyConnection, Connection}; @@ -34,8 +32,11 @@ pub fn maybe_apply_dotenv() { dotenvy::dotenv().ok(); } -pub async fn run(opt: Opt) -> Result<()> { - let config = config_from_current_dir().await?; +pub async fn run(opt: Opt) -> anyhow::Result<()> { + // This `select!` is here so that when the process receives a `SIGINT` (CTRL + C), + // the futures currently running on this task get dropped before the program exits. + // This is currently necessary for the consumers of the `dialoguer` crate to restore + // the user's terminal if the process is interrupted while a dialog is being displayed. let ctrlc_fut = signal::ctrl_c(); let do_run_fut = do_run(opt); @@ -51,12 +52,10 @@ pub async fn run(opt: Opt) -> Result<()> { } } -async fn do_run(opt: Opt) -> Result<()> { - let config = config_from_current_dir().await?; - +async fn do_run(opt: Opt) -> anyhow::Result<()> { match opt.command { Command::Migrate(migrate) => match migrate.command { - MigrateCommand::Add(opts) => migrate::add(config, opts).await?, + MigrateCommand::Add(opts) => migrate::add(opts).await?, MigrateCommand::Run { source, config, @@ -65,6 +64,8 @@ async fn do_run(opt: Opt) -> Result<()> { mut connect_opts, target_version, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; migrate::run( @@ -85,6 +86,8 @@ async fn do_run(opt: Opt) -> Result<()> { mut connect_opts, target_version, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; migrate::revert( @@ -99,43 +102,66 @@ async fn do_run(opt: Opt) -> Result<()> { } MigrateCommand::Info { source, + config, mut connect_opts, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; migrate::info(config, &source, &connect_opts).await? } - MigrateCommand::BuildScript { source, force } => { + MigrateCommand::BuildScript { + source, + config, + force, + } => { + let config = config.load_config().await?; + migrate::build_script(config, &source, force)? } }, Command::Database(database) => match database.command { - DatabaseCommand::Create { mut connect_opts } => { + DatabaseCommand::Create { + config, + mut connect_opts, + } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; database::create(&connect_opts).await? } DatabaseCommand::Drop { confirmation, + config, mut connect_opts, force, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; database::drop(&connect_opts, !confirmation.yes, force).await? } DatabaseCommand::Reset { confirmation, source, + config, mut connect_opts, force, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? } DatabaseCommand::Setup { source, + config, mut connect_opts, } => { + let config = config.load_config().await?; + connect_opts.populate_db_url(config)?; database::setup(config, &source, &connect_opts).await? } @@ -147,7 +173,9 @@ async fn do_run(opt: Opt) -> Result<()> { workspace, mut connect_opts, args, + config, } => { + let config = config.load_config().await?; connect_opts.populate_db_url(config)?; prepare::run(check, all, workspace, connect_opts, args).await? 
} @@ -203,18 +231,3 @@ where ) .await } - -async fn config_from_current_dir() -> anyhow::Result<&'static Config> { - // Tokio does file I/O on a background task anyway - tokio::task::spawn_blocking(|| { - let path = PathBuf::from("sqlx.toml"); - - if path.exists() { - eprintln!("Found `sqlx.toml` in current directory; reading..."); - } - - Config::read_with_or_default(move || Ok(path)) - }) - .await - .context("unexpected error loading config") -} diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 3618fbe7a3..d25902cd7b 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -13,7 +13,9 @@ use std::fs::{self, File}; use std::path::Path; use std::time::Duration; -pub async fn add(config: &Config, opts: AddMigrationOpts) -> anyhow::Result<()> { +pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { + let config = opts.config.load_config().await?; + let source = opts.source.resolve(config); fs::create_dir_all(source).context("Unable to create migrations directory")?; diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 04acfbdc69..ffa874f737 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -11,6 +11,7 @@ use clap_complete::Shell; use sqlx::migrate::Migrator; use std::env; use std::ops::{Deref, Not}; +use std::path::PathBuf; const HELP_STYLES: Styles = Styles::styled() .header(AnsiColor::Blue.on_default().bold()) @@ -122,6 +123,9 @@ pub enum DatabaseCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + #[clap(flatten)] connect_opts: ConnectOpts, @@ -135,6 +139,9 @@ pub enum DatabaseCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + #[clap(flatten)] connect_opts: ConnectOpts, }, @@ -221,6 +228,9 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + /// List all the migrations to be run without applying #[clap(long)] dry_run: bool, @@ -242,6 +252,9 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + /// List the migration to be reverted without applying #[clap(long)] dry_run: bool, @@ -264,6 +277,9 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + #[clap(flatten)] connect_opts: ConnectOpts, }, @@ -275,6 +291,9 @@ pub enum MigrateCommand { #[clap(flatten)] source: MigrationSourceOpt, + #[clap(flatten)] + config: ConfigOpt, + /// Overwrite the build script if it already exists. #[clap(long)] force: bool, @@ -288,6 +307,9 @@ pub struct AddMigrationOpts { #[clap(flatten)] pub source: MigrationSourceOpt, + #[clap(flatten)] + pub config: ConfigOpt, + /// If set, create an up-migration only. Conflicts with `--reversible`. #[clap(long, conflicts_with = "reversible")] simple: bool, @@ -424,6 +446,30 @@ impl ConnectOpts { } } +impl ConfigOpt { + pub async fn load_config(&self) -> anyhow::Result<&'static Config> { + let path = self.config.clone(); + + // Tokio does file I/O on a background task anyway + tokio::task::spawn_blocking(|| { + if let Some(path) = path { + let err_str = format!("error reading config from {path:?}"); + Config::try_read_with(|| Ok(path)).context(err_str) + } else { + let path = PathBuf::from("sqlx.toml"); + + if path.exists() { + eprintln!("Found `sqlx.toml` in current directory; reading..."); + } + + Ok(Config::read_with_or_default(move || Ok(path))) + } + }) + .await + .context("unexpected error loading config")? 
+ } +} + /// Argument for automatic confirmation. #[derive(Args, Copy, Clone, Debug)] pub struct Confirmation { From 635670fa6ffa7e99865ec02f6d91195b266fdc38 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 3 May 2025 01:47:40 -0700 Subject: [PATCH 60/78] chore: run `sqlx mig add` test with `RUST_BACKTRACE=1` --- sqlx-cli/tests/add.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/sqlx-cli/tests/add.rs b/sqlx-cli/tests/add.rs index 8df18271fc..66c89e5806 100644 --- a/sqlx-cli/tests/add.rs +++ b/sqlx-cli/tests/add.rs @@ -129,6 +129,7 @@ impl AddMigrations { ] .concat(), ) + .env("RUST_BACKTRACE", "1") .assert(); if expect_success { cmd_result.success(); From 12b823ebbf400bbfe058b65decd81d783dcff25e Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 3 May 2025 01:58:36 -0700 Subject: [PATCH 61/78] fix: properly canonicalize config path for `sqlx mig add` test --- sqlx-cli/tests/add.rs | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/sqlx-cli/tests/add.rs b/sqlx-cli/tests/add.rs index 66c89e5806..1210577866 100644 --- a/sqlx-cli/tests/add.rs +++ b/sqlx-cli/tests/add.rs @@ -95,9 +95,18 @@ impl AddMigrations { }) } - fn with_config(mut self, path: &str) -> Self { + fn with_config(mut self, filename: &str) -> anyhow::Result { + let path = format!("sqlx-cli/tests/assets/{filename}"); + + let path = std::fs::canonicalize(&path) + .with_context(|| format!("error canonicalizing path {path:?}"))?; + + let path = path + .to_str() + .with_context(|| format!("canonicalized version of path {path:?} is not UTF-8"))?; + self.config_arg = Some(format!("--config={path}")); - self + Ok(self) } fn run( @@ -312,7 +321,7 @@ fn add_migration_timestamp_reversible() -> anyhow::Result<()> { #[test] fn add_migration_config_default_type_reversible() -> anyhow::Result<()> { let files = AddMigrations::new()? - .with_config("sqlx-cli/tests/assets/config_default_type_reversible.toml") + .with_config("config_default_type_reversible.toml")? // Type should default to reversible without any flags .run("hello world", false, false, false, true)? .run("hello world2", false, false, false, true)? @@ -332,7 +341,7 @@ fn add_migration_config_default_type_reversible() -> anyhow::Result<()> { #[test] fn add_migration_config_default_versioning_sequential() -> anyhow::Result<()> { let files = AddMigrations::new()? - .with_config("sqlx-cli/tests/assets/config_default_versioning_sequential.toml") + .with_config("config_default_versioning_sequential.toml")? // Versioning should default to timestamp without any flags .run("hello world", false, false, false, true)? .run("hello world2", false, false, false, true)? 
@@ -369,8 +378,7 @@ fn add_migration_config_default_versioning_timestamp() -> anyhow::Result<()> { assert_eq!(files[2].id, 3); // Now set a config that uses `default-versioning = "timestamp"` - let migrations = - migrations.with_config("sqlx-cli/tests/assets/config_default_versioning_timestamp.toml"); + let migrations = migrations.with_config("config_default_versioning_timestamp.toml")?; // Now the default should be a timestamp migrations From b4cc0d338027769c9af2380996b5d7d7c1258b80 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sat, 3 May 2025 02:07:06 -0700 Subject: [PATCH 62/78] fix: get `sqlx mig add` test passing --- sqlx-cli/tests/add.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sqlx-cli/tests/add.rs b/sqlx-cli/tests/add.rs index 1210577866..cebbb51d53 100644 --- a/sqlx-cli/tests/add.rs +++ b/sqlx-cli/tests/add.rs @@ -96,7 +96,7 @@ impl AddMigrations { } fn with_config(mut self, filename: &str) -> anyhow::Result { - let path = format!("sqlx-cli/tests/assets/{filename}"); + let path = format!("./tests/assets/{filename}"); let path = std::fs::canonicalize(&path) .with_context(|| format!("error canonicalizing path {path:?}"))?; @@ -328,12 +328,17 @@ fn add_migration_config_default_type_reversible() -> anyhow::Result<()> { .run("hello world3", false, false, false, true)? .fs_output()?; - assert_eq!(files.len(), 3); - files.assert_is_not_reversible(); + assert_eq!(files.len(), 6); + files.assert_is_reversible(); files[0].assert_is_timestamp(); - files[1].assert_is_timestamp(); + assert_eq!(files[1].id, files[0].id); + files[2].assert_is_timestamp(); + assert_eq!(files[3].id, files[2].id); + + files[4].assert_is_timestamp(); + assert_eq!(files[5].id, files[4].id); Ok(()) } From 2d007c0613c29db264ab3383bac1c07febbb43a2 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Thu, 15 May 2025 18:47:55 -0700 Subject: [PATCH 63/78] fix(cli): test `migrate.ignored-chars`, fix bugs --- sqlx-cli/src/migrate.rs | 24 ++++++++---------------- sqlx-cli/src/opt.rs | 12 ++++++++++-- sqlx-cli/tests/common/mod.rs | 6 ++++-- 3 files changed, 22 insertions(+), 20 deletions(-) diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index d25902cd7b..45a38b202a 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -2,9 +2,7 @@ use crate::config::Config; use crate::opt::{AddMigrationOpts, ConnectOpts, MigrationSourceOpt}; use anyhow::{bail, Context}; use console::style; -use sqlx::migrate::{ - AppliedMigration, Migrate, MigrateError, MigrationType, Migrator, ResolveWith, -}; +use sqlx::migrate::{AppliedMigration, Migrate, MigrateError, MigrationType, Migrator}; use sqlx::Connection; use std::borrow::Cow; use std::collections::{HashMap, HashSet}; @@ -16,11 +14,11 @@ use std::time::Duration; pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { let config = opts.config.load_config().await?; - let source = opts.source.resolve(config); + let source = opts.source.resolve_path(config); fs::create_dir_all(source).context("Unable to create migrations directory")?; - let migrator = Migrator::new(Path::new(source)).await?; + let migrator = opts.source.resolve(config).await?; let version_prefix = opts.version_prefix(config, &migrator); @@ -130,13 +128,8 @@ pub async fn info( migration_source: &MigrationSourceOpt, connect_opts: &ConnectOpts, ) -> anyhow::Result<()> { - let source = migration_source.resolve(config); + let migrator = migration_source.resolve(config).await?; - let migrator = Migrator::new(ResolveWith( - Path::new(source), - 
config.migrate.to_resolve_config(), - )) - .await?; let mut conn = crate::connect(connect_opts).await?; // FIXME: we shouldn't actually be creating anything here @@ -228,9 +221,8 @@ pub async fn run( ignore_missing: bool, target_version: Option, ) -> anyhow::Result<()> { - let source = migration_source.resolve(config); + let migrator = migration_source.resolve(config).await?; - let migrator = Migrator::new(Path::new(source)).await?; if let Some(target_version) = target_version { if !migrator.version_exists(target_version) { bail!(MigrateError::VersionNotPresent(target_version)); @@ -331,8 +323,8 @@ pub async fn revert( ignore_missing: bool, target_version: Option, ) -> anyhow::Result<()> { - let source = migration_source.resolve(config); - let migrator = Migrator::new(Path::new(source)).await?; + let migrator = migration_source.resolve(config).await?; + if let Some(target_version) = target_version { if target_version != 0 && !migrator.version_exists(target_version) { bail!(MigrateError::VersionNotPresent(target_version)); @@ -432,7 +424,7 @@ pub fn build_script( migration_source: &MigrationSourceOpt, force: bool, ) -> anyhow::Result<()> { - let source = migration_source.resolve(config); + let source = migration_source.resolve_path(config); anyhow::ensure!( Path::new("Cargo.toml").exists(), diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index ffa874f737..272c343c50 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -8,7 +8,7 @@ use clap::{ }; #[cfg(feature = "completions")] use clap_complete::Shell; -use sqlx::migrate::Migrator; +use sqlx::migrate::{MigrateError, Migrator, ResolveWith}; use std::env; use std::ops::{Deref, Not}; use std::path::PathBuf; @@ -342,13 +342,21 @@ pub struct MigrationSourceOpt { } impl MigrationSourceOpt { - pub fn resolve<'a>(&'a self, config: &'a Config) -> &'a str { + pub fn resolve_path<'a>(&'a self, config: &'a Config) -> &'a str { if let Some(source) = &self.source { return source; } config.migrate.migrations_dir() } + + pub async fn resolve(&self, config: &Config) -> Result { + Migrator::new(ResolveWith( + self.resolve_path(config), + config.migrate.to_resolve_config(), + )) + .await + } } /// Argument for the database URL. 
diff --git a/sqlx-cli/tests/common/mod.rs b/sqlx-cli/tests/common/mod.rs index 07bb830cb5..66e7924859 100644 --- a/sqlx-cli/tests/common/mod.rs +++ b/sqlx-cli/tests/common/mod.rs @@ -9,7 +9,8 @@ use std::{ pub struct TestDatabase { file_path: PathBuf, - migrations: String, + migrations_path: PathBuf, + pub config_path: Option, } impl TestDatabase { @@ -31,7 +32,8 @@ impl TestDatabase { let this = Self { file_path, - migrations: String::from(migrations_path.to_str().unwrap()), + migrations_path: Path::new("tests").join(migrations), + config_path: None, }; Command::cargo_bin("cargo-sqlx") From 5453589d2aa809f131224664b6fb262eae6d426d Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 1 Jun 2025 21:03:40 -0700 Subject: [PATCH 64/78] feat: create `macros.preferred-crates` example --- .github/workflows/examples.yml | 11 ++++++ Cargo.lock | 35 +++++++++++++++++++ Cargo.toml | 1 + examples/postgres/preferred-crates/Cargo.toml | 6 ++-- .../postgres/preferred-crates/src/main.rs | 6 +--- .../uses-rust-decimal/src/lib.rs | 20 +++++------ .../preferred-crates/uses-time/src/lib.rs | 7 +--- 7 files changed, 61 insertions(+), 25 deletions(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index a7ff576515..943e64a101 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -207,6 +207,17 @@ jobs: DATABASE_URL: postgres://postgres:password@localhost:5432/multi-tenant run: cargo run -p sqlx-example-postgres-multi-tenant + - name: Preferred-Crates (Setup) + working-directory: examples/postgres/preferred-crates + env: + DATABASE_URL: postgres://postgres:password@localhost:5432/preferred-crates + run: sqlx migrate run + + - name: Multi-Tenant (Run) + env: + DATABASE_URL: postgres://postgres:password@localhost:5432/preferred-crates + run: cargo run -p sqlx-example-postgres-preferred-crates + - name: TODOs (Setup) working-directory: examples/postgres/todos env: diff --git a/Cargo.lock b/Cargo.lock index cbfab34433..515572dfdc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3940,6 +3940,41 @@ dependencies = [ "uuid", ] +[[package]] +name = "sqlx-example-postgres-preferred-crates" +version = "0.8.5" +dependencies = [ + "anyhow", + "chrono", + "dotenvy", + "serde", + "sqlx", + "sqlx-example-postgres-preferred-crates-uses-rust-decimal", + "sqlx-example-postgres-preferred-crates-uses-time", + "tokio", + "uuid", +] + +[[package]] +name = "sqlx-example-postgres-preferred-crates-uses-rust-decimal" +version = "0.8.5" +dependencies = [ + "chrono", + "rust_decimal", + "sqlx", + "uuid", +] + +[[package]] +name = "sqlx-example-postgres-preferred-crates-uses-time" +version = "0.8.5" +dependencies = [ + "serde", + "sqlx", + "time", + "uuid", +] + [[package]] name = "sqlx-example-postgres-todos" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 054020a253..216a79e0ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -19,6 +19,7 @@ members = [ "examples/postgres/mockable-todos", "examples/postgres/multi-database", "examples/postgres/multi-tenant", + "examples/postgres/preferred-crates", "examples/postgres/todos", "examples/postgres/transaction", "examples/sqlite/todos", diff --git a/examples/postgres/preferred-crates/Cargo.toml b/examples/postgres/preferred-crates/Cargo.toml index cf6b0aca1d..f442b751db 100644 --- a/examples/postgres/preferred-crates/Cargo.toml +++ b/examples/postgres/preferred-crates/Cargo.toml @@ -21,9 +21,9 @@ workspace = true features = ["rt-multi-thread", "macros"] [dependencies.sqlx] -# version = "0.9.0" -workspace = true -features = 
["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive", "migrate", "sqlx-toml"] +path = "../../.." +version = "0.8" +features = ["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive"] [dependencies.uses-rust-decimal] path = "uses-rust-decimal" diff --git a/examples/postgres/preferred-crates/src/main.rs b/examples/postgres/preferred-crates/src/main.rs index 5d6e4dc9b8..df595eb885 100644 --- a/examples/postgres/preferred-crates/src/main.rs +++ b/examples/postgres/preferred-crates/src/main.rs @@ -34,7 +34,7 @@ async fn main() -> anyhow::Result<()> { uses_rust_decimal::create_table(&mut conn).await?; uses_time::create_table(&mut conn).await?; - let user_id = sqlx::query_scalar!( + let user_id = sqlx::query!( "insert into users(username, password_hash) values($1, $2) returning id", "user_foo", "", @@ -46,8 +46,6 @@ async fn main() -> anyhow::Result<()> { .fetch_one(&mut conn) .await?; - println!("Created user: {user:?}"); - let session = uses_time::create_session(&mut conn, SessionData { user_id }, SESSION_DURATION).await?; @@ -64,7 +62,5 @@ async fn main() -> anyhow::Result<()> { .await? .expect("expected purchase"); - println!("Created purchase: {purchase:?}"); - Ok(()) } diff --git a/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs b/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs index f955b737d1..8f062b52aa 100644 --- a/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs +++ b/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs @@ -1,8 +1,8 @@ use chrono::{DateTime, Utc}; use sqlx::PgExecutor; -#[derive(sqlx::FromRow, Debug)] -pub struct Purchase { +#[derive(sqlx::FromRow)] +struct Purchase { pub id: Uuid, pub user_id: Uuid, pub amount: Decimal, @@ -21,7 +21,7 @@ pub async fn create_table(e: impl PgExecutor<'_>) -> sqlx::Result<()> { user_id uuid not null, \ amount numeric not null check(amount > 0), \ description text not null, \ - created_at timestamptz not null default now() \ + created_at timestamptz not null \ ); ", ) @@ -37,14 +37,12 @@ pub async fn create_purchase( amount: Decimal, description: &str, ) -> sqlx::Result { - sqlx::query_scalar( - "insert into purchases(user_id, amount, description) values ($1, $2, $3) returning id", - ) - .bind(user_id) - .bind(amount) - .bind(description) - .fetch_one(e) - .await + sqlx::query_scalar("insert into purchases(user_id, amount, description) values ($1, $2, $3)") + .bind(user_id) + .bind(amount) + .bind(description) + .fetch_one(e) + .await } pub async fn get_purchase(e: impl PgExecutor<'_>, id: Uuid) -> sqlx::Result> { diff --git a/examples/postgres/preferred-crates/uses-time/src/lib.rs b/examples/postgres/preferred-crates/uses-time/src/lib.rs index 4fb3377880..6c9dbee82e 100644 --- a/examples/postgres/preferred-crates/uses-time/src/lib.rs +++ b/examples/postgres/preferred-crates/uses-time/src/lib.rs @@ -37,12 +37,7 @@ pub async fn create_session( data: D, valid_duration: Duration, ) -> sqlx::Result> { - // Round down to the nearest second because - // Postgres doesn't support precision higher than 1 microsecond anyway. 
- let created_at = OffsetDateTime::now_utc() - .replace_nanosecond(0) - .expect("0 nanoseconds should be in range"); - + let created_at = OffsetDateTime::now_utc(); let expires_at = created_at + valid_duration; let id: Uuid = sqlx::query_scalar( From 929939a7f402e04a8a90d33e021eedb308cf0388 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 1 Jun 2025 23:28:10 -0700 Subject: [PATCH 65/78] fix(examples): use workspace `sqlx` --- examples/postgres/multi-database/Cargo.toml | 7 +++++-- examples/postgres/multi-tenant/Cargo.toml | 7 +++++-- examples/postgres/preferred-crates/Cargo.toml | 4 ++-- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/examples/postgres/multi-database/Cargo.toml b/examples/postgres/multi-database/Cargo.toml index 1d1279c7e5..a40c210e82 100644 --- a/examples/postgres/multi-database/Cargo.toml +++ b/examples/postgres/multi-database/Cargo.toml @@ -11,8 +11,6 @@ authors.workspace = true [dependencies] tokio = { version = "1", features = ["rt-multi-thread", "macros"] } -sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } - axum = { version = "0.8.1", features = ["macros"] } color-eyre = "0.6.3" @@ -23,6 +21,11 @@ rust_decimal = "1.36.0" rand = "0.8.5" +[dependencies.sqlx] +# version = "0.9.0" +workspace = true +features = ["runtime-tokio", "postgres"] + [dependencies.accounts] path = "accounts" package = "sqlx-example-postgres-multi-database-accounts" diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml index 200fcfd2e8..c855403ec3 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -11,8 +11,6 @@ authors.workspace = true [dependencies] tokio = { version = "1", features = ["rt-multi-thread", "macros"] } -sqlx = { path = "../../..", version = "0.8.3", features = ["runtime-tokio", "postgres"] } - axum = { version = "0.8.1", features = ["macros"] } color-eyre = "0.6.3" @@ -23,6 +21,11 @@ rust_decimal = "1.36.0" rand = "0.8.5" +[dependencies.sqlx] +# version = "0.9.0" +workspace = true +features = ["runtime-tokio", "postgres"] + [dependencies.accounts] path = "accounts" package = "sqlx-example-postgres-multi-tenant-accounts" diff --git a/examples/postgres/preferred-crates/Cargo.toml b/examples/postgres/preferred-crates/Cargo.toml index f442b751db..d01985c705 100644 --- a/examples/postgres/preferred-crates/Cargo.toml +++ b/examples/postgres/preferred-crates/Cargo.toml @@ -21,8 +21,8 @@ workspace = true features = ["rt-multi-thread", "macros"] [dependencies.sqlx] -path = "../../.." 
-version = "0.8" +# version = "0.9.0" +workspace = true features = ["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive"] [dependencies.uses-rust-decimal] From aa43ced9a4156655f4751bbff4bdb319269218b4 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Sun, 1 Jun 2025 23:55:24 -0700 Subject: [PATCH 66/78] fix: examples --- Cargo.lock | 10 +++++----- examples/postgres/multi-database/Cargo.toml | 2 +- examples/postgres/multi-database/accounts/src/lib.rs | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 515572dfdc..045b406515 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3858,7 +3858,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-multi-database" -version = "0.8.5" +version = "0.9.0-alpha.1" dependencies = [ "axum 0.8.1", "color-eyre", @@ -3900,7 +3900,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-multi-tenant" -version = "0.8.5" +version = "0.9.0-alpha.1" dependencies = [ "axum 0.8.1", "color-eyre", @@ -3942,7 +3942,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-preferred-crates" -version = "0.8.5" +version = "0.9.0-alpha.1" dependencies = [ "anyhow", "chrono", @@ -3957,7 +3957,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-preferred-crates-uses-rust-decimal" -version = "0.8.5" +version = "0.9.0-alpha.1" dependencies = [ "chrono", "rust_decimal", @@ -3967,7 +3967,7 @@ dependencies = [ [[package]] name = "sqlx-example-postgres-preferred-crates-uses-time" -version = "0.8.5" +version = "0.9.0-alpha.1" dependencies = [ "serde", "sqlx", diff --git a/examples/postgres/multi-database/Cargo.toml b/examples/postgres/multi-database/Cargo.toml index a40c210e82..c4aeccd741 100644 --- a/examples/postgres/multi-database/Cargo.toml +++ b/examples/postgres/multi-database/Cargo.toml @@ -24,7 +24,7 @@ rand = "0.8.5" [dependencies.sqlx] # version = "0.9.0" workspace = true -features = ["runtime-tokio", "postgres"] +features = ["runtime-tokio", "postgres", "migrate"] [dependencies.accounts] path = "accounts" diff --git a/examples/postgres/multi-database/accounts/src/lib.rs b/examples/postgres/multi-database/accounts/src/lib.rs index 79c1199fbf..a543d2fd45 100644 --- a/examples/postgres/multi-database/accounts/src/lib.rs +++ b/examples/postgres/multi-database/accounts/src/lib.rs @@ -1,7 +1,7 @@ use argon2::{password_hash, Argon2, PasswordHasher, PasswordVerifier}; use password_hash::PasswordHashString; use rand::distributions::{Alphanumeric, DistString}; -use sqlx::{Acquire, Executor, PgPool, PgTransaction, Postgres}; +use sqlx::PgPool; use std::sync::Arc; use uuid::Uuid; From 35b6a79b93c5316935da8c05136cebeb95764099 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 2 Jun 2025 01:18:51 -0700 Subject: [PATCH 67/78] fix: run `cargo fmt` --- sqlx-mysql/src/connection/executor.rs | 2 +- sqlx-postgres/src/connection/executor.rs | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/sqlx-mysql/src/connection/executor.rs b/sqlx-mysql/src/connection/executor.rs index 07748fe691..a6f8c20129 100644 --- a/sqlx-mysql/src/connection/executor.rs +++ b/sqlx-mysql/src/connection/executor.rs @@ -22,8 +22,8 @@ use futures_core::future::BoxFuture; use futures_core::stream::BoxStream; use futures_core::Stream; use futures_util::TryStreamExt; -use std::{borrow::Cow, pin::pin, sync::Arc}; use sqlx_core::column::{ColumnOrigin, TableColumn}; +use std::{borrow::Cow, pin::pin, sync::Arc}; impl MySqlConnection { async fn prepare_statement( diff --git 
a/sqlx-postgres/src/connection/executor.rs b/sqlx-postgres/src/connection/executor.rs index b6e972c4ee..93cf4ec6bc 100644 --- a/sqlx-postgres/src/connection/executor.rs +++ b/sqlx-postgres/src/connection/executor.rs @@ -182,7 +182,15 @@ impl PgConnection { return Ok((*statement).clone()); } - let statement = prepare(self, sql, parameters, metadata, persistent, fetch_column_origin).await?; + let statement = prepare( + self, + sql, + parameters, + metadata, + persistent, + fetch_column_origin, + ) + .await?; if persistent && self.inner.cache_statement.is_enabled() { if let Some((id, _)) = self.inner.cache_statement.insert(sql, statement.clone()) { From f57ee86c131d4df0bd0fd691e6a564eef6331e23 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 2 Jun 2025 02:43:30 -0700 Subject: [PATCH 68/78] fix: more example fixes --- examples/postgres/multi-tenant/Cargo.toml | 2 +- examples/postgres/multi-tenant/accounts/Cargo.toml | 6 +++++- examples/postgres/multi-tenant/payments/Cargo.toml | 7 +++++-- examples/postgres/preferred-crates/Cargo.toml | 2 +- 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml index c855403ec3..21bf765f59 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -24,7 +24,7 @@ rand = "0.8.5" [dependencies.sqlx] # version = "0.9.0" workspace = true -features = ["runtime-tokio", "postgres"] +features = ["runtime-tokio", "postgres", "migrate"] [dependencies.accounts] path = "accounts" diff --git a/examples/postgres/multi-tenant/accounts/Cargo.toml b/examples/postgres/multi-tenant/accounts/Cargo.toml index 33b185912c..40c365c607 100644 --- a/examples/postgres/multi-tenant/accounts/Cargo.toml +++ b/examples/postgres/multi-tenant/accounts/Cargo.toml @@ -4,7 +4,6 @@ version = "0.1.0" edition = "2021" [dependencies] -sqlx = { workspace = true, features = ["postgres", "time", "uuid", "macros", "sqlx-toml"] } tokio = { version = "1", features = ["rt", "sync"] } argon2 = { version = "0.5.3", features = ["password-hash"] } @@ -18,5 +17,10 @@ time = { version = "0.3.37", features = ["serde"] } serde = { version = "1.0.218", features = ["derive"] } +[dependencies.sqlx] +# version = "0.9.0" +workspace = true +features = ["postgres", "time", "uuid", "macros", "sqlx-toml", "migrate"] + [dev-dependencies] sqlx = { workspace = true, features = ["runtime-tokio"] } diff --git a/examples/postgres/multi-tenant/payments/Cargo.toml b/examples/postgres/multi-tenant/payments/Cargo.toml index 1f7d7c3f75..de15b21828 100644 --- a/examples/postgres/multi-tenant/payments/Cargo.toml +++ b/examples/postgres/multi-tenant/payments/Cargo.toml @@ -5,13 +5,16 @@ edition = "2021" [dependencies] -sqlx = { workspace = true, features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml"] } - rust_decimal = "1.36.0" time = "0.3.37" uuid = "1.12.1" +[dependencies.sqlx] +# version = "0.9.0" +workspace = true +features = ["postgres", "time", "uuid", "rust_decimal", "sqlx-toml", "migrate"] + [dependencies.accounts] path = "../accounts" package = "sqlx-example-postgres-multi-tenant-accounts" diff --git a/examples/postgres/preferred-crates/Cargo.toml b/examples/postgres/preferred-crates/Cargo.toml index d01985c705..612b6eb917 100644 --- a/examples/postgres/preferred-crates/Cargo.toml +++ b/examples/postgres/preferred-crates/Cargo.toml @@ -23,7 +23,7 @@ features = ["rt-multi-thread", "macros"] [dependencies.sqlx] # version = "0.9.0" workspace = true -features = 
["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive"] +features = ["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive", "migrate"] [dependencies.uses-rust-decimal] path = "uses-rust-decimal" From daca7a0af4c3319e6063ac1becb6ed9290023c38 Mon Sep 17 00:00:00 2001 From: Austin Bonander Date: Mon, 2 Jun 2025 02:53:17 -0700 Subject: [PATCH 69/78] fix(ci): preferred-crates setup --- .github/workflows/examples.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 943e64a101..d1b8ff4634 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -211,7 +211,7 @@ jobs: working-directory: examples/postgres/preferred-crates env: DATABASE_URL: postgres://postgres:password@localhost:5432/preferred-crates - run: sqlx migrate run + run: sqlx db setup - name: Multi-Tenant (Run) env: From 756ee0c2dc5509f3c247cd117890636be8bb9f37 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Mon, 2 Jun 2025 13:26:28 -0700 Subject: [PATCH 70/78] fix: axum-multi-tenant example locked to specific sqlx version --- examples/postgres/axum-multi-tenant/Cargo.toml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 examples/postgres/axum-multi-tenant/Cargo.toml diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml new file mode 100644 index 0000000000..c35df3575e --- /dev/null +++ b/examples/postgres/axum-multi-tenant/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "multi-tenant" +version.workspace = true +license.workspace = true +edition.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true +authors.workspace = true + +[dependencies] +accounts = { path = "accounts" } +payments = { path = "payments" } + +sqlx = { path = "../../..", features = ["runtime-tokio", "postgres"] } + +[lints] +workspace = true From 1956256aa6ea3e554368f87fa355f40384587d08 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Mon, 2 Jun 2025 13:37:52 -0700 Subject: [PATCH 71/78] import anyhow::Context trait in sqlx-cli/src/lib.rs since it was being used and causing a compile error --- sqlx-cli/src/lib.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 189b4df80e..c9f6ff4f18 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -6,6 +6,7 @@ use futures::{Future, TryFutureExt}; use sqlx::{AnyConnection, Connection}; use tokio::{select, signal}; +use anyhow::Context; use crate::opt::{Command, ConnectOpts, DatabaseCommand, MigrateCommand}; From 94c93d8407d2cbedc54d68e74c6b2a8adc03aa45 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Tue, 1 Jul 2025 12:49:28 -0700 Subject: [PATCH 72/78] rebased on upstream/main --- Cargo.lock | 297 +++--------------- Cargo.toml | 4 +- .../postgres/axum-multi-tenant/Cargo.toml | 18 -- examples/postgres/multi-database/Cargo.toml | 4 +- examples/postgres/multi-database/README.md | 2 +- examples/postgres/multi-tenant/Cargo.toml | 4 +- examples/postgres/multi-tenant/README.md | 2 +- examples/postgres/preferred-crates/Cargo.toml | 2 +- .../postgres/preferred-crates/src/main.rs | 6 +- .../uses-rust-decimal/src/lib.rs | 20 +- .../preferred-crates/uses-time/src/lib.rs | 7 +- examples/sqlite/extension/Cargo.toml | 2 +- examples/sqlite/extension/sqlx.toml | 4 +- examples/sqlite/extension/src/main.rs | 6 + sqlx-cli/src/lib.rs | 60 +++- sqlx-cli/src/migrate.rs | 8 +- sqlx-cli/src/opt.rs | 9 +- 
sqlx-core/src/any/connection/mod.rs | 5 +- sqlx-core/src/any/options.rs | 10 +- sqlx-core/src/config/common.rs | 6 - sqlx-core/src/config/macros.rs | 2 + sqlx-core/src/config/mod.rs | 96 ++---- sqlx-macros-core/Cargo.toml | 2 +- sqlx-macros-core/src/migrate.rs | 6 +- sqlx-macros-core/src/query/args.rs | 11 +- sqlx-macros-core/src/query/mod.rs | 8 +- sqlx-macros-core/src/test_attr.rs | 6 +- sqlx-postgres/src/connection/describe.rs | 144 --------- sqlx-sqlite/Cargo.toml | 2 + sqlx-sqlite/src/any.rs | 11 +- sqlx-sqlite/src/connection/describe.rs | 4 - sqlx-sqlite/src/lib.rs | 10 +- src/lib.rs | 3 +- 33 files changed, 199 insertions(+), 582 deletions(-) delete mode 100644 examples/postgres/axum-multi-tenant/Cargo.toml diff --git a/Cargo.lock b/Cargo.lock index 045b406515..fe56e255f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,17 +2,6 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "accounts" -version = "0.1.0" -dependencies = [ - "argon2 0.5.3", - "sqlx", - "thiserror 1.0.69", - "tokio", - "uuid", -] - [[package]] name = "addr2line" version = "0.21.0" @@ -392,16 +381,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43" dependencies = [ "async-trait", - "axum-core 0.2.9", - "axum-macros 0.2.3", + "axum-core", + "axum-macros", "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.32", + "http", + "http-body", + "hyper", "itoa", - "matchit 0.5.0", + "matchit", "memchr", "mime", "percent-encoding", @@ -409,49 +398,14 @@ dependencies = [ "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 0.1.2", + "sync_wrapper", "tokio", - "tower 0.4.13", + "tower", "tower-http", "tower-layer", "tower-service", ] -[[package]] -name = "axum" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" -dependencies = [ - "axum-core 0.5.0", - "axum-macros 0.5.0", - "bytes", - "form_urlencoded", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.6.0", - "hyper-util", - "itoa", - "matchit 0.8.4", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper 1.0.2", - "tokio", - "tower 0.5.2", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "axum-core" version = "0.2.9" @@ -461,31 +415,11 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.12", - "http-body 0.4.6", - "mime", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" -dependencies = [ - "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", + "http", + "http-body", "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper 1.0.2", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -500,17 +434,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "axum-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.96", -] - [[package]] name = "backoff" version = "0.4.0" @@ -1356,16 +1279,6 @@ 
dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "error-code" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64f18991e7bf11e7ffee451b5318b5c1a73c52d0d0ada6e5a3017c8c1ced6a21" -dependencies = [ - "libc", - "str-buf", -] - [[package]] name = "etcetera" version = "0.10.0" @@ -1793,17 +1706,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" version = "0.4.6" @@ -1811,30 +1713,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.12", - "pin-project-lite", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http 1.2.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" -dependencies = [ - "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", + "http", "pin-project-lite", ] @@ -1872,8 +1751,8 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http", + "http-body", "httparse", "httpdate", "itoa", @@ -1885,41 +1764,6 @@ dependencies = [ "want", ] -[[package]] -name = "hyper" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", -] - -[[package]] -name = "hyper-util" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" -dependencies = [ - "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "hyper 1.6.0", - "pin-project-lite", - "tokio", - "tower-service", -] - [[package]] name = "iana-time-zone" version = "0.1.61" @@ -2372,12 +2216,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb" -[[package]] -name = "matchit" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" - [[package]] name = "md-5" version = "0.10.6" @@ -2474,15 +2312,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "multi-tenant" -version = "0.8.3" -dependencies = [ - "accounts", - "payments", - "sqlx", -] - [[package]] name = "native-tls" version = "0.2.12" @@ -2746,13 +2575,6 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" -[[package]] -name = "payments" -version = "0.1.0" -dependencies = [ - "sqlx", -] - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -2969,7 +2791,7 @@ version = "3.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" dependencies = [ - "toml_edit 0.21.1", + "toml_edit", ] [[package]] @@ -3286,6 +3108,19 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "rustix" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d97817398dd4bb2e6da002002db259209759911da105da92bec29ccb12cf58bf" +dependencies = [ + "bitflags 2.7.0", + "errno", + "libc", + "linux-raw-sys 0.9.4", + "windows-sys 0.59.0", +] + [[package]] name = "rustls" version = "0.23.21" @@ -3459,16 +3294,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" -dependencies = [ - "itoa", - "serde", -] - [[package]] name = "serde_spanned" version = "0.6.8" @@ -3741,6 +3566,7 @@ dependencies = [ "hashbrown 0.15.2", "hashlink", "indexmap 2.7.0", + "ipnet", "ipnetwork", "log", "mac_address", @@ -3783,7 +3609,7 @@ version = "0.1.0" dependencies = [ "anyhow", "argon2 0.4.1", - "axum 0.5.17", + "axum", "dotenvy", "rand", "regex", @@ -3794,7 +3620,7 @@ dependencies = [ "thiserror 2.0.11", "time", "tokio", - "tower 0.4.13", + "tower", "tracing", "uuid", "validator", @@ -3860,7 +3686,6 @@ dependencies = [ name = "sqlx-example-postgres-multi-database" version = "0.9.0-alpha.1" dependencies = [ - "axum 0.8.1", "color-eyre", "dotenvy", "rand", @@ -3902,7 +3727,6 @@ dependencies = [ name = "sqlx-example-postgres-multi-tenant" version = "0.9.0-alpha.1" dependencies = [ - "axum 0.8.1", "color-eyre", "dotenvy", "rand", @@ -3994,6 +3818,15 @@ dependencies = [ "tokio", ] +[[package]] +name = "sqlx-example-sqlite-extension" +version = "0.1.0" +dependencies = [ + "anyhow", + "sqlx", + "tokio", +] + [[package]] name = "sqlx-example-sqlite-todos" version = "0.1.0" @@ -4338,12 +4171,6 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" - [[package]] name = "synstructure" version = "0.13.1" @@ -4571,7 +4398,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.22", + "toml_edit", ] [[package]] @@ -4583,17 +4410,6 @@ dependencies = [ "serde", ] -[[package]] -name = "toml_edit" -version = "0.22.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" -dependencies = [ - "indexmap 2.7.0", - "toml_datetime", - "winnow 0.5.40", -] - [[package]] name = "toml_edit" version = "0.22.22" @@ -4604,7 +4420,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.22", + "winnow", ] [[package]] @@ -4623,22 +4439,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tower" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper 1.0.2", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "tower-http" version = "0.3.5" @@ -4649,11 +4449,11 @@ 
dependencies = [ "bytes", "futures-core", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http", + "http-body", "http-range-header", "pin-project-lite", - "tower 0.4.13", + "tower", "tower-layer", "tower-service", ] @@ -5318,15 +5118,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winnow" -version = "0.6.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39281189af81c07ec09db316b302a3e67bf9bd7cbf6c820b50e35fee9c2fa980" -dependencies = [ - "memchr", -] - [[package]] name = "write16" version = "1.0.0" diff --git a/Cargo.toml b/Cargo.toml index 216a79e0ab..3521ee1a1b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,14 +59,14 @@ features = ["all-databases", "_unstable-all-types", "_unstable-doc", "sqlite-pre rustdoc-args = ["--cfg", "docsrs"] [features] -default = ["any", "macros", "migrate", "json", "sqlx-toml"] +default = ["any", "macros", "migrate", "json"] derive = ["sqlx-macros/derive"] macros = ["derive", "sqlx-macros/macros"] migrate = ["sqlx-core/migrate", "sqlx-macros?/migrate", "sqlx-mysql?/migrate", "sqlx-postgres?/migrate", "sqlx-sqlite?/migrate"] # Enable parsing of `sqlx.toml` for configuring macros and migrations. -sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml"] +sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-macros?/sqlx-toml", "sqlx-sqlite?/sqlx-toml"] # intended mainly for CI and docs all-databases = ["mysql", "sqlite", "postgres", "any"] diff --git a/examples/postgres/axum-multi-tenant/Cargo.toml b/examples/postgres/axum-multi-tenant/Cargo.toml deleted file mode 100644 index c35df3575e..0000000000 --- a/examples/postgres/axum-multi-tenant/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "multi-tenant" -version.workspace = true -license.workspace = true -edition.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -authors.workspace = true - -[dependencies] -accounts = { path = "accounts" } -payments = { path = "payments" } - -sqlx = { path = "../../..", features = ["runtime-tokio", "postgres"] } - -[lints] -workspace = true diff --git a/examples/postgres/multi-database/Cargo.toml b/examples/postgres/multi-database/Cargo.toml index c4aeccd741..c5e01621b8 100644 --- a/examples/postgres/multi-database/Cargo.toml +++ b/examples/postgres/multi-database/Cargo.toml @@ -11,8 +11,6 @@ authors.workspace = true [dependencies] tokio = { version = "1", features = ["rt-multi-thread", "macros"] } -axum = { version = "0.8.1", features = ["macros"] } - color-eyre = "0.6.3" dotenvy = "0.15.7" tracing-subscriber = "0.3.19" @@ -24,7 +22,7 @@ rand = "0.8.5" [dependencies.sqlx] # version = "0.9.0" workspace = true -features = ["runtime-tokio", "postgres", "migrate"] +features = ["runtime-tokio", "postgres", "migrate", "sqlx-toml"] [dependencies.accounts] path = "accounts" diff --git a/examples/postgres/multi-database/README.md b/examples/postgres/multi-database/README.md index 126565e952..c7804f90d1 100644 --- a/examples/postgres/multi-database/README.md +++ b/examples/postgres/multi-database/README.md @@ -1,4 +1,4 @@ -# Axum App with multi-database Database +# Using Multiple Databases with `sqlx.toml` This example project involves three crates, each owning a different schema in one database, with their own set of migrations. 
diff --git a/examples/postgres/multi-tenant/Cargo.toml b/examples/postgres/multi-tenant/Cargo.toml index 21bf765f59..a219cce2b8 100644 --- a/examples/postgres/multi-tenant/Cargo.toml +++ b/examples/postgres/multi-tenant/Cargo.toml @@ -11,8 +11,6 @@ authors.workspace = true [dependencies] tokio = { version = "1", features = ["rt-multi-thread", "macros"] } -axum = { version = "0.8.1", features = ["macros"] } - color-eyre = "0.6.3" dotenvy = "0.15.7" tracing-subscriber = "0.3.19" @@ -24,7 +22,7 @@ rand = "0.8.5" [dependencies.sqlx] # version = "0.9.0" workspace = true -features = ["runtime-tokio", "postgres", "migrate"] +features = ["runtime-tokio", "postgres", "migrate", "sqlx-toml"] [dependencies.accounts] path = "accounts" diff --git a/examples/postgres/multi-tenant/README.md b/examples/postgres/multi-tenant/README.md index 3688202690..01848a3f83 100644 --- a/examples/postgres/multi-tenant/README.md +++ b/examples/postgres/multi-tenant/README.md @@ -1,4 +1,4 @@ -# Axum App with Multi-tenant Database +# Multi-tenant Databases with `sqlx.toml` This example project involves three crates, each owning a different schema in one database, with their own set of migrations. diff --git a/examples/postgres/preferred-crates/Cargo.toml b/examples/postgres/preferred-crates/Cargo.toml index 612b6eb917..cf6b0aca1d 100644 --- a/examples/postgres/preferred-crates/Cargo.toml +++ b/examples/postgres/preferred-crates/Cargo.toml @@ -23,7 +23,7 @@ features = ["rt-multi-thread", "macros"] [dependencies.sqlx] # version = "0.9.0" workspace = true -features = ["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive", "migrate"] +features = ["runtime-tokio", "postgres", "bigdecimal", "chrono", "derive", "migrate", "sqlx-toml"] [dependencies.uses-rust-decimal] path = "uses-rust-decimal" diff --git a/examples/postgres/preferred-crates/src/main.rs b/examples/postgres/preferred-crates/src/main.rs index df595eb885..5d6e4dc9b8 100644 --- a/examples/postgres/preferred-crates/src/main.rs +++ b/examples/postgres/preferred-crates/src/main.rs @@ -34,7 +34,7 @@ async fn main() -> anyhow::Result<()> { uses_rust_decimal::create_table(&mut conn).await?; uses_time::create_table(&mut conn).await?; - let user_id = sqlx::query!( + let user_id = sqlx::query_scalar!( "insert into users(username, password_hash) values($1, $2) returning id", "user_foo", "", @@ -46,6 +46,8 @@ async fn main() -> anyhow::Result<()> { .fetch_one(&mut conn) .await?; + println!("Created user: {user:?}"); + let session = uses_time::create_session(&mut conn, SessionData { user_id }, SESSION_DURATION).await?; @@ -62,5 +64,7 @@ async fn main() -> anyhow::Result<()> { .await? 
.expect("expected purchase"); + println!("Created purchase: {purchase:?}"); + Ok(()) } diff --git a/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs b/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs index 8f062b52aa..f955b737d1 100644 --- a/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs +++ b/examples/postgres/preferred-crates/uses-rust-decimal/src/lib.rs @@ -1,8 +1,8 @@ use chrono::{DateTime, Utc}; use sqlx::PgExecutor; -#[derive(sqlx::FromRow)] -struct Purchase { +#[derive(sqlx::FromRow, Debug)] +pub struct Purchase { pub id: Uuid, pub user_id: Uuid, pub amount: Decimal, @@ -21,7 +21,7 @@ pub async fn create_table(e: impl PgExecutor<'_>) -> sqlx::Result<()> { user_id uuid not null, \ amount numeric not null check(amount > 0), \ description text not null, \ - created_at timestamptz not null \ + created_at timestamptz not null default now() \ ); ", ) @@ -37,12 +37,14 @@ pub async fn create_purchase( amount: Decimal, description: &str, ) -> sqlx::Result { - sqlx::query_scalar("insert into purchases(user_id, amount, description) values ($1, $2, $3)") - .bind(user_id) - .bind(amount) - .bind(description) - .fetch_one(e) - .await + sqlx::query_scalar( + "insert into purchases(user_id, amount, description) values ($1, $2, $3) returning id", + ) + .bind(user_id) + .bind(amount) + .bind(description) + .fetch_one(e) + .await } pub async fn get_purchase(e: impl PgExecutor<'_>, id: Uuid) -> sqlx::Result> { diff --git a/examples/postgres/preferred-crates/uses-time/src/lib.rs b/examples/postgres/preferred-crates/uses-time/src/lib.rs index 6c9dbee82e..4fb3377880 100644 --- a/examples/postgres/preferred-crates/uses-time/src/lib.rs +++ b/examples/postgres/preferred-crates/uses-time/src/lib.rs @@ -37,7 +37,12 @@ pub async fn create_session( data: D, valid_duration: Duration, ) -> sqlx::Result> { - let created_at = OffsetDateTime::now_utc(); + // Round down to the nearest second because + // Postgres doesn't support precision higher than 1 microsecond anyway. + let created_at = OffsetDateTime::now_utc() + .replace_nanosecond(0) + .expect("0 nanoseconds should be in range"); + let expires_at = created_at + valid_duration; let id: Uuid = sqlx::query_scalar( diff --git a/examples/sqlite/extension/Cargo.toml b/examples/sqlite/extension/Cargo.toml index bf20add4b3..fa2042e343 100644 --- a/examples/sqlite/extension/Cargo.toml +++ b/examples/sqlite/extension/Cargo.toml @@ -9,7 +9,7 @@ categories.workspace = true authors.workspace = true [dependencies] -sqlx = { path = "../../../", features = [ "sqlite", "runtime-tokio", "tls-native-tls" ] } +sqlx = { path = "../../../", features = [ "sqlite", "runtime-tokio", "tls-native-tls", "sqlx-toml"] } tokio = { version = "1.20.0", features = ["rt", "macros"]} anyhow = "1.0" diff --git a/examples/sqlite/extension/sqlx.toml b/examples/sqlite/extension/sqlx.toml index 77f844642f..fbc5884390 100644 --- a/examples/sqlite/extension/sqlx.toml +++ b/examples/sqlite/extension/sqlx.toml @@ -2,11 +2,11 @@ # Including the full path to the extension is somewhat unusual, # because normally an extension will be installed in a standard # directory which is part of the library search path. If that were the -# case here, the load-extensions value could just be `["ipaddr"]` +# case here, the load_extensions value could just be `["ipaddr"]` # # When the extension file is installed in a non-standard location, as # in this example, there are two options: # * Provide the full path the the extension, as seen below. 
# * Add the non-standard location to the library search path, which on # Linux means adding it to the LD_LIBRARY_PATH environment variable. -load-extensions = ["/tmp/sqlite3-lib/ipaddr"] \ No newline at end of file +load_extensions = ["/tmp/sqlite3-lib/ipaddr"] \ No newline at end of file diff --git a/examples/sqlite/extension/src/main.rs b/examples/sqlite/extension/src/main.rs index e171e9a6d0..7ee852e6af 100644 --- a/examples/sqlite/extension/src/main.rs +++ b/examples/sqlite/extension/src/main.rs @@ -25,6 +25,12 @@ async fn main() -> anyhow::Result<()> { let db = SqlitePool::connect_with(opts).await?; + // We're not running the migrations here, for the sake of brevity + // and to confirm that the needed extension was loaded during the + // CLI migrate operation. It would not be unusual to run the + // migrations here as well, though, using the database connection + // we just configured. + query!("insert into addresses (address, family) values (?1, ipfamily(?1))", "10.0.0.10").execute(&db).await?; println!("Query which requires the extension was successfully executed."); diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index c9f6ff4f18..b1d7ececac 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -2,11 +2,10 @@ use std::future::Future; use std::io; use std::time::Duration; -use futures::{Future, TryFutureExt}; +use futures_util::TryFutureExt; use sqlx::{AnyConnection, Connection}; use tokio::{select, signal}; -use anyhow::Context; use crate::opt::{Command, ConnectOpts, DatabaseCommand, MigrateCommand}; @@ -67,10 +66,10 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; migrate::run( - config, + &config, &source, &connect_opts, dry_run, @@ -89,10 +88,10 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; migrate::revert( - config, + &config, &source, &connect_opts, dry_run, @@ -108,9 +107,9 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; - migrate::info(config, &source, &connect_opts).await? + migrate::info(&config, &source, &connect_opts).await? } MigrateCommand::BuildScript { source, @@ -119,7 +118,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - migrate::build_script(config, &source, force)? + migrate::build_script(&config, &source, force)? } }, @@ -130,7 +129,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; database::create(&connect_opts).await? } DatabaseCommand::Drop { @@ -141,7 +140,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; database::drop(&connect_opts, !confirmation.yes, force).await? } DatabaseCommand::Reset { @@ -153,8 +152,8 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; - database::reset(config, &source, &connect_opts, !confirmation.yes, force).await? 
+ connect_opts.populate_db_url(&config)?; + database::reset(&config, &source, &connect_opts, !confirmation.yes, force).await? } DatabaseCommand::Setup { source, @@ -163,8 +162,8 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; - database::setup(config, &source, &connect_opts).await? + connect_opts.populate_db_url(&config)?; + database::setup(&config, &source, &connect_opts).await? } }, @@ -177,7 +176,7 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { config, } => { let config = config.load_config().await?; - connect_opts.populate_db_url(config)?; + connect_opts.populate_db_url(&config)?; prepare::run(check, all, workspace, connect_opts, args).await? } @@ -189,8 +188,35 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } /// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. +#[cfg(feature = "sqlx-toml")] async fn connect(opts: &ConnectOpts) -> anyhow::Result { - retry_connect_errors(opts, AnyConnection::connect_with_config).await + retry_connect_errors( + opts, + move |url| { + // This only handles the default case. For good support of + // the new command line options, we need to work out some + // way to make the appropriate ConfigOpt available here. I + // suspect that that infrastructure would be useful for + // other things in the future, as well, but it also seems + // like an extensive and intrusive change. + // + // On the other hand, the compile-time checking macros + // can't be configured to use a different config file at + // all, so I believe this is okay for the time being. + let config = Some(std::path::PathBuf::from("sqlx.toml")) + .and_then(|p| if p.exists() {Some(p)} else {None}); + + async move { + AnyConnection::connect_with_config(url, config.clone()).await + } + } + ).await +} + +/// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. 
+#[cfg(not(feature = "sqlx-toml"))] +async fn connect(opts: &ConnectOpts) -> anyhow::Result { + retry_connect_errors(opts, AnyConnection::connect).await } /// Attempt an operation that may return errors like `ConnectionRefused`, diff --git a/sqlx-cli/src/migrate.rs b/sqlx-cli/src/migrate.rs index 45a38b202a..6d32c9e846 100644 --- a/sqlx-cli/src/migrate.rs +++ b/sqlx-cli/src/migrate.rs @@ -14,15 +14,15 @@ use std::time::Duration; pub async fn add(opts: AddMigrationOpts) -> anyhow::Result<()> { let config = opts.config.load_config().await?; - let source = opts.source.resolve_path(config); + let source = opts.source.resolve_path(&config); fs::create_dir_all(source).context("Unable to create migrations directory")?; - let migrator = opts.source.resolve(config).await?; + let migrator = opts.source.resolve(&config).await?; - let version_prefix = opts.version_prefix(config, &migrator); + let version_prefix = opts.version_prefix(&config, &migrator); - if opts.reversible(config, &migrator) { + if opts.reversible(&config, &migrator) { create_file( source, &version_prefix, diff --git a/sqlx-cli/src/opt.rs b/sqlx-cli/src/opt.rs index 272c343c50..cb09bc2ff5 100644 --- a/sqlx-cli/src/opt.rs +++ b/sqlx-cli/src/opt.rs @@ -455,22 +455,23 @@ impl ConnectOpts { } impl ConfigOpt { - pub async fn load_config(&self) -> anyhow::Result<&'static Config> { + pub async fn load_config(&self) -> anyhow::Result { let path = self.config.clone(); // Tokio does file I/O on a background task anyway tokio::task::spawn_blocking(|| { if let Some(path) = path { let err_str = format!("error reading config from {path:?}"); - Config::try_read_with(|| Ok(path)).context(err_str) + Config::try_from_path(path).context(err_str) } else { let path = PathBuf::from("sqlx.toml"); if path.exists() { eprintln!("Found `sqlx.toml` in current directory; reading..."); + Ok(Config::try_from_path(path)?) + } else { + Ok(Config::default()) } - - Ok(Config::read_with_or_default(move || Ok(path))) } }) .await diff --git a/sqlx-core/src/any/connection/mod.rs b/sqlx-core/src/any/connection/mod.rs index 509e8f5e93..fc0dcd2323 100644 --- a/sqlx-core/src/any/connection/mod.rs +++ b/sqlx-core/src/any/connection/mod.rs @@ -44,14 +44,15 @@ impl AnyConnection { /// /// Connect to the database, and instruct the nested driver to /// read options from the sqlx.toml file as appropriate. 
+ #[cfg(feature = "sqlx-toml")] #[doc(hidden)] - pub fn connect_with_config(url: &str) -> BoxFuture<'static, Result> + pub fn connect_with_config(url: &str, path: Option) -> BoxFuture<'static, Result> where Self: Sized, { let options: Result = url.parse(); - Box::pin(async move { Self::connect_with(&options?.allow_config_file()).await }) + Box::pin(async move { Self::connect_with(&options?.with_config_file(path)).await }) } pub(crate) fn connect_with_db( diff --git a/sqlx-core/src/any/options.rs b/sqlx-core/src/any/options.rs index 5ed68efec5..66a35eb9b3 100644 --- a/sqlx-core/src/any/options.rs +++ b/sqlx-core/src/any/options.rs @@ -19,7 +19,7 @@ use url::Url; pub struct AnyConnectOptions { pub database_url: Url, pub log_settings: LogSettings, - pub enable_config: bool, + pub enable_config: Option, } impl FromStr for AnyConnectOptions { type Err = Error; @@ -30,7 +30,7 @@ impl FromStr for AnyConnectOptions { .parse::() .map_err(|e| Error::Configuration(e.into()))?, log_settings: LogSettings::default(), - enable_config: false, + enable_config: None, }) } } @@ -42,7 +42,7 @@ impl ConnectOptions for AnyConnectOptions { Ok(AnyConnectOptions { database_url: url.clone(), log_settings: LogSettings::default(), - enable_config: false, + enable_config: None, }) } @@ -73,8 +73,8 @@ impl AnyConnectOptions { /// Allow nested drivers to extract configuration information from /// the sqlx.toml file. #[doc(hidden)] - pub fn allow_config_file(mut self) -> Self { - self.enable_config = true; + pub fn with_config_file(mut self, path: Option>) -> Self { + self.enable_config = path.map(|p| p.into()); self } } diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 45d3e689ee..d270026d89 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -76,9 +76,3 @@ pub struct SQLite { /// ``` pub load_extensions: Vec, } - -impl Config { - pub fn database_url_var(&self) -> &str { - self.database_url_var.as_deref().unwrap_or("DATABASE_URL") - } -} diff --git a/sqlx-core/src/config/macros.rs b/sqlx-core/src/config/macros.rs index 9acabf2d6a..6d08aa3ec2 100644 --- a/sqlx-core/src/config/macros.rs +++ b/sqlx-core/src/config/macros.rs @@ -1,6 +1,8 @@ use std::collections::BTreeMap; /// Configuration for the `query!()` family of macros. +/// +/// See also [`common::Config`][crate::config::common::Config] for renaming `DATABASE_URL`. #[derive(Debug, Default)] #[cfg_attr( feature = "sqlx-toml", diff --git a/sqlx-core/src/config/mod.rs b/sqlx-core/src/config/mod.rs index 5801af888c..40feb007fd 100644 --- a/sqlx-core/src/config/mod.rs +++ b/sqlx-core/src/config/mod.rs @@ -3,6 +3,10 @@ //! To use, create a `sqlx.toml` file in your crate root (the same directory as your `Cargo.toml`). //! The configuration in a `sqlx.toml` configures SQLx *only* for the current crate. //! +//! Requires the `sqlx-toml` feature (not enabled by default). +//! +//! `sqlx-cli` will also read `sqlx.toml` when running migrations. +//! //! See the [`Config`] type and its fields for individual configuration options. //! //! See the [reference][`_reference`] for the full `sqlx.toml` file. @@ -12,10 +16,6 @@ use std::fmt::Debug; use std::io; use std::path::{Path, PathBuf}; -// `std::sync::OnceLock` doesn't have a stable `.get_or_try_init()` -// because it's blocked on a stable `Try` trait. -use once_cell::sync::OnceCell; - /// Configuration shared by multiple components. /// /// See [`common::Config`] for details. 
@@ -120,11 +120,14 @@ impl ConfigError { /// Create a [`ConfigError`] from a [`std::io::Error`]. /// /// Maps to either `NotFound` or `Io`. - pub fn from_io(path: PathBuf, error: io::Error) -> Self { + pub fn from_io(path: impl Into, error: io::Error) -> Self { if error.kind() == io::ErrorKind::NotFound { - Self::NotFound { path } + Self::NotFound { path: path.into() } } else { - Self::Io { path, error } + Self::Io { + path: path.into(), + error, + } } } @@ -138,86 +141,33 @@ impl ConfigError { } } -static CACHE: OnceCell = OnceCell::new(); - /// Internal methods for loading a `Config`. #[allow(clippy::result_large_err)] impl Config { - /// Get the cached config, or attempt to read `$CARGO_MANIFEST_DIR/sqlx.toml`. - /// - /// On success, the config is cached in a `static` and returned by future calls. - /// - /// Returns `Config::default()` if the file does not exist. - /// - /// ### Panics - /// If the file exists but an unrecoverable error was encountered while parsing it. - pub fn from_crate() -> &'static Self { - Self::read_with_or_default(get_crate_path) - } - - /// Get the cached config, or to read `$CARGO_MANIFEST_DIR/sqlx.toml`. + /// Get the cached config, or read `$CARGO_MANIFEST_DIR/sqlx.toml`. /// /// On success, the config is cached in a `static` and returned by future calls. /// /// Errors if `CARGO_MANIFEST_DIR` is not set, or if the config file could not be read. - pub fn try_from_crate() -> Result<&'static Self, ConfigError> { - Self::try_read_with(get_crate_path) - } - - /// Get the cached config, or attempt to read `sqlx.toml` from the current working directory. /// - /// On success, the config is cached in a `static` and returned by future calls. - /// - /// Errors if the config file does not exist, or could not be read. - pub fn try_from_current_dir() -> Result<&'static Self, ConfigError> { - Self::try_read_with(|| Ok("sqlx.toml".into())) - } - - /// Get the cached config, or attempt to read it from the path returned by the closure. - /// - /// On success, the config is cached in a `static` and returned by future calls. - /// - /// Errors if the config file does not exist, or could not be read. - pub fn try_read_with( - make_path: impl FnOnce() -> Result, - ) -> Result<&'static Self, ConfigError> { - CACHE.get_or_try_init(|| { - let path = make_path()?; - Self::read_from(path) + /// If the file does not exist, the cache is populated with `Config::default()`. + pub fn try_from_crate_or_default() -> Result { + Self::read_from(get_crate_path()?).or_else(|e| { + if let ConfigError::NotFound { .. } = e { + Ok(Config::default()) + } else { + Err(e) + } }) } - /// Get the cached config, or attempt to read it from the path returned by the closure. + /// Get the cached config, or attempt to read it from the path given. /// /// On success, the config is cached in a `static` and returned by future calls. /// - /// Returns `Config::default()` if the file does not exist. - pub fn read_with_or_default( - make_path: impl FnOnce() -> Result, - ) -> &'static Self { - CACHE.get_or_init(|| { - match make_path().and_then(Self::read_from) { - Ok(config) => config, - Err(ConfigError::NotFound { path }) => { - // Non-fatal - tracing::debug!("Not reading config, file {path:?} not found"); - Config::default() - } - // FATAL ERRORS BELOW: - // In the case of migrations, - // we can't proceed with defaults as they may be completely wrong. - Err(e @ ConfigError::ParseDisabled { .. }) => { - // Only returned if the file exists but the feature is not enabled. 
- panic!("{e}") - } - Err(ConfigError::Parse { error, path }) => { - panic!("error parsing sqlx config {path:?}: {error}") - } - Err(e) => { - panic!("failed to read sqlx config: {e}") - } - } - }) + /// Errors if the config file does not exist, or could not be read. + pub fn try_from_path(path: PathBuf) -> Result { + Self::read_from(path) } #[cfg(feature = "sqlx-toml")] diff --git a/sqlx-macros-core/Cargo.toml b/sqlx-macros-core/Cargo.toml index 02b773af07..c8eb5760a4 100644 --- a/sqlx-macros-core/Cargo.toml +++ b/sqlx-macros-core/Cargo.toml @@ -27,7 +27,7 @@ derive = [] macros = [] migrate = ["sqlx-core/migrate"] -sqlx-toml = ["sqlx-core/sqlx-toml"] +sqlx-toml = ["sqlx-core/sqlx-toml", "sqlx-sqlite?/sqlx-toml"] # database mysql = ["sqlx-mysql"] diff --git a/sqlx-macros-core/src/migrate.rs b/sqlx-macros-core/src/migrate.rs index cfc3394757..4f051d1330 100644 --- a/sqlx-macros-core/src/migrate.rs +++ b/sqlx-macros-core/src/migrate.rs @@ -92,14 +92,14 @@ pub fn default_path(config: &Config) -> &str { } pub fn expand(path_arg: Option) -> crate::Result { - let config = Config::from_crate(); + let config = Config::try_from_crate_or_default()?; let path = match path_arg { Some(path_arg) => crate::common::resolve_path(path_arg.value(), path_arg.span())?, - None => { crate::common::resolve_path(default_path(config), Span::call_site()) }?, + None => { crate::common::resolve_path(default_path(&config), Span::call_site()) }?, }; - expand_with_path(config, &path) + expand_with_path(&config, &path) } pub fn expand_with_path(config: &Config, path: &Path) -> crate::Result { diff --git a/sqlx-macros-core/src/query/args.rs b/sqlx-macros-core/src/query/args.rs index 6195ee6bbc..1b338efa3e 100644 --- a/sqlx-macros-core/src/query/args.rs +++ b/sqlx-macros-core/src/query/args.rs @@ -6,7 +6,6 @@ use quote::{format_ident, quote, quote_spanned}; use sqlx_core::config::Config; use sqlx_core::describe::Describe; use sqlx_core::type_checking; -use sqlx_core::type_checking::Error; use sqlx_core::type_info::TypeInfo; use syn::spanned::Spanned; use syn::{Expr, ExprCast, ExprGroup, Type}; @@ -130,7 +129,10 @@ fn get_param_type( "optional sqlx feature `{feature_gate}` required for type {param_ty} of param #{param_num}", ) } else { - format!("unsupported type {param_ty} for param #{param_num}") + format!( + "no built-in mapping for type {param_ty} of param #{param_num}; \ + a type override may be required, see documentation for details" + ) } } type_checking::Error::DateTimeCrateFeatureNotEnabled => { @@ -160,11 +162,12 @@ fn get_param_type( ) } - Error::AmbiguousDateTimeType { fallback } => { + type_checking::Error::AmbiguousDateTimeType { fallback } => { warnings.ambiguous_datetime = true; return Ok(fallback.parse()?); } - Error::AmbiguousNumericType { fallback } => { + + type_checking::Error::AmbiguousNumericType { fallback } => { warnings.ambiguous_numeric = true; return Ok(fallback.parse()?); } diff --git a/sqlx-macros-core/src/query/mod.rs b/sqlx-macros-core/src/query/mod.rs index 70ccd828b1..f6c0cae6db 100644 --- a/sqlx-macros-core/src/query/mod.rs +++ b/sqlx-macros-core/src/query/mod.rs @@ -124,11 +124,7 @@ fn init_metadata(manifest_dir: &String) -> crate::Result { .map(|s| s.eq_ignore_ascii_case("true") || s == "1") .unwrap_or(false); - let var_name = Config::from_crate().common.database_url_var(); - - let database_url = env(var_name).ok().or(database_url); - - let database_url = env(var_name).ok(); + let config = Config::try_from_crate_or_default()?; let database_url = 
env(config.common.database_url_var()).ok().or(database_url); @@ -264,8 +260,6 @@ fn expand_with_data( where Describe: DescribeExt, { - let config = Config::from_crate(); - // validate at the minimum that our args match the query's input parameters let num_parameters = match data.describe.parameters() { Some(Either::Left(params)) => Some(params.len()), diff --git a/sqlx-macros-core/src/test_attr.rs b/sqlx-macros-core/src/test_attr.rs index 907b8839fb..046ff5c2fb 100644 --- a/sqlx-macros-core/src/test_attr.rs +++ b/sqlx-macros-core/src/test_attr.rs @@ -77,7 +77,7 @@ fn expand_simple(input: syn::ItemFn) -> TokenStream { #[cfg(feature = "migrate")] fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - let config = sqlx_core::config::Config::from_crate(); + let config = sqlx_core::config::Config::try_from_crate_or_default()?; let ret = &input.sig.output; let name = &input.sig.ident; @@ -149,12 +149,12 @@ fn expand_advanced(args: AttributeArgs, input: syn::ItemFn) -> crate::Result { - let path = crate::migrate::default_path(config); + let path = crate::migrate::default_path(&config); let resolved_path = crate::common::resolve_path(path, proc_macro2::Span::call_site())?; if resolved_path.is_dir() { - let migrator = crate::migrate::expand_with_path(config, &resolved_path)?; + let migrator = crate::migrate::expand_with_path(&config, &resolved_path)?; quote! { args.migrator(&#migrator); } } else { quote! {} diff --git a/sqlx-postgres/src/connection/describe.rs b/sqlx-postgres/src/connection/describe.rs index 0c2f5ff154..0334357a6c 100644 --- a/sqlx-postgres/src/connection/describe.rs +++ b/sqlx-postgres/src/connection/describe.rs @@ -134,24 +134,6 @@ impl PgConnection { ColumnOrigin::Expression }; - let origin = if let (Some(relation_oid), Some(attribute_no)) = - (field.relation_id, field.relation_attribute_no) - { - self.maybe_fetch_column_origin(relation_oid, attribute_no, should_fetch) - .await? - } else { - ColumnOrigin::Expression - }; - - let origin = if let (Some(relation_oid), Some(attribute_no)) = - (field.relation_id, field.relation_attribute_no) - { - self.maybe_fetch_column_origin(relation_oid, attribute_no, fetch_column_description) - .await? - } else { - ColumnOrigin::Expression - }; - let column = PgColumn { ordinal: index, name: name.clone(), @@ -284,132 +266,6 @@ impl PgConnection { })) } - async fn maybe_fetch_column_origin( - &mut self, - relation_id: Oid, - attribute_no: i16, - should_fetch: bool, - ) -> Result { - if let Some(origin) = self - .inner - .cache_table_to_column_names - .get(&relation_id) - .and_then(|table_columns| { - let column_name = table_columns.columns.get(&attribute_no).cloned()?; - - Some(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: column_name, - })) - }) - { - return Ok(origin); - } - - if !should_fetch { - return Ok(ColumnOrigin::Unknown); - } - - // Looking up the table name _may_ end up being redundant, - // but the round-trip to the server is by far the most expensive part anyway. - let Some((table_name, column_name)): Option<(String, String)> = query_as( - // language=PostgreSQL - "SELECT $1::oid::regclass::text, attname \ - FROM pg_catalog.pg_attribute \ - WHERE attrelid = $1 AND attnum = $2", - ) - .bind(relation_id) - .bind(attribute_no) - .fetch_optional(&mut *self) - .await? - else { - // The column/table doesn't exist anymore for whatever reason. 
- return Ok(ColumnOrigin::Unknown); - }; - - let table_columns = self - .inner - .cache_table_to_column_names - .entry(relation_id) - .or_insert_with(|| TableColumns { - table_name: table_name.into(), - columns: Default::default(), - }); - - let column_name = table_columns - .columns - .entry(attribute_no) - .or_insert(column_name.into()); - - Ok(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: Arc::clone(column_name), - })) - } - - async fn maybe_fetch_column_origin( - &mut self, - relation_id: Oid, - attribute_no: i16, - should_fetch: bool, - ) -> Result { - if let Some(origin) = self - .inner - .cache_table_to_column_names - .get(&relation_id) - .and_then(|table_columns| { - let column_name = table_columns.columns.get(&attribute_no).cloned()?; - - Some(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: column_name, - })) - }) - { - return Ok(origin); - } - - if !should_fetch { - return Ok(ColumnOrigin::Unknown); - } - - // Looking up the table name _may_ end up being redundant, - // but the round-trip to the server is by far the most expensive part anyway. - let Some((table_name, column_name)): Option<(String, String)> = query_as( - // language=PostgreSQL - "SELECT $1::oid::regclass::text, attname \ - FROM pg_catalog.pg_attribute \ - WHERE attrelid = $1 AND attnum = $2", - ) - .bind(relation_id) - .bind(attribute_no) - .fetch_optional(&mut *self) - .await? - else { - // The column/table doesn't exist anymore for whatever reason. - return Ok(ColumnOrigin::Unknown); - }; - - let table_columns = self - .inner - .cache_table_to_column_names - .entry(relation_id) - .or_insert_with(|| TableColumns { - table_name: table_name.into(), - columns: Default::default(), - }); - - let column_name = table_columns - .columns - .entry(attribute_no) - .or_insert(column_name.into()); - - Ok(ColumnOrigin::Table(TableColumn { - table: table_columns.table_name.clone(), - name: Arc::clone(column_name), - })) - } - async fn fetch_type_by_oid(&mut self, oid: Oid) -> Result { let (name, typ_type, category, relation_id, element, base_type): ( String, diff --git a/sqlx-sqlite/Cargo.toml b/sqlx-sqlite/Cargo.toml index db7fb63cb8..a84dccc6dc 100644 --- a/sqlx-sqlite/Cargo.toml +++ b/sqlx-sqlite/Cargo.toml @@ -27,6 +27,8 @@ preupdate-hook = ["libsqlite3-sys/preupdate_hook"] bundled = ["libsqlite3-sys/bundled"] unbundled = ["libsqlite3-sys/buildtime_bindgen"] +sqlx-toml = ["sqlx-core/sqlx-toml"] + # Note: currently unused, only to satisfy "unexpected `cfg` condition" lint bigdecimal = [] rust_decimal = [] diff --git a/sqlx-sqlite/src/any.rs b/sqlx-sqlite/src/any.rs index 96c224b8a4..4038b44c2c 100644 --- a/sqlx-sqlite/src/any.rs +++ b/sqlx-sqlite/src/any.rs @@ -202,10 +202,13 @@ impl<'a> TryFrom<&'a AnyConnectOptions> for SqliteConnectOptions { let mut opts_out = SqliteConnectOptions::from_url(&opts.database_url)?; opts_out.log_settings = opts.log_settings.clone(); - if opts.enable_config { - let config = sqlx_core::config::Config::from_crate(); - for extension in config.common.drivers.sqlite.load_extensions.iter() { - opts_out = opts_out.extension(extension); + #[cfg(feature = "sqlx-toml")] + if let Some(ref path) = opts.enable_config { + if path.exists() { + let config = sqlx_core::config::Config::try_from_path(path.to_path_buf()).unwrap_or_default(); + for extension in config.common.drivers.sqlite.load_extensions.iter() { + opts_out = opts_out.extension(extension.to_owned()); + } } } diff --git a/sqlx-sqlite/src/connection/describe.rs 
b/sqlx-sqlite/src/connection/describe.rs index b22590cfa3..6db81374aa 100644 --- a/sqlx-sqlite/src/connection/describe.rs +++ b/sqlx-sqlite/src/connection/describe.rs @@ -52,10 +52,6 @@ pub(crate) fn describe(conn: &mut ConnectionState, query: &str) -> Result Result, Error> { let mut opts: SqliteConnectOptions = database_url.parse()?; - let config = sqlx_core::config::Config::from_crate(); - for extension in config.common.drivers.sqlite.load_extensions.iter() { - opts = opts.extension(extension); + #[cfg(feature = "sqlx-toml")] + { + let config = sqlx_core::config::Config::try_from_crate_or_default().unwrap_or_default(); + for extension in config.common.drivers.sqlite.load_extensions.iter() { + opts = opts.extension(extension.to_owned()); + } } let params = EstablishParams::from_options(&opts)?; - let mut conn = params.establish()?; // Execute any ancillary `PRAGMA`s diff --git a/src/lib.rs b/src/lib.rs index ce34f0e851..c608e02aea 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -175,7 +175,8 @@ pub mod prelude { } #[cfg(feature = "_unstable-doc")] -pub use sqlx_core::config; +#[cfg_attr(docsrs, doc(cfg(feature = "_unstable-doc")))] +pub use sqlx_core::config as _config; // NOTE: APIs exported in this module are SemVer-exempt. #[doc(hidden)] From 89c63aef20e06cba8020a886cb66d182d8ba1415 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Tue, 1 Jul 2025 13:33:25 -0700 Subject: [PATCH 73/78] make cargo fmt happy --- examples/sqlite/extension/src/main.rs | 12 ++++++-- sqlx-cli/src/lib.rs | 41 ++++++++++++++------------- sqlx-core/src/any/connection/mod.rs | 5 +++- sqlx-sqlite/src/any.rs | 3 +- 4 files changed, 37 insertions(+), 24 deletions(-) diff --git a/examples/sqlite/extension/src/main.rs b/examples/sqlite/extension/src/main.rs index 7ee852e6af..ee859c55b8 100644 --- a/examples/sqlite/extension/src/main.rs +++ b/examples/sqlite/extension/src/main.rs @@ -1,6 +1,9 @@ use std::str::FromStr; -use sqlx::{query, sqlite::{SqlitePool, SqliteConnectOptions}}; +use sqlx::{ + query, + sqlite::{SqliteConnectOptions, SqlitePool}, +}; #[tokio::main(flavor = "current_thread")] async fn main() -> anyhow::Result<()> { @@ -31,7 +34,12 @@ async fn main() -> anyhow::Result<()> { // migrations here as well, though, using the database connection // we just configured. - query!("insert into addresses (address, family) values (?1, ipfamily(?1))", "10.0.0.10").execute(&db).await?; + query!( + "insert into addresses (address, family) values (?1, ipfamily(?1))", + "10.0.0.10" + ) + .execute(&db) + .await?; println!("Query which requires the extension was successfully executed."); diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index b1d7ececac..193128d83c 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -190,27 +190,28 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { /// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. #[cfg(feature = "sqlx-toml")] async fn connect(opts: &ConnectOpts) -> anyhow::Result { - retry_connect_errors( - opts, - move |url| { - // This only handles the default case. For good support of - // the new command line options, we need to work out some - // way to make the appropriate ConfigOpt available here. I - // suspect that that infrastructure would be useful for - // other things in the future, as well, but it also seems - // like an extensive and intrusive change. 
- // - // On the other hand, the compile-time checking macros - // can't be configured to use a different config file at - // all, so I believe this is okay for the time being. - let config = Some(std::path::PathBuf::from("sqlx.toml")) - .and_then(|p| if p.exists() {Some(p)} else {None}); - - async move { - AnyConnection::connect_with_config(url, config.clone()).await + retry_connect_errors(opts, move |url| { + // This only handles the default case. For good support of + // the new command line options, we need to work out some + // way to make the appropriate ConfigOpt available here. I + // suspect that that infrastructure would be useful for + // other things in the future, as well, but it also seems + // like an extensive and intrusive change. + // + // On the other hand, the compile-time checking macros + // can't be configured to use a different config file at + // all, so I believe this is okay for the time being. + let config = Some(std::path::PathBuf::from("sqlx.toml")).and_then(|p| { + if p.exists() { + Some(p) + } else { + None } - } - ).await + }); + + async move { AnyConnection::connect_with_config(url, config.clone()).await } + }) + .await } /// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. diff --git a/sqlx-core/src/any/connection/mod.rs b/sqlx-core/src/any/connection/mod.rs index fc0dcd2323..34c7ba5ea5 100644 --- a/sqlx-core/src/any/connection/mod.rs +++ b/sqlx-core/src/any/connection/mod.rs @@ -46,7 +46,10 @@ impl AnyConnection { /// read options from the sqlx.toml file as appropriate. #[cfg(feature = "sqlx-toml")] #[doc(hidden)] - pub fn connect_with_config(url: &str, path: Option) -> BoxFuture<'static, Result> + pub fn connect_with_config( + url: &str, + path: Option, + ) -> BoxFuture<'static, Result> where Self: Sized, { diff --git a/sqlx-sqlite/src/any.rs b/sqlx-sqlite/src/any.rs index 4038b44c2c..2c94e0c9ab 100644 --- a/sqlx-sqlite/src/any.rs +++ b/sqlx-sqlite/src/any.rs @@ -205,7 +205,8 @@ impl<'a> TryFrom<&'a AnyConnectOptions> for SqliteConnectOptions { #[cfg(feature = "sqlx-toml")] if let Some(ref path) = opts.enable_config { if path.exists() { - let config = sqlx_core::config::Config::try_from_path(path.to_path_buf()).unwrap_or_default(); + let config = sqlx_core::config::Config::try_from_path(path.to_path_buf()) + .unwrap_or_default(); for extension in config.common.drivers.sqlite.load_extensions.iter() { opts_out = opts_out.extension(extension.to_owned()); } From 4bf945a71533176f2cf5de694b33199468137815 Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Tue, 1 Jul 2025 13:47:27 -0700 Subject: [PATCH 74/78] make clippy happy --- sqlx-cli/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 193128d83c..023ee81e17 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -4,7 +4,7 @@ use std::time::Duration; use futures_util::TryFutureExt; -use sqlx::{AnyConnection, Connection}; +use sqlx::AnyConnection; use tokio::{select, signal}; use crate::opt::{Command, ConnectOpts, DatabaseCommand, MigrateCommand}; From 6d4db032f9d4fd81845009d7541810b291a8d41a Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Tue, 1 Jul 2025 13:55:08 -0700 Subject: [PATCH 75/78] make clippy happier still --- sqlx-sqlite/src/lib.rs | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sqlx-sqlite/src/lib.rs b/sqlx-sqlite/src/lib.rs index 0611c7a605..57425febe0 100644 --- a/sqlx-sqlite/src/lib.rs +++ b/sqlx-sqlite/src/lib.rs @@ -127,15 +127,20 @@ pub 
static CREATE_DB_WAL: AtomicBool = AtomicBool::new(true); /// UNSTABLE: for use by `sqlite-macros-core` only. #[doc(hidden)] pub fn describe_blocking(query: &str, database_url: &str) -> Result, Error> { - let mut opts: SqliteConnectOptions = database_url.parse()?; - #[cfg(feature = "sqlx-toml")] - { + let opts = { + let mut opts: SqliteConnectOptions = database_url.parse()?; + let config = sqlx_core::config::Config::try_from_crate_or_default().unwrap_or_default(); for extension in config.common.drivers.sqlite.load_extensions.iter() { opts = opts.extension(extension.to_owned()); } - } + + opts + }; + + #[cfg(not(feature = "sqlx-toml"))] + let opts: SqliteConnectOptions = database_url.parse()?; let params = EstablishParams::from_options(&opts)?; let mut conn = params.establish()?; From c04c328c53f14759cc504e96ed014f6d7bfe03dc Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Wed, 2 Jul 2025 11:14:00 -0700 Subject: [PATCH 76/78] fix: improved error reporting, added parsing test, removed sqlx-toml flag use --- sqlx-cli/src/lib.rs | 7 ------- sqlx-core/src/config/common.rs | 14 ++++++++++++-- sqlx-core/src/config/tests.rs | 1 + sqlx-core/src/error.rs | 4 ++++ sqlx-sqlite/src/any.rs | 11 ++++++++--- sqlx-sqlite/src/lib.rs | 21 +++++++++------------ 6 files changed, 34 insertions(+), 24 deletions(-) diff --git a/sqlx-cli/src/lib.rs b/sqlx-cli/src/lib.rs index 023ee81e17..f0db083b1d 100644 --- a/sqlx-cli/src/lib.rs +++ b/sqlx-cli/src/lib.rs @@ -188,7 +188,6 @@ async fn do_run(opt: Opt) -> anyhow::Result<()> { } /// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. -#[cfg(feature = "sqlx-toml")] async fn connect(opts: &ConnectOpts) -> anyhow::Result { retry_connect_errors(opts, move |url| { // This only handles the default case. For good support of @@ -214,12 +213,6 @@ async fn connect(opts: &ConnectOpts) -> anyhow::Result { .await } -/// Attempt to connect to the database server, retrying up to `ops.connect_timeout`. -#[cfg(not(feature = "sqlx-toml"))] -async fn connect(opts: &ConnectOpts) -> anyhow::Result { - retry_connect_errors(opts, AnyConnection::connect).await -} - /// Attempt an operation that may return errors like `ConnectionRefused`, /// retrying up until `ops.connect_timeout`. /// diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index d270026d89..13fd5a3e02 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -57,14 +57,24 @@ impl Config { } /// Configuration for specific database drivers. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, deny_unknown_fields) +)] pub struct Drivers { /// Specify options for the SQLite driver. pub sqlite: SQLite, } /// Configuration for the SQLite database driver. -#[derive(Debug, Default, serde::Deserialize)] +#[derive(Debug, Default)] +#[cfg_attr( + feature = "sqlx-toml", + derive(serde::Deserialize), + serde(default, deny_unknown_fields) +)] pub struct SQLite { /// Specify extensions to load. 
/// diff --git a/sqlx-core/src/config/tests.rs b/sqlx-core/src/config/tests.rs index 0b0b590919..3d0f4fc871 100644 --- a/sqlx-core/src/config/tests.rs +++ b/sqlx-core/src/config/tests.rs @@ -14,6 +14,7 @@ fn reference_parses_as_config() { fn assert_common_config(config: &config::common::Config) { assert_eq!(config.database_url_var.as_deref(), Some("FOO_DATABASE_URL")); + assert_eq!(config.drivers.sqlite.load_extensions[1].as_str(), "vsv"); } fn assert_macros_config(config: &config::macros::Config) { diff --git a/sqlx-core/src/error.rs b/sqlx-core/src/error.rs index 9ad5eff464..5db51eca59 100644 --- a/sqlx-core/src/error.rs +++ b/sqlx-core/src/error.rs @@ -30,6 +30,10 @@ pub struct UnexpectedNullError; #[derive(Debug, thiserror::Error)] #[non_exhaustive] pub enum Error { + /// Error occurred while reading configuration file + #[error("error reading configuration file: {0}")] + ConfigFile(#[source] crate::config::ConfigError), + /// Error occurred while parsing a connection string. #[error("error with configuration: {0}")] Configuration(#[source] BoxDynError), diff --git a/sqlx-sqlite/src/any.rs b/sqlx-sqlite/src/any.rs index 2c94e0c9ab..7b71e2cf4e 100644 --- a/sqlx-sqlite/src/any.rs +++ b/sqlx-sqlite/src/any.rs @@ -202,11 +202,16 @@ impl<'a> TryFrom<&'a AnyConnectOptions> for SqliteConnectOptions { let mut opts_out = SqliteConnectOptions::from_url(&opts.database_url)?; opts_out.log_settings = opts.log_settings.clone(); - #[cfg(feature = "sqlx-toml")] if let Some(ref path) = opts.enable_config { if path.exists() { - let config = sqlx_core::config::Config::try_from_path(path.to_path_buf()) - .unwrap_or_default(); + let config = match sqlx_core::config::Config::try_from_path(path.to_path_buf()) { + Ok(cfg) => cfg, + Err(sqlx_core::config::ConfigError::NotFound { path: _ }) => { + return Ok(opts_out) + } + Err(err) => return Err(Self::Error::ConfigFile(err)), + }; + for extension in config.common.drivers.sqlite.load_extensions.iter() { opts_out = opts_out.extension(extension.to_owned()); } diff --git a/sqlx-sqlite/src/lib.rs b/sqlx-sqlite/src/lib.rs index 57425febe0..b4878a4e2a 100644 --- a/sqlx-sqlite/src/lib.rs +++ b/sqlx-sqlite/src/lib.rs @@ -127,20 +127,17 @@ pub static CREATE_DB_WAL: AtomicBool = AtomicBool::new(true); /// UNSTABLE: for use by `sqlite-macros-core` only. 
#[doc(hidden)] pub fn describe_blocking(query: &str, database_url: &str) -> Result, Error> { - #[cfg(feature = "sqlx-toml")] - let opts = { - let mut opts: SqliteConnectOptions = database_url.parse()?; + let mut opts: SqliteConnectOptions = database_url.parse()?; - let config = sqlx_core::config::Config::try_from_crate_or_default().unwrap_or_default(); - for extension in config.common.drivers.sqlite.load_extensions.iter() { - opts = opts.extension(extension.to_owned()); + match sqlx_core::config::Config::try_from_crate_or_default() { + Ok(config) => { + for extension in config.common.drivers.sqlite.load_extensions.iter() { + opts = opts.extension(extension.to_owned()); + } } - - opts - }; - - #[cfg(not(feature = "sqlx-toml"))] - let opts: SqliteConnectOptions = database_url.parse()?; + Err(sqlx_core::config::ConfigError::NotFound { path: _ }) => {} + Err(err) => return Err(Error::ConfigFile(err)), + } let params = EstablishParams::from_options(&opts)?; let mut conn = params.establish()?; From fe9916f23fbaabd158e3ee63e5d051f4ee9abccb Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Sat, 5 Jul 2025 09:30:54 -0700 Subject: [PATCH 77/78] switched to kebab-case for the config key --- examples/sqlite/extension/sqlx.toml | 4 ++-- examples/sqlite/extension/src/main.rs | 1 + sqlx-core/src/config/common.rs | 6 +++--- sqlx-core/src/config/reference.toml | 2 +- 4 files changed, 7 insertions(+), 6 deletions(-) diff --git a/examples/sqlite/extension/sqlx.toml b/examples/sqlite/extension/sqlx.toml index fbc5884390..77f844642f 100644 --- a/examples/sqlite/extension/sqlx.toml +++ b/examples/sqlite/extension/sqlx.toml @@ -2,11 +2,11 @@ # Including the full path to the extension is somewhat unusual, # because normally an extension will be installed in a standard # directory which is part of the library search path. If that were the -# case here, the load_extensions value could just be `["ipaddr"]` +# case here, the load-extensions value could just be `["ipaddr"]` # # When the extension file is installed in a non-standard location, as # in this example, there are two options: # * Provide the full path the the extension, as seen below. # * Add the non-standard location to the library search path, which on # Linux means adding it to the LD_LIBRARY_PATH environment variable. -load_extensions = ["/tmp/sqlite3-lib/ipaddr"] \ No newline at end of file +load-extensions = ["/tmp/sqlite3-lib/ipaddr"] \ No newline at end of file diff --git a/examples/sqlite/extension/src/main.rs b/examples/sqlite/extension/src/main.rs index ee859c55b8..8645422f9f 100644 --- a/examples/sqlite/extension/src/main.rs +++ b/examples/sqlite/extension/src/main.rs @@ -1,3 +1,4 @@ + use std::str::FromStr; use sqlx::{ diff --git a/sqlx-core/src/config/common.rs b/sqlx-core/src/config/common.rs index 13fd5a3e02..e1809d6d2b 100644 --- a/sqlx-core/src/config/common.rs +++ b/sqlx-core/src/config/common.rs @@ -61,7 +61,7 @@ impl Config { #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, deny_unknown_fields) + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct Drivers { /// Specify options for the SQLite driver. @@ -73,7 +73,7 @@ pub struct Drivers { #[cfg_attr( feature = "sqlx-toml", derive(serde::Deserialize), - serde(default, deny_unknown_fields) + serde(default, rename_all = "kebab-case", deny_unknown_fields) )] pub struct SQLite { /// Specify extensions to load. 
@@ -82,7 +82,7 @@ pub struct SQLite { /// `sqlx.toml`: /// ```toml /// [common.drivers.sqlite] - /// load_extensions = ["uuid", "vsv"] + /// load-extensions = ["uuid", "vsv"] /// ``` pub load_extensions: Vec, } diff --git a/sqlx-core/src/config/reference.toml b/sqlx-core/src/config/reference.toml index 375d510762..787c3456db 100644 --- a/sqlx-core/src/config/reference.toml +++ b/sqlx-core/src/config/reference.toml @@ -19,7 +19,7 @@ database-url-var = "FOO_DATABASE_URL" # Load extensions into SQLite when running macros or migrations # # Defaults to an empty list, which has no effect. -load_extensions = ["uuid", "vsv"] +load-extensions = ["uuid", "vsv"] ############################################################################################### From 55567c341155b866091b8a349e95c3df4f06078b Mon Sep 17 00:00:00 2001 From: Daniel Arbuckle Date: Sat, 5 Jul 2025 09:32:33 -0700 Subject: [PATCH 78/78] switched to kebab-case for the config key --- examples/sqlite/extension/src/main.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/examples/sqlite/extension/src/main.rs b/examples/sqlite/extension/src/main.rs index 8645422f9f..ee859c55b8 100644 --- a/examples/sqlite/extension/src/main.rs +++ b/examples/sqlite/extension/src/main.rs @@ -1,4 +1,3 @@ - use std::str::FromStr; use sqlx::{