diff --git a/.cargo/audit.toml b/.cargo/audit.toml new file mode 100644 index 0000000..4ad3c87 --- /dev/null +++ b/.cargo/audit.toml @@ -0,0 +1,4 @@ +[advisories] +ignore = [ + "RUSTSEC-2023-0071", # no fix available as of 2024-12-07: https://github.com/RustCrypto/RSA/issues/19 +] diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml index 6204620..989419d 100644 --- a/.github/workflows/audit.yml +++ b/.github/workflows/audit.yml @@ -5,13 +5,18 @@ on: paths: - "**/Cargo.toml" - "**/Cargo.lock" - schedule: - - cron: "0 0 * * *" + - ".github/workflows/audit.yml" + schedule: + - cron: "0 0 * * *" jobs: security_audit: name: Audit check runs-on: ubuntu-latest + permissions: + checks: write + contents: read + issues: write steps: - uses: actions/checkout@v4 - uses: rustsec/audit-check@v2.0.0 diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 3ffcf2c..61351ec 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -50,7 +50,7 @@ jobs: run: cargo +${{ matrix.rust }} build - name: Test - run: cargo +${{ matrix.rust }} nextest run + run: cargo +${{ matrix.rust }} nextest run --all-features # Nextest does not support doc tests as in stable Rust # they are not exposed in the same way as normal tests. 
@@ -79,7 +79,43 @@ jobs: uses: Swatinem/rust-cache@v2 - name: Run clippy - run: cargo clippy --no-deps -- -Dclippy::all -Wclippy::pedantic + run: cargo clippy --no-deps -- + + external-deps: + if: github.event_name == 'push' || github.event_name == 'schedule' || + github.event.pull_request.head.repo.full_name != github.repository + + name: Test with external dependencies + runs-on: ubuntu-latest + needs: ["build"] + steps: + - name: Checkout source + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + + - name: Cache Cargo registry + uses: Swatinem/rust-cache@v2 + + - name: Install cargo-nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest + + - name: Build + run: cargo build + + - name: Run the external dependencies + run: docker compose up -d + + - name: Test + run: cargo nextest run --all-features -- --include-ignored + + - name: Test docs + run: cargo test --doc coverage: if: github.event_name == 'push' || github.event_name == 'schedule' || @@ -101,8 +137,11 @@ jobs: - name: Cache Cargo registry uses: Swatinem/rust-cache@v2 + - name: Run the external dependencies + run: docker compose up -d + - name: Test - run: cargo test --all-features --no-fail-fast + run: cargo test --all-features --no-fail-fast -- --include-ignored env: RUSTFLAGS: "-Cinstrument-coverage" @@ -116,7 +155,7 @@ jobs: - name: Run grcov run: grcov . --binary-path target/debug/deps/ -s . -t lcov --branch --ignore-not-existing --ignore '../**' --ignore '/*' --ignore 'examples/**' -o coverage.lcov - - uses: codecov/codecov-action@v4 + - uses: codecov/codecov-action@v5 with: files: ./coverage.lcov flags: rust @@ -199,3 +238,61 @@ jobs: # Update Cargo.lock to minimal version dependencies. 
cargo update -Z minimal-versions cargo hack check --all-features --ignore-private + + build-feature-power-set: + if: github.event_name == 'push' || github.event_name == 'schedule' || + github.event.pull_request.head.repo.full_name != github.repository + + name: Build with each feature combination + runs-on: ubuntu-latest + needs: ["build"] + steps: + - name: Checkout source + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + + - name: Install cargo-hack + uses: taiki-e/install-action@cargo-hack + + - name: Cache Cargo registry + uses: Swatinem/rust-cache@v2 + + - name: Run cargo check with every combination of features + run: cargo hack check --feature-powerset --exclude-features db --no-dev-deps + + miri: + if: github.event_name == 'push' || github.event_name == 'schedule' || + github.event.pull_request.head.repo.full_name != github.repository + + name: Miri test + runs-on: ubuntu-latest + needs: ["build"] + steps: + - name: Checkout source + uses: actions/checkout@v4 + + - name: Install Rust toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + components: miri + + - name: Install cargo-nextest + uses: taiki-e/install-action@v2 + with: + tool: nextest + + - name: Cache Cargo registry + uses: Swatinem/rust-cache@v2 + + - name: Miri setup + run: cargo miri setup + + - name: Miri test + run: cargo miri nextest run --no-fail-fast --all-features + env: + MIRIFLAGS: -Zmiri-disable-isolation diff --git a/Cargo.lock b/Cargo.lock index 848827b..eb02f4b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,9 +40,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-tzdata" @@ -61,9 
+61,9 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.15" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -76,43 +76,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.8" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.5" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.4" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.92" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f37166d7d48a0284b99dd824694c26119c700b53bf0d1540cdb147dbdaaf13" +checksum = 
"34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" [[package]] name = "argon2" @@ -220,9 +220,9 @@ checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "axum" -version = "0.7.7" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "504e3947307ac8326a5437504c517c4b56716c9d98fac0028c2acc7ca47d70ae" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", "axum-core", @@ -241,15 +241,11 @@ dependencies = [ "pin-project-lite", "rustversion", "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper 1.0.1", + "sync_wrapper", "tokio", "tower", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -267,10 +263,9 @@ dependencies = [ "mime", "pin-project-lite", "rustversion", - "sync_wrapper 1.0.1", + "sync_wrapper", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -359,9 +354,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cargo_toml" @@ -375,9 +370,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.31" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "shlex", ] @@ -390,24 +385,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", - "js-sys", "num-traits", - "serde", - "wasm-bindgen", "windows-targets 0.52.6", ] [[package]] name = "clap" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97f376d85a664d5837dbae44bf546e6477a679ff6610010f17276f686d867e8" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -415,9 +407,9 @@ dependencies = [ [[package]] name = "clap-verbosity-flag" -version = "2.2.2" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e099138e1807662ff75e2cebe4ae2287add879245574489f9b1588eb5e5564ed" +checksum = "34c77f67047557f62582784fd7482884697731b2932c7d37ced54bce2312e1e2" dependencies = [ "clap", "log", @@ -425,9 +417,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.20" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19bc80abd44e4bed93ca373a0704ccbd1b710dc5749406201bb018272808dc54" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -441,7 +433,7 @@ version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ - "heck 0.5.0", + "heck", "proc-macro2", "quote", "syn", @@ -449,15 +441,15 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" [[package]] name = "colorchoice" -version = 
"1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "concurrent-queue" @@ -476,9 +468,9 @@ checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const-oid" -version = "0.10.0-rc.2" +version = "0.10.0-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a0d96d207edbe5135e55038e79ab9ad6d75ba83b14cdf62326ce5b12bc46ab5" +checksum = "68ff6be19477a1bd5441f382916a89bc2a0b2c35db6d41e0f6e8538bf6d6463f" [[package]] name = "convert_case" @@ -508,9 +500,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -668,7 +660,6 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ - "convert_case", "proc-macro2", "quote", "syn", @@ -700,11 +691,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cf2e3d6615d99707295a9673e889bf363a04b2a466bd320c65a72536f7577379" dependencies = [ "block-buffer 0.11.0-rc.3", - "const-oid 0.10.0-rc.2", + "const-oid 0.10.0-rc.3", "crypto-common 0.2.0-rc.1", "subtle", ] +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "dissimilar" version = 
"1.0.9" @@ -725,9 +727,9 @@ checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" [[package]] name = "dummy" -version = "0.8.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac124e13ae9aa56acc4241f8c8207501d93afdd8d8e62f0c1f2e12f6508c65" +checksum = "b3ee4e39146145f7dd28e6c85ffdce489d93c0d9c88121063b8aacabbd9858d2" dependencies = [ "darling", "proc-macro2", @@ -756,9 +758,9 @@ dependencies = [ [[package]] name = "env_logger" -version = "0.11.5" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13fa619b91fb2381732789fc5de83b45675e882f66623b7d8cb4f643017018d" +checksum = "dcaee3d8e3cfc3fd92428d477bc97fc29ec8716d180c0d74c643bb26166660e0" dependencies = [ "anstream", "anstyle", @@ -775,12 +777,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -809,9 +811,7 @@ dependencies = [ name = "example-admin" version = "0.1.0" dependencies = [ - "env_logger", "flareon", - "tokio", ] [[package]] @@ -819,7 +819,14 @@ name = "example-hello-world" version = "0.1.0" dependencies = [ "flareon", - "tokio", +] + +[[package]] +name = "example-json" +version = "0.1.0" +dependencies = [ + "flareon", + "serde", ] [[package]] @@ -828,7 +835,6 @@ version = "0.1.0" dependencies = [ "askama", "flareon", - "tokio", ] [[package]] @@ -836,16 +842,14 @@ name = "example-todo-list" version = "0.1.0" dependencies = [ "askama", - "env_logger", "flareon", - "tokio", ] [[package]] name = "fake" -version = "2.10.0" +version = "3.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d391ba4af7f1d93f01fcf7b2f29e2bc9348e109dfdbf4dcbdc51dfa38dab0b6" +checksum = "aef603df4ba9adbca6a332db7da6f614f21eafefbaf8e087844e452fdec152d0" dependencies = [ "chrono", "deunicode", @@ -855,9 +859,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" [[package]] name = "flareon" @@ -890,16 +894,15 @@ dependencies = [ "mockall", "password-auth", "pin-project-lite", - "rand", - "regex", "sea-query", "sea-query-binder", "serde", + "serde_json", "sha2 0.11.0-pre.4", "sqlx", "subtle", - "sync_wrapper 1.0.1", - "thiserror", + "sync_wrapper", + "thiserror 2.0.9", "time", "tokio", "tower", @@ -925,6 +928,8 @@ dependencies = [ "proc-macro2", "quote", "syn", + "tempfile", + "trybuild", ] [[package]] @@ -993,7 +998,6 @@ checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", - "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1073,7 +1077,6 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ - "futures-channel", "futures-core", "futures-io", "futures-macro", @@ -1116,9 +1119,9 @@ checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "hashbrown" @@ -1132,9 +1135,9 @@ dependencies = [ [[package]] name = 
"hashbrown" -version = "0.15.0" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "hashlink" @@ -1145,24 +1148,12 @@ dependencies = [ "hashbrown 0.14.5", ] -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" - [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - [[package]] name = "hex" version = "0.4.3" @@ -1207,9 +1198,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1277,9 +1268,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbff0a806a4728c99295b254c8838933b5b082d75e3cb70c8dab21fdfbcfa9a" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" dependencies = [ "bytes", "futures-channel", @@ -1296,9 +1287,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b" +checksum = 
"df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-util", @@ -1333,6 +1324,124 @@ dependencies = [ "cc", ] +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "ident_case" version = "1.0.1" @@ -1341,22 +1450,33 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" dependencies = [ - "unicode-bidi", - "unicode-normalization", + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", ] [[package]] name = "indexmap" -version = "2.6.0" 
+version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.15.0", + "hashbrown 0.15.2", ] [[package]] @@ -1378,16 +1498,17 @@ checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -1402,15 +1523,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.161" +version = "0.2.167" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9489c2807c139ffd9c1794f4af0ebe86a828db53ecdc7fea2111d0fed085d1" +checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "8355be11b20d696c8f18f6cc018c4e372165b1fa8126cef092399c9951984ffa" [[package]] name = "libsqlite3-sys" @@ -1429,6 +1550,12 @@ version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +[[package]] +name = "litemap" +version = "0.7.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "lock_api" version = "0.4.12" @@ -1501,11 +1628,10 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi", "libc", "wasi", "windows-sys 0.52.0", @@ -1513,9 +1639,9 @@ dependencies = [ [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -1527,9 +1653,9 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", @@ -1789,18 +1915,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.89" +version = "1.0.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f139b0662de085916d1fb67d2b4169d1addddda1919e696f3252b740b629986e" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" 
dependencies = [ "proc-macro2", ] @@ -1858,9 +1984,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -1875,9 +2001,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid 0.9.6", "digest 0.10.7", @@ -1901,9 +2027,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustix" -version = "0.38.37" +version = "0.38.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" dependencies = [ "bitflags", "errno", @@ -1914,9 +2040,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ryu" @@ -1932,13 +2058,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sea-query" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff504d13b5e4b52fffcf2fb203d0352a5722fa5151696db768933e41e1e591bb" +checksum = 
"085e94f7d7271c0393ac2d164a39994b1dff1b06bc40cd9a0da04f3d672b0fee" dependencies = [ "chrono", "inherent", - "sea-query-derive", ] [[package]] @@ -1952,34 +2077,20 @@ dependencies = [ "sqlx", ] -[[package]] -name = "sea-query-derive" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9834af2c4bd8c5162f00c89f1701fb6886119a88062cf76fe842ea9e232b9839" -dependencies = [ - "darling", - "heck 0.4.1", - "proc-macro2", - "quote", - "syn", - "thiserror", -] - [[package]] name = "serde" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.214" +version = "1.0.217" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", @@ -1988,9 +2099,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.132" +version = "1.0.134" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d726bfaff4b320266d395898905d0eba0345aae23b54aee3a737e260fd46db03" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" dependencies = [ "itoa", "memchr", @@ -1998,16 +2109,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_path_to_error" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" -dependencies = [ - "itoa", - "serde", -] - [[package]] name = "serde_spanned" version = "0.6.8" @@ -2098,9 +2199,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = 
"0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -2181,7 +2282,7 @@ dependencies = [ "sha2 0.10.8", "smallvec", "sqlformat", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -2209,7 +2310,7 @@ checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" dependencies = [ "dotenvy", "either", - "heck 0.5.0", + "heck", "hex", "once_cell", "proc-macro2", @@ -2265,7 +2366,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -2304,7 +2405,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror", + "thiserror 1.0.69", "tracing", "whoami", ] @@ -2333,6 +2434,12 @@ dependencies = [ "url", ] +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + [[package]] name = "stringprep" version = "0.1.5" @@ -2358,9 +2465,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.86" +version = "2.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89275301d38033efb81a6e60e3497e734dfcc62571f2854bf4b16690398824c" +checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058" dependencies = [ "proc-macro2", "quote", @@ -2369,15 +2476,20 @@ dependencies = [ [[package]] name = "sync_wrapper" -version = "0.1.2" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" [[package]] -name = 
"sync_wrapper" -version = "1.0.1" +name = "synstructure" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] [[package]] name = "target-triple" @@ -2387,9 +2499,9 @@ checksum = "42a4d50cdb458045afc8131fd91b64904da29548bcb63c7236e0844936c13078" [[package]] name = "tempfile" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f2c9fc62d0beef6951ccffd757e241266a2c833136efbe35af6cd2567dca5b" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", "fastrand", @@ -2415,18 +2527,38 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "thiserror" -version = "1.0.66" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f072643fd0190df67a8bab670c20ef5d8737177d6ac6b2e9a236cb096206b2cc" +dependencies = [ + "thiserror-impl 2.0.9", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d171f59dbaa811dbbb1aee1e73db92ec2b122911a48e1390dfe327a821ddede" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn", ] [[package]] name = "thiserror-impl" -version = "1.0.66" +version = "2.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b08be0f17bd307950653ce45db00cd31200d82b624b36e181337d9c7d92765b5" +checksum = "7b50fa271071aae2e6ee85f842e2e28ba8cd2c5fb67f11fcb1fd70b276f9e7d4" dependencies = [ "proc-macro2", "quote", @@ -2435,9 +2567,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.36" +version = "0.3.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" dependencies = [ "deranged", "itoa", @@ -2456,14 +2588,24 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" dependencies = [ "num-conv", "time-core", ] +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tinyvec" version = "1.8.0" @@ -2481,9 +2623,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.41.0" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", @@ -2508,9 +2650,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = 
"eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -2553,18 +2695,17 @@ dependencies = [ [[package]] name = "tower" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2873938d487c3cfb9aed7546dc9f2711d867c9f90c46b889989a2cb84eba6b4f" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", "pin-project-lite", - "sync_wrapper 0.1.2", + "sync_wrapper", "tokio", "tower-layer", "tower-service", - "tracing", ] [[package]] @@ -2621,7 +2762,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb6abbfcaf6436ec5a772cd9f965401da12db793e404ae6134eac066fa5a04f3" dependencies = [ "async-trait", - "axum-core", "base64", "futures", "http", @@ -2629,7 +2769,7 @@ dependencies = [ "rand", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tracing", @@ -2649,9 +2789,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -2661,9 +2801,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", @@ -2672,9 +2812,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", ] @@ -2715,9 +2855,9 @@ checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" @@ -2754,15 +2894,27 @@ checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" [[package]] name = "url" -version = "2.5.2" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", "percent-encoding", ] +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + [[package]] name = "utf8parse" version = "0.2.2" @@ -2795,9 +2947,9 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", "once_cell", @@ -2806,13 +2958,12 @@ dependencies = [ 
[[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", "syn", @@ -2821,9 +2972,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2831,9 +2982,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", @@ -2844,9 +2995,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "whoami" @@ -3033,6 +3184,42 @@ dependencies = [ "memchr", ] +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + 
+[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zerocopy" version = "0.7.35" @@ -3054,8 +3241,51 @@ dependencies = [ "syn", ] +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + [[package]] name = "zeroize" version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/Cargo.toml b/Cargo.toml index f111084..25697ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ 
-9,6 +9,7 @@ members = [ "examples/todo-list", "examples/sessions", "examples/admin", + "examples/json", ] resolver = "2" @@ -16,64 +17,66 @@ resolver = "2" edition = "2021" license = "MIT OR Apache-2.0" +[workspace.lints.clippy] +all = "deny" +pedantic = "warn" + [workspace.dependencies] -anyhow = "1.0.92" +anyhow = "1.0.95" askama = "0.12.1" askama_derive = "0.12.5" askama_parser = "0.2.1" async-stream = "0.3" async-trait = "0.1" -axum = "0.7" +axum = { version = "0.7", default-features = false } backtrace = "0.3" -bytes = "1.7" +bytes = "1.9" cargo_toml = "0.20" -chrono = { version = "0.4", features = ["serde"] } -clap = { version = "4", features = ["derive", "env"] } +chrono = { version = "0.4", default-features = false } +clap = "4" clap-verbosity-flag = "2" convert_case = "0.6" darling = "0.20" derive_builder = "0.20" -derive_more = { version = "1", features = ["full"] } +derive_more = "1" env_logger = "0.11" -fake = { version = "2", features = ["derive", "chrono"] } +fake = "3.1" flareon = { path = "flareon" } flareon_codegen = { path = "flareon-codegen" } flareon_macros = { path = "flareon-macros" } form_urlencoded = "1" -futures = "0.3" -futures-core = "0.3" -futures-util = "0.3" +futures = { version = "0.3", default-features = false } +futures-core = { version = "0.3", default-features = false } +futures-util = { version = "0.3", default-features = false } glob = "0.3" hmac = "0.13.0-pre.4" http = "1.1" http-body = "1" http-body-util = "0.1" indexmap = "2" -itertools = "0.13" log = "0.4" -mime_guess = "2" +mime_guess = { version = "2", default-features = false } mockall = "0.13" -password-auth = "1.1.0-pre.1" +password-auth = { version = "1.1.0-pre.1", default-features = false } pin-project-lite = "0.2" prettyplease = "0.2" proc-macro-crate = "3" -proc-macro2 = "1" -quote = "1" -rand = "0.8" -regex = "1.11" +proc-macro2 = { version = "1", default-features = false } +quote = { version = "1", default-features = false } rustversion = "1" -sea-query = 
"0.32.0-rc.2" -sea-query-binder = { version = "0.7.0-rc.2", features = ["sqlx-sqlite", "with-chrono", "runtime-tokio"] } +sea-query = { version = "0.32.0-rc.2", default-features = false } +sea-query-binder = { version = "0.7.0-rc.2", default-features = false } serde = "1" +serde_json = "1" sha2 = "0.11.0-pre.4" -slug = "0.1" -sqlx = { version = "0.8", default-features = false, features = ["macros", "json", "runtime-tokio", "sqlite", "chrono"] } -subtle = "2" -syn = { version = "2", features = ["full", "extra-traits"] } +sqlx = { version = "0.8", default-features = false } +subtle = { version = "2", default-features = false } +syn = { version = "2", default-features = false } sync_wrapper = "1" -thiserror = "1" -time = "0.3.35" -tokio = { version = "1.40", features = ["macros", "rt-multi-thread"] } -tower = "0.5.1" -tower-sessions = "0.13" +tempfile = "3" +thiserror = "2" +time = { version = "0.3.35", default-features = false } +tokio = { version = "1.41", default-features = false } +tower = "0.5.2" +tower-sessions = { version = "0.13", default-features = false } trybuild = { version = "1", features = ["diff"] } diff --git a/README.md b/README.md index 938ca76..4597aed 100644 --- a/README.md +++ b/README.md @@ -28,6 +28,24 @@ built on top of [axum](https://github.com/tokio-rs/axum). * **Secure by default** — security should be opt-out, not opt-in. Flareon takes care of making your web apps secure by default, defending it against common modern web vulnerabilities. You can focus on building your app, not securing it. +## Development + +### Testing + +Tests that require using external databases are ignored by default. 
In order to run them, execute the following in the +root of the repository: + +```shell +docker compose up -d +cargo test --all-features -- --include-ignored +``` + +You can then execute the following command to stop the database: + +```shell +docker compose down +``` + ## License Flareon is licensed under either of the following, at your option: diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 0000000..91c8bb3 --- /dev/null +++ b/clippy.toml @@ -0,0 +1 @@ +doc-valid-idents = ["PostgreSQL", "MySQL", "SQLite"] diff --git a/compose.yml b/compose.yml new file mode 100644 index 0000000..0d10cf6 --- /dev/null +++ b/compose.yml @@ -0,0 +1,20 @@ +services: + mariadb: + image: docker.io/mariadb:11 + container_name: flareon-mariadb + environment: + MARIADB_DATABASE: mysql + MARIADB_USER: flareon + MARIADB_PASSWORD: flareon + MARIADB_ALLOW_EMPTY_ROOT_PASSWORD: 1 + ports: + - "3306:3306" + + postgres: + image: docker.io/postgres:17-alpine + container_name: flareon-postgres + environment: + POSTGRES_USER: flareon + POSTGRES_PASSWORD: flareon + ports: + - "5432:5432" diff --git a/examples/admin/Cargo.toml b/examples/admin/Cargo.toml index 6e74357..2f627c5 100644 --- a/examples/admin/Cargo.toml +++ b/examples/admin/Cargo.toml @@ -6,6 +6,4 @@ description = "Admin panel - Flareon example."
edition = "2021" [dependencies] -env_logger = "0.11.5" flareon = { path = "../../flareon" } -tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] } diff --git a/examples/admin/src/main.rs b/examples/admin/src/main.rs index acc1b51..4c84ade 100644 --- a/examples/admin/src/main.rs +++ b/examples/admin/src/main.rs @@ -22,7 +22,12 @@ impl FlareonApp for HelloApp { } async fn init(&self, context: &mut AppContext) -> flareon::Result<()> { - DatabaseUser::create_user(context.database(), "admin", "admin").await?; + // TODO use transaction + let user = DatabaseUser::get_by_username(context.database(), "admin").await?; + if user.is_none() { + DatabaseUser::create_user(context.database(), "admin", "admin").await?; + } + Ok(()) } @@ -31,10 +36,8 @@ impl FlareonApp for HelloApp { } } -#[tokio::main] -async fn main() { - env_logger::init(); - +#[flareon::main] +async fn main() -> flareon::Result { let flareon_project = FlareonProject::builder() .config( ProjectConfig::builder() @@ -52,10 +55,7 @@ async fn main() { .middleware_with_context(StaticFilesMiddleware::from_app_context) .middleware(SessionMiddleware::new()) .build() - .await - .unwrap(); + .await?; - flareon::run(flareon_project, "127.0.0.1:8000") - .await - .unwrap(); + Ok(flareon_project) } diff --git a/examples/hello-world/Cargo.toml b/examples/hello-world/Cargo.toml index ea41f5e..42c46ba 100644 --- a/examples/hello-world/Cargo.toml +++ b/examples/hello-world/Cargo.toml @@ -7,4 +7,3 @@ edition = "2021" [dependencies] flareon = { path = "../../flareon" } -tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] } diff --git a/examples/hello-world/src/main.rs b/examples/hello-world/src/main.rs index 3d1a69f..1d766af 100644 --- a/examples/hello-world/src/main.rs +++ b/examples/hello-world/src/main.rs @@ -22,15 +22,12 @@ impl FlareonApp for HelloApp { } } -#[tokio::main] -async fn main() { +#[flareon::main] +async fn main() -> flareon::Result { let flareon_project = 
FlareonProject::builder() .register_app_with_views(HelloApp, "") .build() - .await - .unwrap(); + .await?; - flareon::run(flareon_project, "127.0.0.1:8000") - .await - .unwrap(); + Ok(flareon_project) } diff --git a/examples/json/Cargo.toml b/examples/json/Cargo.toml new file mode 100644 index 0000000..c025e73 --- /dev/null +++ b/examples/json/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "example-json" +version = "0.1.0" +publish = false +description = "JSON - Flareon example." +edition = "2021" + +[dependencies] +flareon = { path = "../../flareon" } +serde = "1" diff --git a/examples/json/src/main.rs b/examples/json/src/main.rs new file mode 100644 index 0000000..d547c54 --- /dev/null +++ b/examples/json/src/main.rs @@ -0,0 +1,50 @@ +use flareon::request::{Request, RequestExt}; +use flareon::response::{Response, ResponseExt}; +use flareon::router::{Route, Router}; +use flareon::{FlareonApp, FlareonProject, StatusCode}; +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Deserialize)] +struct AddRequest { + a: i32, + b: i32, +} + +#[derive(Debug, Clone, Serialize)] +struct AddResponse { + result: i32, +} + +async fn add(mut request: Request) -> flareon::Result { + let add_request: AddRequest = request.json().await?; + let response = AddResponse { + result: add_request.a + add_request.b, + }; + + Response::new_json(StatusCode::OK, &response) +} + +struct AddApp; + +impl FlareonApp for AddApp { + fn name(&self) -> &'static str { + env!("CARGO_PKG_NAME") + } + + fn router(&self) -> Router { + Router::with_urls([Route::with_handler("/", add)]) + } +} + +// Test with: +// curl --header "Content-Type: application/json" --request POST --data '{"a": 123, "b": 456}' 'http://127.0.0.1:8080/' + +#[flareon::main] +async fn main() -> flareon::Result { + let flareon_project = FlareonProject::builder() + .register_app_with_views(AddApp, "") + .build() + .await?; + + Ok(flareon_project) +} diff --git a/examples/sessions/Cargo.toml b/examples/sessions/Cargo.toml index 
cad9ad3..fb322f0 100644 --- a/examples/sessions/Cargo.toml +++ b/examples/sessions/Cargo.toml @@ -8,4 +8,3 @@ edition = "2021" [dependencies] askama = "0.12.1" flareon = { path = "../../flareon" } -tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] } diff --git a/examples/sessions/src/main.rs b/examples/sessions/src/main.rs index 7996f2e..7cc5bc2 100644 --- a/examples/sessions/src/main.rs +++ b/examples/sessions/src/main.rs @@ -82,16 +82,13 @@ impl FlareonApp for HelloApp { } } -#[tokio::main] -async fn main() { +#[flareon::main] +async fn main() -> flareon::Result { let flareon_project = FlareonProject::builder() .register_app_with_views(HelloApp, "") .middleware(SessionMiddleware::new()) .build() - .await - .unwrap(); + .await?; - flareon::run(flareon_project, "127.0.0.1:8000") - .await - .unwrap(); + Ok(flareon_project) } diff --git a/examples/todo-list/Cargo.toml b/examples/todo-list/Cargo.toml index 1f441a2..30347be 100644 --- a/examples/todo-list/Cargo.toml +++ b/examples/todo-list/Cargo.toml @@ -8,5 +8,3 @@ edition = "2021" [dependencies] askama = "0.12.1" flareon = { path = "../../flareon" } -tokio = { version = "1.40.0", features = ["macros", "rt-multi-thread"] } -env_logger = "0.11.5" diff --git a/examples/todo-list/src/main.rs b/examples/todo-list/src/main.rs index c599ffb..4d0c2ae 100644 --- a/examples/todo-list/src/main.rs +++ b/examples/todo-list/src/main.rs @@ -1,14 +1,14 @@ mod migrations; use askama::Template; -use flareon::db::migrations::MigrationEngine; -use flareon::db::{model, query, Database, Model}; +use flareon::config::{DatabaseConfig, ProjectConfig}; +use flareon::db::migrations::DynMigration; +use flareon::db::{model, query, Model}; use flareon::forms::Form; use flareon::request::{Request, RequestExt}; use flareon::response::{Response, ResponseExt}; use flareon::router::{Route, Router}; use flareon::{reverse, Body, FlareonApp, FlareonProject, StatusCode}; -use tokio::sync::OnceCell; #[derive(Debug, Clone)] #[model] @@ 
-24,12 +24,8 @@ struct IndexTemplate<'a> { todo_items: Vec, } -static DB: OnceCell = OnceCell::const_new(); - async fn index(request: Request) -> flareon::Result { - let db = DB.get().unwrap(); - - let todo_items = TodoItem::objects().all(db).await?; + let todo_items = TodoItem::objects().all(request.db()).await?; let index_template = IndexTemplate { request: &request, todo_items, @@ -49,12 +45,11 @@ async fn add_todo(mut request: Request) -> flareon::Result { let todo_form = TodoForm::from_request(&mut request).await?.unwrap(); { - let db = DB.get().unwrap(); TodoItem { id: 0, title: todo_form.title, } - .save(db) + .save(request.db()) .await?; } @@ -69,8 +64,9 @@ async fn remove_todo(request: Request) -> flareon::Result { let todo_id = todo_id.parse::().expect("todo_id is not a number"); { - let db = DB.get().unwrap(); - query!(TodoItem, $id == todo_id).delete(db).await?; + query!(TodoItem, $id == todo_id) + .delete(request.db()) + .await?; } Ok(reverse!(request, "index")) @@ -83,6 +79,16 @@ impl FlareonApp for TodoApp { "todo-app" } + fn migrations(&self) -> Vec> { + // TODO: this is way too complicated for the user-facing API + #[allow(trivial_casts)] + migrations::MIGRATIONS + .iter() + .copied() + .map(|x| Box::new(x) as Box) + .collect() + } + fn router(&self) -> Router { Router::with_urls([ Route::with_handler_and_name("/", index, "index"), @@ -92,23 +98,22 @@ impl FlareonApp for TodoApp { } } -#[tokio::main] -async fn main() { - env_logger::init(); - - let db = DB - .get_or_init(|| async { Database::new("sqlite::memory:").await.unwrap() }) - .await; - MigrationEngine::new(migrations::MIGRATIONS.iter().copied()) - .run(db) - .await - .unwrap(); - +#[flareon::main] +async fn main() -> flareon::Result { let todo_project = FlareonProject::builder() + .config( + ProjectConfig::builder() + .database_config( + DatabaseConfig::builder() + .url("sqlite::memory:") + .build() + .unwrap(), + ) + .build(), + ) .register_app_with_views(TodoApp, "") .build() - .await - 
.unwrap(); + .await?; - flareon::run(todo_project, "127.0.0.1:8080").await.unwrap(); + Ok(todo_project) } diff --git a/examples/todo-list/src/migrations/m_0001_initial.rs b/examples/todo-list/src/migrations/m_0001_initial.rs index f1ff280..7ae4043 100644 --- a/examples/todo-list/src/migrations/m_0001_initial.rs +++ b/examples/todo-list/src/migrations/m_0001_initial.rs @@ -5,6 +5,7 @@ pub(super) struct Migration; impl ::flareon::db::migrations::Migration for Migration { const APP_NAME: &'static str = "example-todo-list"; const MIGRATION_NAME: &'static str = "m_0001_initial"; + const DEPENDENCIES: &'static [::flareon::db::migrations::MigrationDependency] = &[]; const OPERATIONS: &'static [::flareon::db::migrations::Operation] = &[::flareon::db::migrations::Operation::create_model() .table_name(::flareon::db::Identifier::new("todo_item")) diff --git a/flareon-cli/Cargo.toml b/flareon-cli/Cargo.toml index a68aab3..58b6180 100644 --- a/flareon-cli/Cargo.toml +++ b/flareon-cli/Cargo.toml @@ -5,11 +5,14 @@ edition.workspace = true license.workspace = true description = "Modern web framework focused on speed and ease of use - CLI tool." 
+[lints] +workspace = true + [dependencies] anyhow.workspace = true cargo_toml.workspace = true chrono.workspace = true -clap.workspace = true +clap = { workspace = true, features = ["derive", "env"] } clap-verbosity-flag.workspace = true darling.workspace = true env_logger.workspace = true @@ -21,3 +24,7 @@ prettyplease.workspace = true proc-macro2 = { workspace = true, features = ["span-locations"] } quote.workspace = true syn.workspace = true + +[dev-dependencies] +tempfile.workspace = true +trybuild.workspace = true diff --git a/flareon-cli/src/lib.rs b/flareon-cli/src/lib.rs new file mode 100644 index 0000000..4b4a2e1 --- /dev/null +++ b/flareon-cli/src/lib.rs @@ -0,0 +1,2 @@ +pub mod migration_generator; +mod utils; diff --git a/flareon-cli/src/main.rs b/flareon-cli/src/main.rs index a6d1f90..663cb09 100644 --- a/flareon-cli/src/main.rs +++ b/flareon-cli/src/main.rs @@ -7,7 +7,7 @@ use anyhow::Context; use clap::{Parser, Subcommand}; use clap_verbosity_flag::Verbosity; -use crate::migration_generator::make_migrations; +use crate::migration_generator::{make_migrations, MigrationGeneratorOptions}; #[derive(Debug, Parser)] #[command(version, about, long_about = None)] @@ -20,7 +20,18 @@ struct Cli { #[derive(Debug, Subcommand)] enum Commands { - MakeMigrations { path: Option }, + MakeMigrations { + /// Path to the crate directory to generate migrations for (default: + /// current directory) + path: Option, + /// Name of the app to use in the migration (default: crate name) + #[arg(long)] + app_name: Option, + /// Directory to write the migrations to (default: migrations/ directory + /// in the crate's src/ directory) + #[arg(long)] + output_dir: Option, + }, } fn main() -> anyhow::Result<()> { @@ -31,9 +42,17 @@ fn main() -> anyhow::Result<()> { .init(); match cli.command { - Commands::MakeMigrations { path } => { + Commands::MakeMigrations { + path, + app_name, + output_dir, + } => { let path = path.unwrap_or_else(|| PathBuf::from(".")); - 
make_migrations(&path).with_context(|| "unable to create migrations")?; + let options = MigrationGeneratorOptions { + app_name, + output_dir, + }; + make_migrations(&path, options).with_context(|| "unable to create migrations")?; } } diff --git a/flareon-cli/src/migration_generator.rs b/flareon-cli/src/migration_generator.rs index 5df9690..7b2e976 100644 --- a/flareon-cli/src/migration_generator.rs +++ b/flareon-cli/src/migration_generator.rs @@ -1,23 +1,23 @@ use std::collections::{HashMap, HashSet}; use std::error::Error; -use std::fmt::{Debug, Display, Formatter}; +use std::fmt::{Debug, Display}; use std::fs::File; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use anyhow::{bail, Context}; use cargo_toml::Manifest; -use darling::{FromDeriveInput, FromMeta}; +use darling::FromMeta; use flareon::db::migrations::{DynMigration, MigrationEngine}; use flareon_codegen::model::{Field, Model, ModelArgs, ModelOpts, ModelType}; -use log::{debug, info}; +use log::{debug, info, warn}; use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{parse_quote, Attribute, ItemStruct, Meta}; +use syn::{parse_quote, Attribute, Meta, UseTree}; use crate::utils::find_cargo_toml; -pub fn make_migrations(path: &Path) -> anyhow::Result<()> { +pub fn make_migrations(path: &Path, options: MigrationGeneratorOptions) -> anyhow::Result<()> { match find_cargo_toml( &path .canonicalize() @@ -31,8 +31,8 @@ pub fn make_migrations(path: &Path) -> anyhow::Result<()> { .with_context(|| "unable to find package in Cargo.toml")? 
.name; - MigrationGenerator::new(cargo_toml_path, crate_name) - .generate_migrations() + MigrationGenerator::new(cargo_toml_path, crate_name, options) + .generate_and_write_migrations() .with_context(|| "unable to generate migrations")?; } None => { @@ -43,38 +43,68 @@ pub fn make_migrations(path: &Path) -> anyhow::Result<()> { Ok(()) } +#[derive(Debug, Clone, Default)] +pub struct MigrationGeneratorOptions { + pub app_name: Option, + pub output_dir: Option, +} + #[derive(Debug)] -struct MigrationGenerator { +pub struct MigrationGenerator { cargo_toml_path: PathBuf, crate_name: String, + options: MigrationGeneratorOptions, } impl MigrationGenerator { #[must_use] - fn new(cargo_toml_path: PathBuf, crate_name: String) -> Self { + pub fn new( + cargo_toml_path: PathBuf, + crate_name: String, + options: MigrationGeneratorOptions, + ) -> Self { Self { cargo_toml_path, crate_name, + options, } } - fn generate_migrations(&mut self) -> anyhow::Result<()> { - let source_file_paths = self.find_source_files()?; - let AppState { models, migrations } = self.process_source_files(&source_file_paths)?; - let migration_processor = MigrationProcessor::new(migrations); + fn generate_and_write_migrations(&mut self) -> anyhow::Result<()> { + let source_files = self.get_source_files()?; + + if let Some(migration) = self.generate_migrations(source_files)? 
{ + self.write_migration(migration)?; + } + + Ok(()) + } + + pub fn generate_migrations( + &mut self, + source_files: Vec, + ) -> anyhow::Result> { + let AppState { models, migrations } = self.process_source_files(source_files)?; + let migration_processor = MigrationProcessor::new(migrations)?; let migration_models = migration_processor.latest_models(); let (modified_models, operations) = self.generate_operations(&models, &migration_models); - if !operations.is_empty() { - self.generate_migration_file( - &migration_processor.next_migration_name()?, + + if operations.is_empty() { + Ok(None) + } else { + let migration_name = migration_processor.next_migration_name()?; + let dependencies = migration_processor.dependencies(); + let content = self.generate_migration_file_content( + &migration_name, &modified_models, + dependencies, operations, - )?; + ); + Ok(Some(MigrationToWrite::new(migration_name, content))) } - Ok(()) } - fn find_source_files(&self) -> anyhow::Result> { + fn get_source_files(&mut self) -> anyhow::Result> { let src_dir = self .cargo_toml_path .parent() @@ -84,44 +114,76 @@ impl MigrationGenerator { .canonicalize() .with_context(|| "unable to canonicalize src dir")?; - let mut source_files = Vec::new(); + let source_file_paths = Self::find_source_files(&src_dir)?; + let source_files = source_file_paths + .into_iter() + .map(|path| { + Self::parse_file(&src_dir, path.clone()) + .with_context(|| format!("unable to parse file: {path:?}")) + }) + .collect::>>()?; + Ok(source_files) + } + + fn find_source_files(src_dir: &Path) -> anyhow::Result> { + let mut paths = Vec::new(); for entry in glob::glob(src_dir.join("**/*.rs").to_str().unwrap()) .with_context(|| "unable to find Rust source files with glob")? 
{ let path = entry?; - source_files.push(path); + paths.push( + path.strip_prefix(src_dir) + .expect("path must be in src dir") + .to_path_buf(), + ); } - Ok(source_files) + Ok(paths) } - fn process_source_files(&self, paths: &Vec) -> anyhow::Result { + fn process_source_files(&self, source_files: Vec) -> anyhow::Result { let mut app_state = AppState::new(); - for path in paths { - self.process_file(path, &mut app_state) + for source_file in source_files { + let path = source_file.path.clone(); + self.process_parsed_file(source_file, &mut app_state) .with_context(|| format!("unable to find models in file: {path:?}"))?; } Ok(app_state) } - fn process_file(&self, path: &PathBuf, app_state: &mut AppState) -> anyhow::Result<()> { - debug!("Parsing file: {:?}", path); - let mut file = File::open(path).with_context(|| "unable to open file")?; + fn parse_file(src_dir: &Path, path: PathBuf) -> anyhow::Result { + let full_path = src_dir.join(&path); + debug!("Parsing file: {:?}", &full_path); + let mut file = File::open(&full_path).with_context(|| "unable to open file")?; let mut src = String::new(); file.read_to_string(&mut src) - .with_context(|| format!("unable to read file: {path:?}"))?; + .with_context(|| format!("unable to read file: {full_path:?}"))?; - let syntax = syn::parse_file(&src).with_context(|| "unable to parse file")?; + SourceFile::parse(path, &src) + } + + fn process_parsed_file( + &self, + SourceFile { + path, + content: file, + }: SourceFile, + app_state: &mut AppState, + ) -> anyhow::Result<()> { + let imports = Self::get_imports(&file, &ModulePath::from_fs_path(&path)); + let import_resolver = SymbolResolver::new(imports); let mut migration_models = Vec::new(); - for item in syntax.items { - if let syn::Item::Struct(item) = item { - for attr in &item.attrs { + for item in file.items { + if let syn::Item::Struct(mut item) = item { + for attr in &item.attrs.clone() { if is_model_attr(attr) { - let args = Self::args_from_attr(path, attr)?; + 
import_resolver.resolve_struct(&mut item); + + let args = Self::args_from_attr(&path, attr)?; let model_in_source = ModelInSource::from_item(item, &args)?; match args.model_type { @@ -152,6 +214,29 @@ impl MigrationGenerator { Ok(()) } + /// Return the list of top-level `use` statements, structs, and constants as + /// a list of [`VisibleSymbol`]s from the file. + fn get_imports(file: &syn::File, module_path: &ModulePath) -> Vec { + let mut imports = Vec::new(); + + for item in &file.items { + match item { + syn::Item::Use(item) => { + imports.append(&mut VisibleSymbol::from_item_use(item, module_path)); + } + syn::Item::Struct(item_struct) => { + imports.push(VisibleSymbol::from_item_struct(item_struct, module_path)); + } + syn::Item::Const(item_const) => { + imports.push(VisibleSymbol::from_item_const(item_const, module_path)); + } + _ => {} + } + } + + imports + } + fn args_from_attr(path: &Path, attr: &Attribute) -> Result { match attr.meta { Meta::Path(_) => { @@ -313,18 +398,23 @@ impl MigrationGenerator { todo!() } - fn generate_migration_file( + fn generate_migration_file_content( &self, migration_name: &str, modified_models: &[ModelInSource], + dependencies: Vec, operations: Vec, - ) -> anyhow::Result<()> { + ) -> String { let operations: Vec<_> = operations .into_iter() .map(|operation| operation.repr()) .collect(); + let dependencies: Vec<_> = dependencies + .into_iter() + .map(|dependency| dependency.repr()) + .collect(); - let app_name = &self.crate_name; + let app_name = self.options.app_name.as_ref().unwrap_or(&self.crate_name); let migration_def = quote! 
{ #[derive(Debug, Copy, Clone)] pub(super) struct Migration; @@ -332,6 +422,9 @@ impl MigrationGenerator { impl ::flareon::db::migrations::Migration for Migration { const APP_NAME: &'static str = #app_name; const MIGRATION_NAME: &'static str = #migration_name; + const DEPENDENCIES: &'static [::flareon::db::migrations::MigrationDependency] = &[ + #(#dependencies,)* + ]; const OPERATIONS: &'static [::flareon::db::migrations::Operation] = &[ #(#operations,)* ]; @@ -346,14 +439,17 @@ impl MigrationGenerator { #(#models)* }; - let migration_path = self - .cargo_toml_path - .parent() - .unwrap() - .join("src") - .join("migrations"); - let migration_file = migration_path.join(format!("{migration_name}.rs")); - let migration_content = Self::generate_migration(migration_def, models_def); + Self::generate_migration(migration_def, models_def) + } + + fn write_migration(&self, migration: MigrationToWrite) -> anyhow::Result<()> { + let src_path = self + .options + .output_dir + .clone() + .unwrap_or(self.cargo_toml_path.parent().unwrap().join("src")); + let migration_path = src_path.join("migrations"); + let migration_file = migration_path.join(format!("{}.rs", migration.name)); std::fs::create_dir_all(&migration_path).with_context(|| { format!( @@ -368,10 +464,9 @@ impl MigrationGenerator { migration_file.display() ) })?; - file.write_all(migration_content.as_bytes()) + file.write_all(migration.content.as_bytes()) .with_context(|| "unable to write migration file")?; info!("Generated migration: {}", migration_file.display()); - Ok(()) } @@ -410,6 +505,30 @@ impl MigrationGenerator { } } +#[derive(Debug, Clone)] +pub struct SourceFile { + path: PathBuf, + content: syn::File, +} + +impl SourceFile { + #[must_use] + pub fn new(path: PathBuf, content: syn::File) -> Self { + assert!( + path.is_relative(), + "path must be relative to the src directory" + ); + Self { path, content } + } + + pub fn parse(path: PathBuf, content: &str) -> anyhow::Result { + Ok(Self::new( + path, + 
syn::parse_file(content).with_context(|| "unable to parse file")?, + )) + } +} + #[derive(Debug, Clone)] struct AppState { /// All the application models found in the source @@ -428,6 +547,290 @@ impl AppState { } } +/// Represents a symbol visible in the current module. This might mean there is +/// a `use` statement for a given type, but also, for instance, the type is +/// defined in the current module. +/// +/// For instance, for `use std::collections::HashMap;` the `VisibleSymbol ` +/// would be: +/// ```ignore +/// # /* +/// VisibleSymbol { +/// alias: "HashMap", +/// full_path: "std::collections::HashMap", +/// kind: VisibleSymbolKind::Use, +/// } +/// # */ +/// ``` +#[derive(Debug, Clone, PartialEq, Eq)] +struct VisibleSymbol { + alias: String, + full_path: String, + kind: VisibleSymbolKind, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum VisibleSymbolKind { + Use, + Struct, + Const, +} + +impl VisibleSymbol { + #[must_use] + fn new(alias: &str, full_path: &str, kind: VisibleSymbolKind) -> Self { + Self { + alias: alias.to_string(), + full_path: full_path.to_string(), + kind, + } + } + + fn full_path_parts(&self) -> impl Iterator { + self.full_path.split("::") + } + + fn new_use(alias: &str, full_path: &str) -> Self { + Self::new(alias, full_path, VisibleSymbolKind::Use) + } + + fn from_item_use(item: &syn::ItemUse, module_path: &ModulePath) -> Vec { + Self::from_tree(&item.tree, module_path) + } + + fn from_item_struct(item: &syn::ItemStruct, module_path: &ModulePath) -> Self { + let ident = item.ident.to_string(); + let full_path = Self::module_path(module_path, &ident); + + Self { + alias: ident, + full_path, + kind: VisibleSymbolKind::Struct, + } + } + + fn from_item_const(item: &syn::ItemConst, module_path: &ModulePath) -> Self { + let ident = item.ident.to_string(); + let full_path = Self::module_path(module_path, &ident); + + Self { + alias: ident, + full_path, + kind: VisibleSymbolKind::Const, + } + } + + fn 
module_path(module_path: &ModulePath, ident: &str) -> String { + format!("{module_path}::{ident}") + } + + fn from_tree(tree: &UseTree, current_module: &ModulePath) -> Vec { + match tree { + UseTree::Path(path) => { + let ident = path.ident.to_string(); + let resolved_path = if ident == "crate" { + current_module.crate_name().to_string() + } else if ident == "self" { + current_module.to_string() + } else if ident == "super" { + current_module.parent().to_string() + } else { + ident + }; + + return Self::from_tree(&path.tree, current_module) + .into_iter() + .map(|import| { + Self::new_use( + &import.alias, + &format!("{}::{}", resolved_path, import.full_path), + ) + }) + .collect(); + } + UseTree::Name(name) => { + let ident = name.ident.to_string(); + return vec![Self::new_use(&ident, &ident)]; + } + UseTree::Rename(rename) => { + return vec![Self::new_use( + &rename.rename.to_string(), + &rename.ident.to_string(), + )]; + } + UseTree::Glob(_) => { + warn!("Glob imports are not supported"); + } + UseTree::Group(group) => { + return group + .items + .iter() + .flat_map(|tree| Self::from_tree(tree, current_module)) + .collect(); + } + } + + vec![] + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct ModulePath { + parts: Vec, +} + +impl ModulePath { + #[must_use] + fn from_fs_path(path: &Path) -> Self { + let mut parts = vec![String::from("crate")]; + + if path == Path::new("lib.rs") || path == Path::new("main.rs") { + return Self { parts }; + } + + parts.append( + &mut path + .components() + .map(|c| { + let component_str = c.as_os_str().to_string_lossy(); + component_str + .strip_suffix(".rs") + .unwrap_or(&component_str) + .to_string() + }) + .collect::>(), + ); + + if parts + .last() + .expect("parts must have at least one component") + == "mod" + { + parts.pop(); + } + + Self { parts } + } + + #[must_use] + fn parent(&self) -> Self { + let mut parts = self.parts.clone(); + parts.pop(); + Self { parts } + } + + #[must_use] + fn crate_name(&self) -> &str { + 
&self.parts[0] + } +} + +impl Display for ModulePath { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.parts.join("::")) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct SymbolResolver { + /// List of imports in the format `"HashMap" -> VisibleSymbol` + symbols: HashMap, +} + +impl SymbolResolver { + #[must_use] + fn new(symbols: Vec) -> Self { + let mut symbol_map = HashMap::new(); + for symbol in symbols { + symbol_map.insert(symbol.alias.clone(), symbol); + } + + Self { + symbols: symbol_map, + } + } + + fn resolve_struct(&self, item: &mut syn::ItemStruct) { + for field in &mut item.fields { + if let syn::Type::Path(path) = &mut field.ty { + self.resolve(path); + } + } + } + + /// Checks the provided `TypePath` and resolves the full type path, if + /// available. + fn resolve(&self, path: &mut syn::TypePath) { + let first_segment = path.path.segments.first(); + + if let Some(first_segment) = first_segment { + if let Some(symbol) = self.symbols.get(&first_segment.ident.to_string()) { + let mut new_segments: Vec<_> = symbol + .full_path_parts() + .map(|s| syn::PathSegment { + ident: syn::Ident::new(s, first_segment.ident.span()), + arguments: syn::PathArguments::None, + }) + .collect(); + + let first_arguments = first_segment.arguments.clone(); + new_segments + .last_mut() + .expect("new_segments must have at least one element") + .arguments = first_arguments; + + new_segments.extend(path.path.segments.iter().skip(1).cloned()); + path.path.segments = syn::punctuated::Punctuated::from_iter(new_segments); + } + + for segment in &mut path.path.segments { + self.resolve_path_arguments(&mut segment.arguments); + } + } + } + + fn resolve_path_arguments(&self, arguments: &mut syn::PathArguments) { + if let syn::PathArguments::AngleBracketed(args) = arguments { + for arg in &mut args.args { + self.resolve_generic_argument(arg); + } + } + } + + fn resolve_generic_argument(&self, arg: &mut syn::GenericArgument) { + if 
let syn::GenericArgument::Type(syn::Type::Path(path)) = arg { + if let Some(new_arg) = self.try_resolve_generic_const(path) { + *arg = new_arg; + } else { + self.resolve(path); + } + } + } + + fn try_resolve_generic_const(&self, path: &syn::TypePath) -> Option { + if path.qself.is_none() && path.path.segments.len() == 1 { + let segment = path + .path + .segments + .first() + .expect("segments have exactly one element"); + if segment.arguments.is_none() { + let ident = segment.ident.to_string(); + if let Some(symbol) = self.symbols.get(&ident) { + if symbol.kind == VisibleSymbolKind::Const { + let path = &symbol.full_path; + return Some(syn::GenericArgument::Const( + syn::parse_str(path).expect("full_path should be a valid path"), + )); + } + } + } + } + + None + } +} + /// Helper struct to process already existing migrations. #[derive(Debug, Clone)] struct MigrationProcessor { @@ -435,10 +838,9 @@ struct MigrationProcessor { } impl MigrationProcessor { - #[must_use] - fn new(mut migrations: Vec) -> Self { - MigrationEngine::sort_migrations(&mut migrations); - Self { migrations } + fn new(mut migrations: Vec) -> anyhow::Result { + MigrationEngine::sort_migrations(&mut migrations)?; + Ok(Self { migrations }) } /// Returns the latest (in the order of applying migrations) versions of the @@ -483,18 +885,30 @@ impl MigrationProcessor { Ok(format!("m_{migration_number:04}_auto_{date_time}")) } + + fn dependencies(&self) -> Vec { + if self.migrations.is_empty() { + return Vec::new(); + } + + let last_migration = self.migrations.last().unwrap(); + vec![DynDependency::Migration { + app: last_migration.app_name.clone(), + migration: last_migration.name.clone(), + }] + } } #[derive(Debug, Clone, PartialEq, Eq)] struct ModelInSource { - model_item: ItemStruct, + model_item: syn::ItemStruct, model: Model, } impl ModelInSource { - fn from_item(item: ItemStruct, args: &ModelArgs) -> anyhow::Result { + fn from_item(item: syn::ItemStruct, args: &ModelArgs) -> anyhow::Result { let 
input: syn::DeriveInput = item.clone().into(); - let opts = ModelOpts::from_derive_input(&input) + let opts = ModelOpts::new_from_derive_input(&input) .map_err(|e| anyhow::anyhow!("cannot parse model: {}", e))?; let model = opts.as_model(args)?; @@ -505,6 +919,19 @@ impl ModelInSource { } } +#[derive(Debug, Clone)] +pub struct MigrationToWrite { + pub name: String, + pub content: String, +} + +impl MigrationToWrite { + #[must_use] + pub fn new(name: String, content: String) -> Self { + Self { name, content } + } +} + #[must_use] fn is_model_attr(attr: &syn::Attribute) -> bool { let path = attr.path(); @@ -533,6 +960,10 @@ impl Repr for Field { if self.primary_key { tokens = quote! { #tokens.primary_key() } } + tokens = quote! { #tokens.set_null(<#ty as ::flareon::db::DatabaseField>::NULLABLE) }; + if self.unique { + tokens = quote! { #tokens.unique() } + } tokens } } @@ -553,11 +984,42 @@ impl DynMigration for Migration { &self.name } + fn dependencies(&self) -> &[flareon::db::migrations::MigrationDependency] { + &[] + } + fn operations(&self) -> &[flareon::db::migrations::Operation] { &[] } } +/// A version of [`flareon::db::migrations::MigrationDependency`] that can be +/// created at runtime and is using codegen types. +/// +/// This is used to generate migration files. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +enum DynDependency { + Migration { app: String, migration: String }, + Model { app: String, model_name: String }, +} + +impl Repr for DynDependency { + fn repr(&self) -> TokenStream { + match self { + Self::Migration { app, migration } => { + quote! { + ::flareon::db::migrations::MigrationDependency::migration(#app, #migration) + } + } + Self::Model { app, model_name } => { + quote! { + ::flareon::db::migrations::MigrationDependency::model(#app, #model_name) + } + } + } + } +} + /// A version of [`flareon::db::migrations::Operation`] that can be created at /// runtime and is using codegen types. 
/// @@ -625,7 +1087,7 @@ impl ParsingError { } impl Display for ParsingError { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message)?; if let Some(source) = &self.source { write!(f, "\n{source}")?; @@ -636,3 +1098,190 @@ impl Display for ParsingError { } impl Error for ParsingError {} + +#[cfg(test)] +mod tests { + use quote::ToTokens; + + use super::*; + + #[test] + fn migration_processor_next_migration_name_empty() { + let migrations = vec![]; + let processor = MigrationProcessor::new(migrations).unwrap(); + + let next_migration_name = processor.next_migration_name().unwrap(); + assert_eq!(next_migration_name, "m_0001_initial"); + } + + #[test] + fn migration_processor_dependencies_empty() { + let migrations = vec![]; + let processor = MigrationProcessor::new(migrations).unwrap(); + + let next_migration_name = processor.dependencies(); + assert_eq!(next_migration_name, vec![]); + } + + #[test] + fn migration_processor_dependencies_previous() { + let migrations = vec![Migration { + app_name: "app1".to_string(), + name: "m0001_initial".to_string(), + models: vec![], + }]; + let processor = MigrationProcessor::new(migrations).unwrap(); + + let next_migration_name = processor.dependencies(); + assert_eq!( + next_migration_name, + vec![DynDependency::Migration { + app: "app1".to_string(), + migration: "m0001_initial".to_string(), + }] + ); + } + + #[test] + fn imports() { + let source = r" +use std::collections::HashMap; +use std::error::Error as StdError; +use std::fmt::{Debug, Display, Formatter}; +use std::fs::*; +use rand as r; +use super::MyModel; +use crate::MyOtherModel; +use self::MyThirdModel; + +struct MyFourthModel {} + +const MY_CONSTANT: u8 = 42; + "; + + let file = SourceFile::parse(PathBuf::from("foo/bar.rs").clone(), source).unwrap(); + let imports = + MigrationGenerator::get_imports(&file.content, &ModulePath::from_fs_path(&file.path)); + + let 
expected = vec![ + VisibleSymbol { + alias: "HashMap".to_string(), + full_path: "std::collections::HashMap".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "StdError".to_string(), + full_path: "std::error::Error".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "Debug".to_string(), + full_path: "std::fmt::Debug".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "Display".to_string(), + full_path: "std::fmt::Display".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "Formatter".to_string(), + full_path: "std::fmt::Formatter".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "r".to_string(), + full_path: "rand".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "MyModel".to_string(), + full_path: "crate::foo::MyModel".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "MyOtherModel".to_string(), + full_path: "crate::MyOtherModel".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "MyThirdModel".to_string(), + full_path: "crate::foo::bar::MyThirdModel".to_string(), + kind: VisibleSymbolKind::Use, + }, + VisibleSymbol { + alias: "MyFourthModel".to_string(), + full_path: "crate::foo::bar::MyFourthModel".to_string(), + kind: VisibleSymbolKind::Struct, + }, + VisibleSymbol { + alias: "MY_CONSTANT".to_string(), + full_path: "crate::foo::bar::MY_CONSTANT".to_string(), + kind: VisibleSymbolKind::Const, + }, + ]; + assert_eq!(imports, expected); + } + + #[test] + fn import_resolver() { + let resolver = SymbolResolver::new(vec![ + VisibleSymbol::new_use("MyType", "crate::models::MyType"), + VisibleSymbol::new_use("HashMap", "std::collections::HashMap"), + ]); + + let path = &mut parse_quote!(MyType); + resolver.resolve(path); + assert_eq!( + quote!(crate::models::MyType).to_string(), + path.into_token_stream().to_string() + ); + + let path = &mut 
parse_quote!(HashMap); + resolver.resolve(path); + assert_eq!( + quote!(std::collections::HashMap).to_string(), + path.into_token_stream().to_string() + ); + + let path = &mut parse_quote!(Option); + resolver.resolve(path); + assert_eq!( + quote!(Option).to_string(), + path.into_token_stream().to_string() + ); + } + + #[test] + fn import_resolver_resolve_struct() { + let resolver = SymbolResolver::new(vec![ + VisibleSymbol::new_use("MyType", "crate::models::MyType"), + VisibleSymbol::new_use("HashMap", "std::collections::HashMap"), + VisibleSymbol::new_use("LimitedString", "flareon::db::LimitedString"), + VisibleSymbol::new( + "MY_CONSTANT", + "crate::constants::MY_CONSTANT", + VisibleSymbolKind::Const, + ), + ]); + + let mut actual = parse_quote! { + struct Example { + field_1: MyType, + field_2: HashMap, + field_3: Option, + field_4: LimitedString, + } + }; + resolver.resolve_struct(&mut actual); + let expected = quote! { + struct Example { + field_1: crate::models::MyType, + field_2: std::collections::HashMap, + field_3: Option, + field_4: flareon::db::LimitedString<{ crate::constants::MY_CONSTANT }>, + } + }; + assert_eq!(actual.into_token_stream().to_string(), expected.to_string()); + } +} diff --git a/flareon-cli/tests/migration_generator.rs b/flareon-cli/tests/migration_generator.rs new file mode 100644 index 0000000..fadbc33 --- /dev/null +++ b/flareon-cli/tests/migration_generator.rs @@ -0,0 +1,43 @@ +use std::path::PathBuf; + +use flareon_cli::migration_generator::{ + MigrationGenerator, MigrationGeneratorOptions, MigrationToWrite, SourceFile, +}; + +/// Test that the migration generator can generate a create model migration for +/// a given model which compiles successfully. 
+#[test] +#[cfg_attr(miri, ignore)] // unsupported operation: extern static `pidfd_spawnp` is not supported by Miri +fn create_model_compile_test() { + let mut generator = MigrationGenerator::new( + PathBuf::from("Cargo.toml"), + String::from("my_crate"), + MigrationGeneratorOptions::default(), + ); + let src = include_str!("migration_generator/create_model.rs"); + let source_files = vec![SourceFile::parse(PathBuf::from("main.rs"), src).unwrap()]; + + let migration_opt = generator.generate_migrations(source_files).unwrap(); + let MigrationToWrite { + name: migration_name, + content: migration_content, + } = migration_opt.unwrap(); + + let source_with_migrations = format!( + r" +{src} + +mod migrations {{ + mod {migration_name} {{ + {migration_content} + }} +}}" + ); + + let temp_dir = tempfile::tempdir().unwrap(); + let test_path = temp_dir.path().join("main.rs"); + std::fs::write(&test_path, source_with_migrations).unwrap(); + + let t = trybuild::TestCases::new(); + t.pass(&test_path); +} diff --git a/flareon-cli/tests/migration_generator/create_model.rs b/flareon-cli/tests/migration_generator/create_model.rs new file mode 100644 index 0000000..a249d4d --- /dev/null +++ b/flareon-cli/tests/migration_generator/create_model.rs @@ -0,0 +1,11 @@ +use flareon::db::{model, LimitedString}; + +pub const FIELD_LEN: u32 = 64; + +#[model] +struct MyModel { + field_1: String, + field_2: LimitedString, +} + +fn main() {} diff --git a/flareon-codegen/Cargo.toml b/flareon-codegen/Cargo.toml index 917de09..84f0ca4 100644 --- a/flareon-codegen/Cargo.toml +++ b/flareon-codegen/Cargo.toml @@ -5,6 +5,9 @@ edition.workspace = true license.workspace = true description = "Modern web framework focused on speed and ease of use - code generation utils." 
+[lints] +workspace = true + [dependencies] convert_case.workspace = true darling.workspace = true diff --git a/flareon-codegen/src/model.rs b/flareon-codegen/src/model.rs index 8a77843..a9e4e8f 100644 --- a/flareon-codegen/src/model.rs +++ b/flareon-codegen/src/model.rs @@ -23,10 +23,22 @@ pub enum ModelType { #[darling(forward_attrs(allow, doc, cfg), supports(struct_named))] pub struct ModelOpts { pub ident: syn::Ident, + pub generics: syn::Generics, pub data: darling::ast::Data, } impl ModelOpts { + pub fn new_from_derive_input(input: &syn::DeriveInput) -> Result { + let opts = Self::from_derive_input(input)?; + if !opts.generics.params.is_empty() { + return Err( + darling::Error::custom("generics in models are not supported") + .with_span(&opts.generics), + ); + } + Ok(opts) + } + /// Get the fields of the struct. /// /// # Panics @@ -79,10 +91,11 @@ impl ModelOpts { } #[derive(Debug, Clone, FromField)] -#[darling(attributes(form))] +#[darling(attributes(model))] pub struct FieldOpts { pub ident: Option, pub ty: syn::Type, + pub unique: darling::util::Flag, } impl FieldOpts { @@ -108,6 +121,7 @@ impl FieldOpts { auto_value: is_auto, primary_key: is_primary_key, null: false, + unique: self.unique.is_present(), } } } @@ -136,4 +150,89 @@ pub struct Field { pub auto_value: bool, pub primary_key: bool, pub null: bool, + pub unique: bool, +} + +#[cfg(test)] +mod tests { + use syn::parse_quote; + + use super::*; + + #[test] + fn model_args_default() { + let args: ModelArgs = Default::default(); + assert_eq!(args.model_type, ModelType::Application); + assert!(args.table_name.is_none()); + } + + #[test] + fn model_type_default() { + let model_type: ModelType = Default::default(); + assert_eq!(model_type, ModelType::Application); + } + + #[test] + fn model_opts_fields() { + let input: syn::DeriveInput = parse_quote! 
{ + struct TestModel { + id: i32, + name: String, + } + }; + let opts = ModelOpts::new_from_derive_input(&input).unwrap(); + let fields = opts.fields(); + assert_eq!(fields.len(), 2); + assert_eq!(fields[0].ident.as_ref().unwrap().to_string(), "id"); + assert_eq!(fields[1].ident.as_ref().unwrap().to_string(), "name"); + } + + #[test] + fn model_opts_as_model() { + let input: syn::DeriveInput = parse_quote! { + struct TestModel { + id: i32, + name: String, + } + }; + let opts = ModelOpts::new_from_derive_input(&input).unwrap(); + let args = ModelArgs::default(); + let model = opts.as_model(&args).unwrap(); + assert_eq!(model.name.to_string(), "TestModel"); + assert_eq!(model.table_name, "test_model"); + assert_eq!(model.fields.len(), 2); + assert_eq!(model.field_count(), 2); + } + + #[test] + fn model_opts_as_model_migration() { + let input: syn::DeriveInput = parse_quote! { + #[model(model_type = "migration")] + struct TestModel { + id: i32, + name: String, + } + }; + let opts = ModelOpts::new_from_derive_input(&input).unwrap(); + let args = ModelArgs::from_meta(&input.attrs.first().unwrap().meta).unwrap(); + let err = opts.as_model(&args).unwrap_err(); + assert_eq!( + err.to_string(), + "migration model names must start with an underscore" + ); + } + + #[test] + fn field_opts_as_field() { + let input: syn::Field = parse_quote! 
{ + #[model(unique)] + name: String + }; + let field_opts = FieldOpts::from_field(&input).unwrap(); + let field = field_opts.as_field(); + assert_eq!(field.field_name.to_string(), "name"); + assert_eq!(field.column_name, "name"); + assert_eq!(field.ty, parse_quote!(String)); + assert!(field.unique); + } } diff --git a/flareon-macros/Cargo.toml b/flareon-macros/Cargo.toml index 8fc1b4f..7d1eb94 100644 --- a/flareon-macros/Cargo.toml +++ b/flareon-macros/Cargo.toml @@ -12,12 +12,15 @@ proc-macro = true name = "tests" path = "tests/compile_tests.rs" +[lints] +workspace = true + [dependencies] darling.workspace = true flareon_codegen.workspace = true proc-macro-crate.workspace = true -proc-macro2.workspace = true -quote.workspace = true +proc-macro2 = { workspace = true, features = ["proc-macro"] } +quote = { workspace = true, features = ["proc-macro"] } syn.workspace = true [dev-dependencies] diff --git a/flareon-macros/src/dbtest.rs b/flareon-macros/src/dbtest.rs new file mode 100644 index 0000000..679354a --- /dev/null +++ b/flareon-macros/src/dbtest.rs @@ -0,0 +1,60 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; +use syn::ItemFn; + +pub(super) fn fn_to_dbtest(test_function_decl: ItemFn) -> syn::Result { + let test_fn = &test_function_decl.sig.ident; + let sqlite_ident = format_ident!("{}_sqlite", test_fn); + let postgres_ident = format_ident!("{}_postgres", test_fn); + let mysql_ident = format_ident!("{}_mysql", test_fn); + + if test_function_decl.sig.inputs.len() != 1 { + return Err(syn::Error::new_spanned( + test_function_decl.sig.inputs, + "Database test function must have exactly one argument", + )); + } + + let result = quote! 
{ + #[::tokio::test] + #[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` + async fn #sqlite_ident() { + let mut database = flareon::test::TestDatabase::new_sqlite().await.unwrap(); + + #test_fn(&mut database).await; + + database.cleanup().await.unwrap(); + + #test_function_decl + } + + #[ignore] + #[::tokio::test] + async fn #postgres_ident() { + let mut database = flareon::test::TestDatabase::new_postgres(stringify!(#test_fn)) + .await + .unwrap(); + + #test_fn(&mut database).await; + + database.cleanup().await.unwrap(); + + #test_function_decl + } + + #[ignore] + #[::tokio::test] + async fn #mysql_ident() { + let mut database = flareon::test::TestDatabase::new_mysql(stringify!(#test_fn)) + .await + .unwrap(); + + #test_fn(&mut database).await; + + database.cleanup().await.unwrap(); + + #test_function_decl + } + }; + Ok(result) +} diff --git a/flareon-macros/src/lib.rs b/flareon-macros/src/lib.rs index c642238..338f6bb 100644 --- a/flareon-macros/src/lib.rs +++ b/flareon-macros/src/lib.rs @@ -1,4 +1,6 @@ +mod dbtest; mod form; +mod main_fn; mod model; mod query; @@ -7,9 +9,11 @@ use darling::Error; use proc_macro::TokenStream; use proc_macro_crate::crate_name; use quote::quote; -use syn::parse_macro_input; +use syn::{parse_macro_input, ItemFn}; +use crate::dbtest::fn_to_dbtest; use crate::form::impl_form_for_struct; +use crate::main_fn::fn_to_flareon_main; use crate::model::impl_model_for_struct; use crate::query::{query_to_tokens, Query}; @@ -101,8 +105,8 @@ pub fn model(args: TokenStream, input: TokenStream) -> TokenStream { return TokenStream::from(Error::from(e).write_errors()); } }; - let ast = parse_macro_input!(input as syn::DeriveInput); - let token_stream = impl_model_for_struct(&attr_args, &ast); + let mut ast = parse_macro_input!(input as syn::DeriveInput); + let token_stream = impl_model_for_struct(&attr_args, &mut ast); token_stream.into() } @@ -112,6 +116,22 @@ pub fn query(input: TokenStream) -> 
TokenStream { query_to_tokens(query_input).into() } +#[proc_macro_attribute] +pub fn dbtest(_args: TokenStream, input: TokenStream) -> TokenStream { + let fn_input = parse_macro_input!(input as ItemFn); + fn_to_dbtest(fn_input) + .unwrap_or_else(syn::Error::into_compile_error) + .into() +} + +#[proc_macro_attribute] +pub fn main(_args: TokenStream, input: TokenStream) -> TokenStream { + let fn_input = parse_macro_input!(input as ItemFn); + fn_to_flareon_main(fn_input) + .unwrap_or_else(syn::Error::into_compile_error) + .into() +} + pub(crate) fn flareon_ident() -> proc_macro2::TokenStream { let flareon_crate = crate_name("flareon").expect("flareon is not present in `Cargo.toml`"); match flareon_crate { diff --git a/flareon-macros/src/main_fn.rs b/flareon-macros/src/main_fn.rs new file mode 100644 index 0000000..dee1695 --- /dev/null +++ b/flareon-macros/src/main_fn.rs @@ -0,0 +1,40 @@ +use proc_macro2::TokenStream; +use quote::quote; +use syn::ItemFn; + +use crate::flareon_ident; + +pub(super) fn fn_to_flareon_main(main_function_decl: ItemFn) -> syn::Result { + let mut new_main_decl = main_function_decl.clone(); + new_main_decl.sig.ident = + syn::Ident::new("__flareon_main", main_function_decl.sig.ident.span()); + + if !main_function_decl.sig.inputs.is_empty() { + return Err(syn::Error::new_spanned( + main_function_decl.sig.inputs, + "flareon::main function must have zero arguments", + )); + } + + let crate_name = flareon_ident(); + let result = quote! 
{ + fn main() { + let body = async { + let project: #crate_name::FlareonProject = __flareon_main().await.unwrap(); + #crate_name::run_cli(project).await.unwrap(); + + #new_main_decl + }; + #[allow(clippy::expect_used)] + { + return #crate_name::__private::tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .expect("Failed building the Runtime") + .block_on(body); + } + } + + }; + Ok(result) +} diff --git a/flareon-macros/src/model.rs b/flareon-macros/src/model.rs index 37deb03..fc0ebc9 100644 --- a/flareon-macros/src/model.rs +++ b/flareon-macros/src/model.rs @@ -1,13 +1,18 @@ use darling::ast::NestedMeta; -use darling::{FromDeriveInput, FromMeta}; +use darling::FromMeta; use flareon_codegen::model::{Field, Model, ModelArgs, ModelOpts}; use proc_macro2::{Ident, TokenStream}; use quote::{format_ident, quote, ToTokens, TokenStreamExt}; +use syn::punctuated::Punctuated; +use syn::Token; use crate::flareon_ident; #[must_use] -pub(super) fn impl_model_for_struct(args: &[NestedMeta], ast: &syn::DeriveInput) -> TokenStream { +pub(super) fn impl_model_for_struct( + args: &[NestedMeta], + ast: &mut syn::DeriveInput, +) -> TokenStream { let args = match ModelArgs::from_list(args) { Ok(v) => v, Err(e) => { @@ -15,7 +20,7 @@ pub(super) fn impl_model_for_struct(args: &[NestedMeta], ast: &syn::DeriveInput) } }; - let opts = match ModelOpts::from_derive_input(ast) { + let opts = match ModelOpts::new_from_derive_input(ast) { Ok(val) => val, Err(err) => { return err.write_errors(); @@ -30,7 +35,36 @@ pub(super) fn impl_model_for_struct(args: &[NestedMeta], ast: &syn::DeriveInput) }; let builder = ModelBuilder::from_model(model); - quote!(#ast #builder) + let attrs = &ast.attrs; + let vis = &ast.vis; + let ident = &ast.ident; + + // Filter out our helper attributes so they don't get passed to the struct + let fields = match &mut ast.data { + syn::Data::Struct(data) => &mut data.fields, + _ => panic!("Only structs are supported"), + }; + let fields = 
remove_helper_field_attributes(fields); + + quote!( + #(#attrs)* + #vis struct #ident { + #fields + } + #builder + ) +} + +fn remove_helper_field_attributes(fields: &mut syn::Fields) -> &Punctuated { + match fields { + syn::Fields::Named(fields) => { + for field in &mut fields.named { + field.attrs.retain(|a| !a.path().is_ident("model")); + } + &fields.named + } + _ => panic!("Only named fields are supported"), + } } #[derive(Debug)] @@ -77,19 +111,20 @@ impl ModelBuilder { let ty = &field.ty; let index = self.fields_as_columns.len(); let column_name = &field.column_name; - let is_auto = field.auto_value; - let is_null = field.null; { let mut field_as_column = quote!(#orm_ident::Column::new( #orm_ident::Identifier::new(#column_name) )); - if is_auto { + if field.auto_value { field_as_column.append_all(quote!(.auto())); } - if is_null { + if field.null { field_as_column.append_all(quote!(.null())); } + if field.unique { + field_as_column.append_all(quote!(.unique())); + } self.fields_as_columns.push(field_as_column); } diff --git a/flareon-macros/tests/compile_tests.rs b/flareon-macros/tests/compile_tests.rs index 00fe9c7..e51a809 100644 --- a/flareon-macros/tests/compile_tests.rs +++ b/flareon-macros/tests/compile_tests.rs @@ -1,5 +1,6 @@ #[rustversion::attr(not(nightly), ignore)] #[test] +#[cfg_attr(miri, ignore)] // unsupported operation: extern static `pidfd_spawnp` is not supported by Miri fn derive_form() { let t = trybuild::TestCases::new(); t.pass("tests/ui/derive_form.rs"); @@ -7,16 +8,19 @@ fn derive_form() { #[rustversion::attr(not(nightly), ignore)] #[test] +#[cfg_attr(miri, ignore)] // unsupported operation: extern static `pidfd_spawnp` is not supported by Miri fn attr_model() { let t = trybuild::TestCases::new(); t.pass("tests/ui/attr_model.rs"); t.compile_fail("tests/ui/attr_model_migration_invalid_name.rs"); t.compile_fail("tests/ui/attr_model_tuple.rs"); t.compile_fail("tests/ui/attr_model_enum.rs"); + 
t.compile_fail("tests/ui/attr_model_generic.rs"); } #[rustversion::attr(not(nightly), ignore)] #[test] +#[cfg_attr(miri, ignore)] // unsupported operation: extern static `pidfd_spawnp` is not supported by Miri fn func_query() { let t = trybuild::TestCases::new(); t.pass("tests/ui/func_query.rs"); @@ -25,3 +29,12 @@ fn func_query() { t.compile_fail("tests/ui/func_query_double_field.rs"); t.compile_fail("tests/ui/func_query_invalid_field.rs"); } + +#[rustversion::attr(not(nightly), ignore)] +#[test] +#[cfg_attr(miri, ignore)] // unsupported operation: extern static `pidfd_spawnp` is not supported by Miri +fn attr_main() { + let t = trybuild::TestCases::new(); + t.pass("tests/ui/attr_main.rs"); + t.compile_fail("tests/ui/attr_main_args.rs"); +} diff --git a/flareon-macros/tests/ui/attr_main.rs b/flareon-macros/tests/ui/attr_main.rs new file mode 100644 index 0000000..0b028c4 --- /dev/null +++ b/flareon-macros/tests/ui/attr_main.rs @@ -0,0 +1,6 @@ +use flareon::FlareonProject; + +#[flareon::main] +async fn main() -> flareon::Result { + std::process::exit(0); +} diff --git a/flareon-macros/tests/ui/attr_main_args.rs b/flareon-macros/tests/ui/attr_main_args.rs new file mode 100644 index 0000000..e69d3ab --- /dev/null +++ b/flareon-macros/tests/ui/attr_main_args.rs @@ -0,0 +1,4 @@ +#[flareon::main] +async fn main(arg: i32) -> flareon::Result { + std::process::exit(0); +} diff --git a/flareon-macros/tests/ui/attr_main_args.stderr b/flareon-macros/tests/ui/attr_main_args.stderr new file mode 100644 index 0000000..f5cf275 --- /dev/null +++ b/flareon-macros/tests/ui/attr_main_args.stderr @@ -0,0 +1,11 @@ +error: flareon::main function must have zero arguments + --> tests/ui/attr_main_args.rs:2:15 + | +2 | async fn main(arg: i32) -> flareon::Result { + | ^^^^^^^^ + +error[E0601]: `main` function not found in crate `$CRATE` + --> tests/ui/attr_main_args.rs:4:2 + | +4 | } + | ^ consider adding a `main` function to `$DIR/tests/ui/attr_main_args.rs` diff --git 
a/flareon-macros/tests/ui/attr_model_generic.rs b/flareon-macros/tests/ui/attr_model_generic.rs new file mode 100644 index 0000000..4cb57ed --- /dev/null +++ b/flareon-macros/tests/ui/attr_model_generic.rs @@ -0,0 +1,9 @@ +use flareon::db::model; + +#[model] +struct MyModel { + id: i32, + some_data: T, +} + +fn main() {} diff --git a/flareon-macros/tests/ui/attr_model_generic.stderr b/flareon-macros/tests/ui/attr_model_generic.stderr new file mode 100644 index 0000000..9066edb --- /dev/null +++ b/flareon-macros/tests/ui/attr_model_generic.stderr @@ -0,0 +1,5 @@ +error: generics in models are not supported + --> tests/ui/attr_model_generic.rs:4:15 + | +4 | struct MyModel { + | ^^^ diff --git a/flareon/Cargo.toml b/flareon/Cargo.toml index 526e80d..777fa70 100644 --- a/flareon/Cargo.toml +++ b/flareon/Cargo.toml @@ -5,17 +5,21 @@ edition.workspace = true license.workspace = true description = "Modern web framework focused on speed and ease of use." +[lints] +workspace = true + [dependencies] askama.workspace = true askama_derive.workspace = true askama_parser.workspace = true async-trait.workspace = true -axum.workspace = true +axum = { workspace = true, features = ["http1", "tokio"] } backtrace.workspace = true bytes.workspace = true chrono.workspace = true derive_builder.workspace = true -derive_more.workspace = true +derive_more = { workspace = true, features = ["debug", "deref", "display", "from"] } +fake = { workspace = true, optional = true, features = ["derive", "chrono"] } flareon_macros.workspace = true form_urlencoded.workspace = true futures-core.workspace = true @@ -27,28 +31,31 @@ http-body-util.workspace = true indexmap.workspace = true log.workspace = true mime_guess.workspace = true -mockall.workspace = true -password-auth.workspace = true +password-auth = { workspace = true, features = ["std", "argon2"] } pin-project-lite.workspace = true -regex.workspace = true -sea-query-binder.workspace = true -sea-query.workspace = true -serde.workspace = true 
+sea-query = { workspace = true } +sea-query-binder = { workspace = true, features = ["with-chrono", "runtime-tokio"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true, optional = true } sha2.workspace = true -sqlx.workspace = true -subtle.workspace = true +sqlx = { workspace = true, features = ["runtime-tokio", "chrono"] } +subtle = { workspace = true, features = ["std"] } sync_wrapper.workspace = true thiserror.workspace = true time.workspace = true -tokio.workspace = true -tower.workspace = true -tower-sessions.workspace = true +tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } +tower = { workspace = true, features = ["util"] } +tower-sessions = { workspace = true, features = ["memory-store"] } [dev-dependencies] async-stream.workspace = true fake.workspace = true futures.workspace = true -rand.workspace = true +mockall.workspace = true + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] [package.metadata.cargo-machete] ignored = [ @@ -59,3 +66,12 @@ ignored = [ # time requires version 0.3.35 to work with the latest versions of Rust, but we don't use it directly "time", ] + +[features] +default = ["sqlite", "postgres", "mysql", "json"] +fake = ["dep:fake"] +db = [] +sqlite = ["db", "sea-query/backend-sqlite", "sea-query-binder/sqlx-sqlite", "sqlx/sqlite"] +postgres = ["db", "sea-query/backend-postgres", "sea-query-binder/sqlx-postgres", "sqlx/postgres"] +mysql = ["db", "sea-query/backend-mysql", "sea-query-binder/sqlx-mysql", "sqlx/mysql"] +json = ["serde_json"] diff --git a/flareon/build.rs b/flareon/build.rs new file mode 100644 index 0000000..caef367 --- /dev/null +++ b/flareon/build.rs @@ -0,0 +1,9 @@ +#[cfg(all( + feature = "db", + not(any(feature = "sqlite", feature = "postgres", feature = "mysql")) +))] +compile_error!("feature \"db\" requires one of: \"sqlite\", \"postgres\", \"mysql\" to be enabled"); + +fn main() { + // do nothing; this only checks the feature 
flags +} diff --git a/flareon/src/admin.rs b/flareon/src/admin.rs index bc374b0..3de225b 100644 --- a/flareon/src/admin.rs +++ b/flareon/src/admin.rs @@ -10,7 +10,6 @@ use async_trait::async_trait; use bytes::Bytes; use derive_more::Debug; -use crate::auth::db::DatabaseUserCredentials; use crate::auth::AuthRequestExt; use crate::forms::fields::Password; use crate::forms::{ @@ -100,14 +99,18 @@ async fn login(mut request: Request) -> flareon::Result { } async fn authenticate(request: &mut Request, login_form: LoginForm) -> flareon::Result { + #[cfg(feature = "db")] let user = request - .authenticate(&DatabaseUserCredentials::new( + .authenticate(&crate::auth::db::DatabaseUserCredentials::new( login_form.username, // TODO unify auth::Password and forms::fields::Password flareon::auth::Password::new(login_form.password.into_string()), )) .await?; + #[cfg(not(any(feature = "sqlite", feature = "postgres", feature = "mysql")))] + let mut user: Option> = None; + if let Some(user) = user { request.login(user).await?; Ok(true) diff --git a/flareon/src/auth.rs b/flareon/src/auth.rs index 6e906b3..28efb14 100644 --- a/flareon/src/auth.rs +++ b/flareon/src/auth.rs @@ -6,6 +6,7 @@ //! //! For the default way to store users in the database, see the [`db`] module. +#[cfg(feature = "db")] pub mod db; use std::any::Any; @@ -14,7 +15,6 @@ use std::sync::Arc; use async_trait::async_trait; use chrono::{DateTime, FixedOffset}; -use flareon::config::SecretKey; #[cfg(test)] use mockall::automock; use password_auth::VerifyError; @@ -22,7 +22,8 @@ use serde::{Deserialize, Serialize}; use subtle::ConstantTimeEq; use thiserror::Error; -use crate::db::impl_sqlite::SqliteValueRef; +use crate::config::SecretKey; +#[cfg(feature = "db")] use crate::db::{ColumnType, DatabaseField, FromDbValue, SqlxValueRef, ToDbValue}; use crate::request::{Request, RequestExt}; @@ -285,7 +286,12 @@ impl PasswordHash { /// Returns an error if the password hash is invalid. 
pub fn new>(hash: T) -> Result { let hash = hash.into(); + + if hash.len() > MAX_PASSWORD_HASH_LENGTH as usize { + return Err(AuthError::PasswordHashInvalid); + } password_auth::is_hash_obsolete(&hash).map_err(|_| AuthError::PasswordHashInvalid)?; + Ok(Self(hash)) } @@ -303,6 +309,8 @@ impl PasswordHash { #[must_use] pub fn from_password(password: &Password) -> Self { let hash = password_auth::generate_hash(password.as_str()); + + assert!(hash.len() <= MAX_PASSWORD_HASH_LENGTH as usize); Self(hash) } @@ -393,17 +401,37 @@ impl Debug for PasswordHash { } } +const MAX_PASSWORD_HASH_LENGTH: u32 = 128; + +#[cfg(feature = "db")] impl DatabaseField for PasswordHash { - // TODO change to length-limiting type - const TYPE: ColumnType = ColumnType::Text; + const TYPE: ColumnType = ColumnType::String(MAX_PASSWORD_HASH_LENGTH); } +#[cfg(feature = "db")] impl FromDbValue for PasswordHash { - fn from_sqlite(value: SqliteValueRef) -> flareon::db::Result { + #[cfg(feature = "sqlite")] + fn from_sqlite(value: crate::db::impl_sqlite::SqliteValueRef) -> flareon::db::Result { + PasswordHash::new(value.get::()?).map_err(flareon::db::DatabaseError::value_decode) + } + + #[cfg(feature = "postgres")] + fn from_postgres( + value: crate::db::impl_postgres::PostgresValueRef, + ) -> flareon::db::Result { + PasswordHash::new(value.get::()?).map_err(flareon::db::DatabaseError::value_decode) + } + + #[cfg(feature = "mysql")] + fn from_mysql(value: crate::db::impl_mysql::MySqlValueRef) -> crate::db::Result + where + Self: Sized, + { PasswordHash::new(value.get::()?).map_err(flareon::db::DatabaseError::value_decode) } } +#[cfg(feature = "db")] impl ToDbValue for PasswordHash { fn to_sea_query_value(&self) -> sea_query::Value { self.0.clone().into() @@ -697,6 +725,28 @@ pub trait AuthBackend: Send + Sync { ) -> Result>>; } +#[derive(Debug, Copy, Clone)] +pub struct NoAuthBackend; + +#[async_trait] +impl AuthBackend for NoAuthBackend { + async fn authenticate( + &self, + _request: &Request, + 
_credentials: &(dyn Any + Send + Sync), + ) -> Result>> { + Ok(None) + } + + async fn get_by_id( + &self, + _request: &Request, + _id: UserId, + ) -> Result>> { + Ok(None) + } +} + #[cfg(test)] mod tests { use std::sync::Mutex; @@ -707,27 +757,6 @@ mod tests { use crate::config::ProjectConfig; use crate::test::TestRequestBuilder; - struct NoUserAuthBackend; - - #[async_trait] - impl AuthBackend for NoUserAuthBackend { - async fn authenticate( - &self, - _request: &Request, - _credentials: &(dyn Any + Send + Sync), - ) -> Result>> { - Ok(None) - } - - async fn get_by_id( - &self, - _request: &Request, - _id: UserId, - ) -> Result>> { - Ok(None) - } - } - struct MockAuthBackend { return_user: F, } @@ -813,6 +842,7 @@ mod tests { } #[test] + #[cfg_attr(miri, ignore)] fn password_hash() { let password = Password::new("password".to_string()); let hash = PasswordHash::from_password(&password); @@ -844,6 +874,7 @@ mod tests { const TEST_PASSWORD_HASH: &str = "$argon2id$v=19$m=19456,t=2,p=1$QAAI3EMU1eTLT9NzzBhQjg$khq4zuHsEyk9trGjuqMBFYnTbpqkmn0wXGxFn1nkPBc"; #[test] + #[cfg_attr(miri, ignore)] fn password_hash_debug() { let hash = PasswordHash::new(TEST_PASSWORD_HASH).unwrap(); assert_eq!( @@ -853,6 +884,7 @@ mod tests { } #[test] + #[cfg_attr(miri, ignore)] fn password_hash_verify() { let password = Password::new("password"); let hash = PasswordHash::from_password(&password); @@ -869,6 +901,7 @@ mod tests { } #[test] + #[cfg_attr(miri, ignore)] fn password_hash_str() { let hash = PasswordHash::new(TEST_PASSWORD_HASH).unwrap(); assert_eq!(hash.as_str(), TEST_PASSWORD_HASH); @@ -881,7 +914,7 @@ mod tests { #[tokio::test] async fn user_anonymous() { - let mut request = test_request_with_auth_backend(NoUserAuthBackend {}); + let mut request = test_request_with_auth_backend(NoAuthBackend {}); let user = request.user().await.unwrap(); assert!(!user.is_authenticated()); @@ -942,7 +975,7 @@ mod tests { /// session (can happen if the user is deleted from the database) 
#[tokio::test] async fn logout_on_invalid_user_id_in_session() { - let mut request = test_request_with_auth_backend(NoUserAuthBackend {}); + let mut request = test_request_with_auth_backend(NoAuthBackend {}); request .session_mut() diff --git a/flareon/src/auth/db.rs b/flareon/src/auth/db.rs index e1b680a..f791321 100644 --- a/flareon/src/auth/db.rs +++ b/flareon/src/auth/db.rs @@ -9,6 +9,7 @@ use async_trait::async_trait; use flareon_macros::model; use hmac::{Hmac, KeyInit, Mac}; use sha2::Sha512; +use thiserror::Error; use crate::admin::{AdminModel, AdminModelManager, DefaultAdminModelManager}; use crate::auth::{ @@ -17,24 +18,34 @@ use crate::auth::{ }; use crate::config::SecretKey; use crate::db::migrations::DynMigration; -use crate::db::{query, DatabaseBackend, Model}; +use crate::db::{query, DatabaseBackend, LimitedString, Model}; use crate::request::{Request, RequestExt}; use crate::FlareonApp; pub mod migrations; +pub(crate) const MAX_USERNAME_LENGTH: u32 = 255; + /// A user stored in the database. 
#[derive(Debug, Clone)] #[model] pub struct DatabaseUser { id: i64, - username: String, + #[model(unique)] + username: LimitedString, password: PasswordHash, } +#[derive(Debug, Clone, Error)] +#[non_exhaustive] +pub enum CreateUserError { + #[error("username is too long (max {MAX_USERNAME_LENGTH} characters, got {0})")] + UsernameTooLong(usize), +} + impl DatabaseUser { #[must_use] - pub fn new(id: i64, username: String, password: &Password) -> Self { + pub fn new(id: i64, username: LimitedString, password: &Password) -> Self { Self { id, username, @@ -73,13 +84,14 @@ impl DatabaseUser { /// /// # #[tokio::main] /// # async fn main() -> flareon::Result<()> { - /// # use flareon::test::{TestDatabaseBuilder, TestRequestBuilder}; + /// # use flareon::test::{TestDatabase, TestRequestBuilder}; + /// # let mut test_database = TestDatabase::new_sqlite().await?; + /// # test_database.with_auth().run_migrations().await; /// # let request = TestRequestBuilder::get("/") - /// # .with_db_auth(std::sync::Arc::new( - /// # TestDatabaseBuilder::new().with_auth().build().await, - /// # )) + /// # .with_db_auth(test_database.database()) /// # .build(); /// # view(&request).await?; + /// # test_database.cleanup().await?; /// # Ok(()) /// # } /// ``` @@ -88,7 +100,13 @@ impl DatabaseUser { username: T, password: U, ) -> Result { - let mut user = Self::new(0, username.into(), &password.into()); + let username = username.into(); + let username_length = username.len(); + let username = LimitedString::::new(username).map_err(|_| { + AuthError::backend_error(CreateUserError::UsernameTooLong(username_length)) + })?; + + let mut user = Self::new(0, username, &password.into()); user.save(db).await.map_err(AuthError::backend_error)?; Ok(user) @@ -105,11 +123,31 @@ impl DatabaseUser { Ok(db_user) } + pub async fn get_by_username( + db: &DB, + username: &str, + ) -> Result> { + let username = LimitedString::::new(username).map_err(|_| { + 
AuthError::backend_error(CreateUserError::UsernameTooLong(username.len())) + })?; + let db_user = query!(DatabaseUser, $username == username) + .get(db) + .await + .map_err(AuthError::backend_error)?; + + Ok(db_user) + } + pub async fn authenticate( db: &DB, credentials: &DatabaseUserCredentials, ) -> Result> { - let user = query!(DatabaseUser, $username == credentials.username()) + let username = credentials.username(); + let username_limited = LimitedString::::new(username.to_string()) + .map_err(|_| { + AuthError::backend_error(CreateUserError::UsernameTooLong(username.len())) + })?; + let user = query!(DatabaseUser, $username == username_limited) .get(db) .await .map_err(AuthError::backend_error)?; @@ -338,8 +376,13 @@ mod tests { use crate::db::MockDatabaseBackend; #[test] + #[cfg_attr(miri, ignore)] fn session_auth_hash() { - let user = DatabaseUser::new(1, "testuser".to_string(), &Password::new("password123")); + let user = DatabaseUser::new( + 1, + LimitedString::new("testuser").unwrap(), + &Password::new("password123"), + ); let secret_key = SecretKey::new(b"supersecretkey"); let hash = user.session_auth_hash(&secret_key); @@ -347,8 +390,13 @@ mod tests { } #[test] + #[cfg_attr(miri, ignore)] fn database_user_traits() { - let user = DatabaseUser::new(1, "testuser".to_string(), &Password::new("password123")); + let user = DatabaseUser::new( + 1, + LimitedString::new("testuser").unwrap(), + &Password::new("password123"), + ); let user_ref: &dyn User = &user; assert_eq!(user_ref.id(), Some(UserId::Int(1))); assert_eq!(user_ref.username(), Some("testuser")); @@ -360,6 +408,7 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] async fn create_user() { let mut mock_db = MockDatabaseBackend::new(); mock_db @@ -376,9 +425,14 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] async fn get_by_id() { let mut mock_db = MockDatabaseBackend::new(); - let user = DatabaseUser::new(1, "testuser".to_string(), &Password::new("password123")); + let user = 
DatabaseUser::new( + 1, + LimitedString::new("testuser").unwrap(), + &Password::new("password123"), + ); mock_db .expect_get::() @@ -392,9 +446,14 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] async fn authenticate() { let mut mock_db = MockDatabaseBackend::new(); - let user = DatabaseUser::new(1, "testuser".to_string(), &Password::new("password123")); + let user = DatabaseUser::new( + 1, + LimitedString::new("testuser").unwrap(), + &Password::new("password123"), + ); mock_db .expect_get::() @@ -410,6 +469,7 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] async fn authenticate_non_existing() { let mut mock_db = MockDatabaseBackend::new(); @@ -426,9 +486,14 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] async fn authenticate_invalid_password() { let mut mock_db = MockDatabaseBackend::new(); - let user = DatabaseUser::new(1, "testuser".to_string(), &Password::new("password123")); + let user = DatabaseUser::new( + 1, + LimitedString::new("testuser").unwrap(), + &Password::new("password123"), + ); mock_db .expect_get::() diff --git a/flareon/src/auth/db/migrations/m_0001_initial.rs b/flareon/src/auth/db/migrations/m_0001_initial.rs index 29baa36..821822d 100644 --- a/flareon/src/auth/db/migrations/m_0001_initial.rs +++ b/flareon/src/auth/db/migrations/m_0001_initial.rs @@ -1,38 +1,53 @@ -//! Generated by flareon CLI 0.1.0 on 2024-10-04 19:55:15+00:00 - -use crate::auth::PasswordHash; +//! 
Generated by flareon CLI 0.1.0 on 2024-11-12 15:49:48+00:00 #[derive(Debug, Copy, Clone)] pub(super) struct Migration; impl ::flareon::db::migrations::Migration for Migration { const APP_NAME: &'static str = "flareon_auth"; const MIGRATION_NAME: &'static str = "m_0001_initial"; - const OPERATIONS: &'static [::flareon::db::migrations::Operation] = - &[::flareon::db::migrations::Operation::create_model() + const DEPENDENCIES: &'static [::flareon::db::migrations::MigrationDependency] = &[]; + const OPERATIONS: &'static [::flareon::db::migrations::Operation] = &[ + ::flareon::db::migrations::Operation::create_model() .table_name(::flareon::db::Identifier::new("database_user")) - .fields(&[ - ::flareon::db::migrations::Field::new( - ::flareon::db::Identifier::new("id"), - ::TYPE, - ) - .auto() - .primary_key(), - ::flareon::db::migrations::Field::new( - ::flareon::db::Identifier::new("username"), - ::TYPE, - ), - ::flareon::db::migrations::Field::new( - ::flareon::db::Identifier::new("password"), - ::TYPE, - ), - ]) - .build()]; + .fields( + &[ + ::flareon::db::migrations::Field::new( + ::flareon::db::Identifier::new("id"), + ::TYPE, + ) + .auto() + .primary_key() + .set_null(::NULLABLE), + ::flareon::db::migrations::Field::new( + ::flareon::db::Identifier::new("username"), + as ::flareon::db::DatabaseField>::TYPE, + ) + .set_null( + as ::flareon::db::DatabaseField>::NULLABLE, + ) + .unique(), + ::flareon::db::migrations::Field::new( + ::flareon::db::Identifier::new("password"), + ::TYPE, + ) + .set_null( + ::NULLABLE, + ), + ], + ) + .build(), + ]; } #[derive(::core::fmt::Debug)] #[::flareon::db::model(model_type = "migration")] struct _DatabaseUser { id: i64, - username: String, - password: PasswordHash, + #[model(unique)] + username: crate::db::LimitedString<{ crate::auth::db::MAX_USERNAME_LENGTH }>, + password: crate::auth::PasswordHash, } diff --git a/flareon/src/config.rs b/flareon/src/config.rs index 92396c1..2f4e872 100644 --- a/flareon/src/config.rs +++ 
b/flareon/src/config.rs @@ -16,6 +16,7 @@ use derive_builder::Builder; use derive_more::Debug; use subtle::ConstantTimeEq; +#[cfg(feature = "db")] use crate::auth::db::DatabaseUserBackend; use crate::auth::AuthBackend; @@ -63,6 +64,7 @@ pub struct ProjectConfig { #[debug("..")] #[builder(setter(custom))] auth_backend: Arc, + #[cfg(feature = "db")] database_config: DatabaseConfig, } @@ -81,17 +83,20 @@ impl ProjectConfigBuilder { .auth_backend .clone() .unwrap_or_else(default_auth_backend), + #[cfg(feature = "db")] database_config: self.database_config.clone().unwrap_or_default(), } } } +#[cfg(feature = "db")] #[derive(Debug, Clone, Builder)] pub struct DatabaseConfig { #[builder(setter(into))] url: String, } +#[cfg(feature = "db")] impl DatabaseConfig { #[must_use] pub fn builder() -> DatabaseConfigBuilder { @@ -104,6 +109,7 @@ impl DatabaseConfig { } } +#[cfg(feature = "db")] impl Default for DatabaseConfig { fn default() -> Self { Self { @@ -119,7 +125,15 @@ impl Default for ProjectConfig { } fn default_auth_backend() -> Arc { - Arc::new(DatabaseUserBackend::new()) + #[cfg(feature = "db")] + { + Arc::new(DatabaseUserBackend::new()) + } + + #[cfg(not(any(feature = "sqlite", feature = "postgres", feature = "mysql")))] + { + Arc::new(flareon::auth::NoAuthBackend) + } } impl ProjectConfig { @@ -144,6 +158,7 @@ impl ProjectConfig { } #[must_use] + #[cfg(feature = "db")] pub fn database_config(&self) -> &DatabaseConfig { &self.database_config } diff --git a/flareon/src/db.rs b/flareon/src/db.rs index c5cbccc..7e2c835 100644 --- a/flareon/src/db.rs +++ b/flareon/src/db.rs @@ -4,9 +4,15 @@ //! the error types that can occur when interacting with the database. 
mod fields; +#[cfg(feature = "mysql")] +pub mod impl_mysql; +#[cfg(feature = "postgres")] +pub mod impl_postgres; +#[cfg(feature = "sqlite")] pub mod impl_sqlite; pub mod migrations; pub mod query; +mod sea_query_db; use std::fmt::Write; use std::hash::Hash; @@ -19,11 +25,17 @@ use log::debug; use mockall::automock; use query::Query; use sea_query::{Iden, SchemaStatementBuilder, SimpleExpr}; -use sea_query_binder::SqlxBinder; +use sea_query_binder::{SqlxBinder, SqlxValues}; use sqlx::{Type, TypeInfo}; use thiserror::Error; +#[cfg(feature = "mysql")] +use crate::db::impl_mysql::{DatabaseMySql, MySqlRow, MySqlValueRef}; +#[cfg(feature = "postgres")] +use crate::db::impl_postgres::{DatabasePostgres, PostgresRow, PostgresValueRef}; +#[cfg(feature = "sqlite")] use crate::db::impl_sqlite::{DatabaseSqlite, SqliteRow, SqliteValueRef}; +use crate::db::migrations::ColumnTypeMapper; /// An error that can occur when interacting with the database. #[derive(Debug, Error)] @@ -42,6 +54,9 @@ pub enum DatabaseError { /// Error when decoding database value. #[error("Error when decoding database value: {0}")] ValueDecode(Box), + /// Error when applying migrations. + #[error("Error when applying migrations: {0}")] + MigrationError(#[from] migrations::MigrationEngineError), } impl DatabaseError { @@ -161,6 +176,7 @@ impl Iden for &Identifier { pub struct Column { name: Identifier, auto_value: bool, + unique: bool, null: bool, } @@ -171,6 +187,7 @@ impl Column { Self { name, auto_value: false, + unique: false, null: false, } } @@ -182,6 +199,13 @@ impl Column { self } + /// Marks the column unique. + #[must_use] + pub const fn unique(mut self) -> Self { + self.unique = true; + self + } + /// Marks the column as nullable. #[must_use] pub const fn null(mut self) -> Self { @@ -192,9 +216,15 @@ impl Column { /// A row structure that holds the data of a single row retrieved from the /// database. 
+#[non_exhaustive] #[derive(Debug)] pub enum Row { + #[cfg(feature = "sqlite")] Sqlite(SqliteRow), + #[cfg(feature = "postgres")] + Postgres(PostgresRow), + #[cfg(feature = "mysql")] + MySql(MySqlRow), } impl Row { @@ -210,30 +240,82 @@ impl Row { /// returned by the database. pub fn get(&self, index: usize) -> Result { let result = match self { + #[cfg(feature = "sqlite")] Row::Sqlite(sqlite_row) => sqlite_row .get_raw(index) .and_then(|value| T::from_sqlite(value))?, + #[cfg(feature = "postgres")] + Row::Postgres(postgres_row) => postgres_row + .get_raw(index) + .and_then(|value| T::from_postgres(value))?, + #[cfg(feature = "mysql")] + Row::MySql(mysql_row) => mysql_row + .get_raw(index) + .and_then(|value| T::from_mysql(value))?, }; Ok(result) } } +/// A trait denoting that some type can be used as a field in a database. pub trait DatabaseField: FromDbValue + ToDbValue { + const NULLABLE: bool = false; + + /// The type of the column in the database as one of the variants of + /// the [`ColumnType`] enum. + /// + /// # Changing the column type after initial implementation + /// + /// Note that this should never be changed after the type is implemented. + /// The migration generator is unable to detect a change in the column type + /// and will not generate a migration for it. If the column type needs to + /// be changed, a manual migration should be written, or a new type should + /// be created. + /// + /// This is especially important for types that are stored as fixed-length + /// strings in the database, as the migration generator cannot detect a + /// change in the string length. For this reason, it's recommended to use + /// the [`LimitedString`] type for fixed-length strings (which uses const + /// generics, so each change in the length will be a new type) instead of + /// a custom type with a fixed length. const TYPE: ColumnType; } /// A trait for converting a database value to a Rust value. 
pub trait FromDbValue { - /// Converts the given `SQLite` database value to a Rust value. + /// Converts the given SQLite database value to a Rust value. /// /// # Errors /// /// This method can return an error if the value is not compatible with the /// Rust type. + #[cfg(feature = "sqlite")] fn from_sqlite(value: SqliteValueRef) -> Result where Self: Sized; + + /// Converts the given PostgreSQL database value to a Rust value. + /// + /// # Errors + /// + /// This method can return an error if the value is not compatible with the + /// Rust type. + #[cfg(feature = "postgres")] + fn from_postgres(value: PostgresValueRef) -> Result + where + Self: Sized; + + /// Converts the given MySQL database value to a Rust value. + /// + /// # Errors + /// + /// This method can return an error if the value is not compatible with the + /// Rust type. + #[cfg(feature = "mysql")] + fn from_mysql(value: MySqlValueRef) -> Result + where + Self: Sized; } /// A trait for converting a Rust value to a database value. 
@@ -245,6 +327,12 @@ pub trait ToDbValue: Send + Sync { fn to_sea_query_value(&self) -> sea_query::Value; } +impl ToDbValue for &T { + fn to_sea_query_value(&self) -> sea_query::Value { + (*self).to_sea_query_value() + } +} + trait SqlxRowRef { type ValueRef<'r>: SqlxValueRef<'r> where @@ -290,7 +378,12 @@ pub struct Database { #[derive(Debug)] enum DatabaseImpl { + #[cfg(feature = "sqlite")] Sqlite(DatabaseSqlite), + #[cfg(feature = "postgres")] + Postgres(DatabasePostgres), + #[cfg(feature = "mysql")] + MySql(DatabaseMySql), } impl Database { @@ -318,17 +411,35 @@ impl Database { /// ``` pub async fn new>(url: T) -> Result { let url = url.into(); - let db = if url.starts_with("sqlite:") { + + #[cfg(feature = "sqlite")] + if url.starts_with("sqlite:") { let inner = DatabaseSqlite::new(&url).await?; - Self { + return Ok(Self { _url: url, inner: DatabaseImpl::Sqlite(inner), - } - } else { - todo!("Other databases are not supported yet"); - }; + }); + } + + #[cfg(feature = "postgres")] + if url.starts_with("postgresql:") { + let inner = DatabasePostgres::new(&url).await?; + return Ok(Self { + _url: url, + inner: DatabaseImpl::Postgres(inner), + }); + } - Ok(db) + #[cfg(feature = "mysql")] + if url.starts_with("mysql:") { + let inner = DatabaseMySql::new(&url).await?; + return Ok(Self { + _url: url, + inner: DatabaseImpl::MySql(inner), + }); + } + + panic!("Unsupported database URL: {url}"); } /// Closes the database connection. 
@@ -353,9 +464,14 @@ impl Database { /// db.close().await.unwrap(); /// } /// ``` - pub async fn close(self) -> Result<()> { - match self.inner { + pub async fn close(&self) -> Result<()> { + match &self.inner { + #[cfg(feature = "sqlite")] DatabaseImpl::Sqlite(inner) => inner.close().await, + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner.close().await, + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.close().await, } } @@ -393,13 +509,14 @@ impl Database { .map(|value| SimpleExpr::Value(value.to_sea_query_value())) .collect::>(), )? - .returning_col(Identifier::new("id")) .to_owned(); - let row = self.fetch_one(&insert_statement).await?; - let id = row.get::(0)?; + let statement_result = self.execute_statement(&insert_statement).await?; - debug!("Inserted row with id: {}", id); + debug!( + "Inserted row; rows affected: {}", + statement_result.rows_affected() + ); Ok(()) } @@ -497,12 +614,28 @@ impl Database { self.execute_statement(&delete).await } - async fn fetch_one(&self, statement: &T) -> Result - where - T: SqlxBinder, - { + pub async fn raw(&self, query: &str) -> Result { + self.raw_with(query, &[]).await + } + + pub async fn raw_with( + &self, + query: &str, + values: &[&(dyn ToDbValue)], + ) -> Result { + let values = values + .iter() + .map(ToDbValue::to_sea_query_value) + .collect::>(); + let values = SqlxValues(sea_query::Values(values)); + let result = match &self.inner { - DatabaseImpl::Sqlite(inner) => Row::Sqlite(inner.fetch_one(statement).await?), + #[cfg(feature = "sqlite")] + DatabaseImpl::Sqlite(inner) => inner.raw_with(query, values).await?, + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner.raw_with(query, values).await?, + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.raw_with(query, values).await?, }; Ok(result) @@ -513,7 +646,14 @@ impl Database { T: SqlxBinder, { let result = match &self.inner { + #[cfg(feature = "sqlite")] DatabaseImpl::Sqlite(inner) => 
inner.fetch_option(statement).await?.map(Row::Sqlite), + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => { + inner.fetch_option(statement).await?.map(Row::Postgres) + } + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.fetch_option(statement).await?.map(Row::MySql), }; Ok(result) @@ -524,12 +664,27 @@ impl Database { T: SqlxBinder, { let result = match &self.inner { + #[cfg(feature = "sqlite")] DatabaseImpl::Sqlite(inner) => inner .fetch_all(statement) .await? .into_iter() .map(Row::Sqlite) .collect(), + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner + .fetch_all(statement) + .await? + .into_iter() + .map(Row::Postgres) + .collect(), + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner + .fetch_all(statement) + .await? + .into_iter() + .map(Row::MySql) + .collect(), }; Ok(result) @@ -537,10 +692,15 @@ impl Database { async fn execute_statement(&self, statement: &T) -> Result where - T: SqlxBinder, + T: SqlxBinder + Sync, { let result = match &self.inner { + #[cfg(feature = "sqlite")] DatabaseImpl::Sqlite(inner) => inner.execute_statement(statement).await?, + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner.execute_statement(statement).await?, + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.execute_statement(statement).await?, }; Ok(result) @@ -551,13 +711,31 @@ impl Database { statement: T, ) -> Result { let result = match &self.inner { + #[cfg(feature = "sqlite")] DatabaseImpl::Sqlite(inner) => inner.execute_schema(statement).await?, + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner.execute_schema(statement).await?, + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.execute_schema(statement).await?, }; Ok(result) } } +impl ColumnTypeMapper for Database { + fn sea_query_column_type_for(&self, column_type: ColumnType) -> sea_query::ColumnType { + match &self.inner { + #[cfg(feature = "sqlite")] + DatabaseImpl::Sqlite(inner) 
=> inner.sea_query_column_type_for(column_type), + #[cfg(feature = "postgres")] + DatabaseImpl::Postgres(inner) => inner.sea_query_column_type_for(column_type), + #[cfg(feature = "mysql")] + DatabaseImpl::MySql(inner) => inner.sea_query_column_type_for(column_type), + } + } +} + #[cfg_attr(test, automock)] #[async_trait] pub trait DatabaseBackend: Send + Sync { @@ -603,6 +781,7 @@ pub struct StatementResult { impl StatementResult { /// Creates a new statement result with the given number of rows affected. + #[cfg(test)] #[must_use] pub(crate) fn new(rows_affected: RowsNum) -> Self { Self { rows_affected } @@ -619,6 +798,106 @@ impl StatementResult { #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deref, Display)] pub struct RowsNum(pub u64); +/// A wrapper over a string that has a limited length. +/// +/// This type is used to represent a string that has a limited length in the +/// database. The length is specified as a const generic parameter. The string +/// is stored as a normal string in memory, but it is checked when it is +/// created to ensure that it is not longer than the specified limit. +/// +/// # Database +/// +/// This type is represented by the `VARCHAR` type in the database, with the +/// maximum length same as the limit specified in the type. 
+/// +/// # Examples +/// +/// ``` +/// use flareon::db::LimitedString; +/// +/// let limited_string = LimitedString::<5>::new("test").unwrap(); +/// assert_eq!(limited_string, "test"); +/// +/// let limited_string = LimitedString::<5>::new("too long"); +/// assert!(limited_string.is_err()); +/// ``` +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Deref)] +pub struct LimitedString(String); + +impl PartialEq<&str> for LimitedString { + fn eq(&self, other: &&str) -> bool { + self.0 == *other + } +} +impl PartialEq for LimitedString { + fn eq(&self, other: &String) -> bool { + self.0 == *other + } +} +impl PartialEq> for &str { + fn eq(&self, other: &LimitedString) -> bool { + *self == other.0 + } +} +impl PartialEq> for String { + fn eq(&self, other: &LimitedString) -> bool { + *self == other.0 + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Error)] +#[error("string is too long ({length} > {LIMIT})")] +pub struct NewLimitedStringError { + pub(crate) length: u32, +} + +impl LimitedString { + pub fn new( + value: impl Into, + ) -> std::result::Result> { + let value = value.into(); + let length = value.len() as u32; + + if length > LIMIT { + return Err(NewLimitedStringError { length }); + } + Ok(Self(value)) + } +} + +#[cfg(feature = "fake")] +impl fake::Dummy for LimitedString { + fn dummy_with_rng(len: &usize, rng: &mut R) -> Self { + use fake::rand::Rng; + + assert!( + *len <= LIMIT as usize, + concat!( + "len must be less than or equal to LIMIT (", + stringify!(LIMIT), + ")" + ) + ); + + let str: String = rng + .sample_iter(&fake::rand::distributions::Alphanumeric) + .take(*len) + .map(char::from) + .collect(); + Self::new(str).unwrap() + } +} + +#[cfg(feature = "fake")] +impl fake::Dummy for LimitedString { + fn dummy_with_rng(_: &fake::Faker, rng: &mut R) -> Self { + use fake::Fake; + + let len: usize = (0..LIMIT as usize).fake_with_rng(rng); + len.fake_with_rng(rng) + } +} + /// A type that represents a column type in the 
database. #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum ColumnType { @@ -636,10 +915,10 @@ pub enum ColumnType { Time, Date, DateTime, - Timestamp, - TimestampWithTimeZone, + DateTimeWithTimeZone, Text, Blob, + String(u32), } #[cfg(test)] @@ -665,4 +944,41 @@ mod tests { let column_null = column.null(); assert!(column_null.null); } + + #[test] + fn limited_string_new_within_limit() { + let limited_string = LimitedString::<10>::new("short"); + assert!(limited_string.is_ok()); + assert_eq!(limited_string.unwrap(), "short"); + } + + #[test] + fn limited_string_new_exceeds_limit() { + let limited_string = LimitedString::<5>::new("too long"); + + assert!(limited_string.is_err()); + let error = limited_string.unwrap_err(); + assert_eq!(error.to_string(), "string is too long (8 > 5)"); + } + + #[test] + fn limited_string_new_exact_limit() { + let limited_string = LimitedString::<5>::new("exact"); + assert!(limited_string.is_ok()); + assert_eq!(limited_string.unwrap(), "exact"); + } + + #[test] + fn limited_string_eq() { + assert_eq!(LimitedString::<5>::new("test").unwrap(), "test"); + assert_eq!("test", LimitedString::<5>::new("test").unwrap()); + assert_eq!( + LimitedString::<5>::new("test").unwrap(), + String::from("test"), + ); + assert_eq!( + String::from("test"), + LimitedString::<5>::new("test").unwrap(), + ); + } } diff --git a/flareon/src/db/fields.rs b/flareon/src/db/fields.rs index b31c28c..47abc69 100644 --- a/flareon/src/db/fields.rs +++ b/flareon/src/db/fields.rs @@ -1,7 +1,58 @@ use flareon::db::DatabaseField; use sea_query::Value; -use crate::db::{ColumnType, FromDbValue, Result, SqliteValueRef, SqlxValueRef, ToDbValue}; +#[cfg(feature = "mysql")] +use crate::db::impl_mysql::MySqlValueRef; +#[cfg(feature = "postgres")] +use crate::db::impl_postgres::PostgresValueRef; +#[cfg(feature = "sqlite")] +use crate::db::impl_sqlite::SqliteValueRef; +use crate::db::{ + ColumnType, DatabaseError, FromDbValue, LimitedString, Result, 
SqlxValueRef, ToDbValue, +}; + +macro_rules! impl_from_sqlite_default { + () => { + #[cfg(feature = "sqlite")] + fn from_sqlite(value: SqliteValueRef) -> Result { + value.get::() + } + }; +} + +macro_rules! impl_from_postgres_default { + () => { + #[cfg(feature = "postgres")] + fn from_postgres(value: PostgresValueRef) -> Result { + value.get::() + } + }; +} + +macro_rules! impl_from_mysql_default { + () => { + #[cfg(feature = "mysql")] + fn from_mysql(value: MySqlValueRef) -> Result { + value.get::() + } + }; +} + +macro_rules! impl_to_db_value_default { + ($ty:ty) => { + impl ToDbValue for $ty { + fn to_sea_query_value(&self) -> Value { + self.clone().into() + } + } + + impl ToDbValue for Option<$ty> { + fn to_sea_query_value(&self) -> Value { + self.clone().into() + } + } + }; +} macro_rules! impl_db_field { ($ty:ty, $column_type:ident) => { @@ -10,34 +61,77 @@ macro_rules! impl_db_field { } impl FromDbValue for $ty { - fn from_sqlite(value: SqliteValueRef) -> Result { - value.get::<$ty>() + impl_from_sqlite_default!(); + + impl_from_postgres_default!(); + + impl_from_mysql_default!(); + } + + impl FromDbValue for Option<$ty> { + impl_from_sqlite_default!(); + + impl_from_postgres_default!(); + + impl_from_mysql_default!(); + } + + impl_to_db_value_default!($ty); + }; +} + +macro_rules! 
impl_db_field_with_postgres_int_cast { + ($dest_ty:ty, $src_ty:ty, $column_type:ident) => { + impl DatabaseField for $dest_ty { + const TYPE: ColumnType = ColumnType::$column_type; + } + + impl FromDbValue for $dest_ty { + impl_from_sqlite_default!(); + + impl_from_mysql_default!(); + + #[cfg(feature = "postgres")] + fn from_postgres(value: PostgresValueRef) -> Result { + #[allow(clippy::cast_possible_truncation)] + #[allow(clippy::cast_sign_loss)] + value.get::<$src_ty>().map(|v| v as $dest_ty) } } - impl ToDbValue for $ty { - fn to_sea_query_value(&self) -> Value { - self.clone().into() + impl FromDbValue for Option<$dest_ty> { + impl_from_sqlite_default!(); + + impl_from_mysql_default!(); + + #[cfg(feature = "postgres")] + fn from_postgres(value: PostgresValueRef) -> Result { + #[allow(clippy::cast_possible_truncation)] + #[allow(clippy::cast_sign_loss)] + value + .get::>() + .map(|v| v.map(|v| v as $dest_ty)) } } + + impl_to_db_value_default!($dest_ty); }; } impl_db_field!(bool, Boolean); -impl_db_field!(i8, TinyInteger); impl_db_field!(i16, SmallInteger); impl_db_field!(i32, Integer); impl_db_field!(i64, BigInteger); -impl_db_field!(u8, TinyUnsignedInteger); -impl_db_field!(u16, SmallUnsignedInteger); -impl_db_field!(u32, UnsignedInteger); -impl_db_field!(u64, BigUnsignedInteger); +impl_db_field_with_postgres_int_cast!(i8, i16, TinyInteger); +impl_db_field_with_postgres_int_cast!(u8, i16, TinyUnsignedInteger); +impl_db_field_with_postgres_int_cast!(u16, i16, SmallUnsignedInteger); +impl_db_field_with_postgres_int_cast!(u32, i32, UnsignedInteger); +impl_db_field_with_postgres_int_cast!(u64, i64, BigUnsignedInteger); impl_db_field!(f32, Float); impl_db_field!(f64, Double); impl_db_field!(chrono::NaiveDate, Date); impl_db_field!(chrono::NaiveTime, Time); impl_db_field!(chrono::NaiveDateTime, DateTime); -impl_db_field!(chrono::DateTime, TimestampWithTimeZone); impl_db_field!(String, Text); impl_db_field!(Vec, Blob); @@ -46,3 +140,82 @@ impl ToDbValue for &str { 
(*self).to_string().into() } } + +impl DatabaseField for chrono::DateTime { + const TYPE: ColumnType = ColumnType::DateTimeWithTimeZone; +} + +impl FromDbValue for chrono::DateTime { + impl_from_sqlite_default!(); + + impl_from_postgres_default!(); + + #[cfg(feature = "mysql")] + fn from_mysql(value: MySqlValueRef) -> Result { + Ok(value.get::>()?.fixed_offset()) + } +} +impl FromDbValue for Option> { + impl_from_sqlite_default!(); + + impl_from_postgres_default!(); + + #[cfg(feature = "mysql")] + fn from_mysql(value: MySqlValueRef) -> Result { + Ok(value + .get::>>()? + .map(|dt| dt.fixed_offset())) + } +} + +impl_to_db_value_default!(chrono::DateTime); + +impl ToDbValue for Option<&str> { + fn to_sea_query_value(&self) -> Value { + self.map(ToString::to_string).into() + } +} + +impl DatabaseField for Option +where + Option: ToDbValue + FromDbValue, +{ + const NULLABLE: bool = true; + const TYPE: ColumnType = T::TYPE; +} + +impl DatabaseField for LimitedString { + const TYPE: ColumnType = ColumnType::String(LIMIT); +} + +impl FromDbValue for LimitedString { + #[cfg(feature = "sqlite")] + fn from_sqlite(value: SqliteValueRef) -> Result { + let str = value.get::()?; + Self::new(str).map_err(DatabaseError::value_decode) + } + + #[cfg(feature = "postgres")] + fn from_postgres(value: PostgresValueRef) -> Result { + let str = value.get::()?; + Self::new(str).map_err(DatabaseError::value_decode) + } + + #[cfg(feature = "mysql")] + fn from_mysql(value: MySqlValueRef) -> Result { + let str = value.get::()?; + Self::new(str).map_err(DatabaseError::value_decode) + } +} + +impl ToDbValue for LimitedString { + fn to_sea_query_value(&self) -> Value { + self.0.clone().into() + } +} + +impl ToDbValue for Option> { + fn to_sea_query_value(&self) -> Value { + self.clone().map(|s| s.0).into() + } +} diff --git a/flareon/src/db/impl_mysql.rs b/flareon/src/db/impl_mysql.rs new file mode 100644 index 0000000..2314104 --- /dev/null +++ b/flareon/src/db/impl_mysql.rs @@ -0,0 +1,24 @@ 
+use crate::db::sea_query_db::impl_sea_query_db_backend; +use crate::db::ColumnType; + +impl_sea_query_db_backend!(DatabaseMySql: sqlx::mysql::MySql, sqlx::mysql::MySqlPool, MySqlRow, MySqlValueRef, sea_query::MysqlQueryBuilder); + +impl DatabaseMySql { + fn prepare_values(_values: &mut sea_query_binder::SqlxValues) { + // No changes are needed for MySQL + } + + pub(super) fn sea_query_column_type_for( + &self, + column_type: ColumnType, + ) -> sea_query::ColumnType { + match column_type { + ColumnType::DateTime | ColumnType::DateTimeWithTimeZone => { + return sea_query::ColumnType::custom("DATETIME(6)"); + } + _ => {} + } + + sea_query::ColumnType::from(column_type) + } +} diff --git a/flareon/src/db/impl_postgres.rs b/flareon/src/db/impl_postgres.rs new file mode 100644 index 0000000..5ade464 --- /dev/null +++ b/flareon/src/db/impl_postgres.rs @@ -0,0 +1,43 @@ +use crate::db::sea_query_db::impl_sea_query_db_backend; + +impl_sea_query_db_backend!(DatabasePostgres: sqlx::postgres::Postgres, sqlx::postgres::PgPool, PostgresRow, PostgresValueRef, sea_query::PostgresQueryBuilder); + +impl DatabasePostgres { + fn prepare_values(values: &mut sea_query_binder::SqlxValues) { + for value in &mut values.0 .0 { + Self::tinyint_to_smallint(value); + Self::unsigned_to_signed(value); + } + } + + /// PostgreSQL does only support 2+ bytes integers, so we need to convert + /// i8/u8 to i16/u16. Otherwise, sqlx will convert them internally to `char` + /// and we'll get an error. + fn tinyint_to_smallint(value: &mut sea_query::Value) { + if let sea_query::Value::TinyInt(num) = value { + *value = sea_query::Value::SmallInt(num.map(i16::from)); + } else if let sea_query::Value::TinyUnsigned(num) = value { + *value = sea_query::Value::SmallInt(num.map(i16::from)); + } + } + + /// PostgreSQL doesn't support unsigned integers, so we need to convert + /// them to signed integers. 
+ fn unsigned_to_signed(value: &mut sea_query::Value) { + #[allow(clippy::cast_possible_wrap)] + if let sea_query::Value::SmallUnsigned(num) = value { + *value = sea_query::Value::SmallInt(num.map(|v| v as i16)); + } else if let sea_query::Value::Unsigned(num) = value { + *value = sea_query::Value::Int(num.map(|v| v as i32)); + } else if let sea_query::Value::BigUnsigned(num) = value { + *value = sea_query::Value::BigInt(num.map(|v| v as i64)); + } + } + + pub(super) fn sea_query_column_type_for( + &self, + column_type: crate::db::ColumnType, + ) -> sea_query::ColumnType { + sea_query::ColumnType::from(column_type) + } +} diff --git a/flareon/src/db/impl_sqlite.rs b/flareon/src/db/impl_sqlite.rs index c5138f3..5f228b1 100644 --- a/flareon/src/db/impl_sqlite.rs +++ b/flareon/src/db/impl_sqlite.rs @@ -1,146 +1,16 @@ -use derive_more::Debug; -use flareon::db::{SqlxRowRef, SqlxValueRef}; -use log::debug; -use sea_query::{SchemaStatementBuilder, SqliteQueryBuilder}; -use sea_query_binder::{SqlxBinder, SqlxValues}; -use sqlx::{Database, Row, SqlitePool}; +use crate::db::sea_query_db::impl_sea_query_db_backend; -use super::{Result, RowsNum, StatementResult}; - -#[derive(Debug)] -pub(super) struct DatabaseSqlite { - db_connection: SqlitePool, -} +impl_sea_query_db_backend!(DatabaseSqlite: sqlx::sqlite::Sqlite, sqlx::sqlite::SqlitePool, SqliteRow, SqliteValueRef, sea_query::SqliteQueryBuilder); impl DatabaseSqlite { - pub(super) async fn new(url: &str) -> Result { - let db_connection = SqlitePool::connect(url).await?; - - Ok(Self { db_connection }) - } - - pub(super) async fn close(self) -> Result<()> { - self.db_connection.close().await; - Ok(()) - } - - pub(super) async fn fetch_one(&self, statement: &T) -> Result { - let (sql, values) = Self::build_sql(statement); - - let row = sqlx::query_with(&sql, values) - .fetch_one(&self.db_connection) - .await?; - Ok(SqliteRow::new(row)) - } - - pub(super) async fn fetch_option( - &self, - statement: &T, - ) -> Result> { - let 
(sql, values) = Self::build_sql(statement); - - let row = sqlx::query_with(&sql, values) - .fetch_optional(&self.db_connection) - .await?; - Ok(row.map(SqliteRow::new)) - } - - pub(super) async fn fetch_all(&self, statement: &T) -> Result> { - let (sql, values) = Self::build_sql(statement); - - let result = sqlx::query_with(&sql, values) - .fetch_all(&self.db_connection) - .await? - .into_iter() - .map(SqliteRow::new) - .collect(); - Ok(result) - } - - pub(super) async fn execute_statement( - &self, - statement: &T, - ) -> Result { - let (sql, values) = Self::build_sql(statement); - - self.execute_sqlx(sqlx::query_with(&sql, values)).await - } - - pub(super) async fn execute_schema( - &self, - statement: T, - ) -> Result { - let sql = statement.build(SqliteQueryBuilder); - debug!("Schema modification: {}", sql); - - self.execute_sqlx(sqlx::query(&sql)).await + fn prepare_values(_values: &mut sea_query_binder::SqlxValues) { + // No changes are needed for SQLite } - async fn execute_sqlx<'a, A>( + pub(super) fn sea_query_column_type_for( &self, - sqlx_statement: sqlx::query::Query<'a, sqlx::sqlite::Sqlite, A>, - ) -> Result - where - A: 'a + sqlx::IntoArguments<'a, sqlx::sqlite::Sqlite>, - { - let result = sqlx_statement.execute(&self.db_connection).await?; - let result = StatementResult { - rows_affected: RowsNum(result.rows_affected()), - }; - - debug!("Rows affected: {}", result.rows_affected.0); - Ok(result) - } - - fn build_sql(statement: &T) -> (String, SqlxValues) - where - T: SqlxBinder, - { - let (sql, values) = statement.build_sqlx(SqliteQueryBuilder); - debug!("SQLite Query: `{}` (values: {:?})", sql, values); - - (sql, values) - } -} - -#[derive(Debug)] -pub struct SqliteRow { - #[debug("...")] - inner: sqlx::sqlite::SqliteRow, -} - -impl SqliteRow { - #[must_use] - fn new(inner: sqlx::sqlite::SqliteRow) -> Self { - Self { inner } - } -} - -impl SqlxRowRef for SqliteRow { - type ValueRef<'r> = SqliteValueRef<'r>; - - fn get_raw(&self, index: usize) -> 
Result> { - Ok(SqliteValueRef::new(self.inner.try_get_raw(index)?)) - } -} - -#[derive(Debug)] -pub struct SqliteValueRef<'r> { - #[debug("...")] - inner: sqlx::sqlite::SqliteValueRef<'r>, -} - -impl<'r> SqliteValueRef<'r> { - #[must_use] - fn new(inner: sqlx::sqlite::SqliteValueRef<'r>) -> Self { - Self { inner } - } -} - -impl<'r> SqlxValueRef<'r> for SqliteValueRef<'r> { - type DB = sqlx::Sqlite; - - fn get_raw(self) -> ::ValueRef<'r> { - self.inner + column_type: crate::db::ColumnType, + ) -> sea_query::ColumnType { + sea_query::ColumnType::from(column_type) } } diff --git a/flareon/src/db/migrations.rs b/flareon/src/db/migrations.rs index eff2b50..62c13a4 100644 --- a/flareon/src/db/migrations.rs +++ b/flareon/src/db/migrations.rs @@ -1,12 +1,24 @@ +mod sorter; + use std::fmt; use std::fmt::{Debug, Formatter}; use flareon_macros::{model, query}; use log::info; -use sea_query::ColumnDef; +use sea_query::{ColumnDef, StringLen}; +use thiserror::Error; +use crate::db::migrations::sorter::{MigrationSorter, MigrationSorterError}; use crate::db::{ColumnType, Database, DatabaseField, Identifier, Result}; +#[derive(Debug, Clone, Error)] +#[non_exhaustive] +pub enum MigrationEngineError { + /// An error occurred while determining the correct order of migrations. + #[error("Error while determining the correct order of migrations")] + MigrationSortError(#[from] MigrationSorterError), +} + /// A migration engine that can run migrations. 
#[derive(Debug)] pub struct MigrationEngine { @@ -14,25 +26,27 @@ pub struct MigrationEngine { } impl MigrationEngine { - #[must_use] - pub fn new>(migrations: V) -> Self { + pub fn new>( + migrations: V, + ) -> Result { let migrations = migrations.into_iter().map(MigrationWrapper::new).collect(); Self::from_wrapper(migrations) } - #[must_use] - pub fn from_wrapper(mut migrations: Vec) -> Self { - Self::sort_migrations(&mut migrations); - Self { migrations } + pub fn from_wrapper(mut migrations: Vec) -> Result { + Self::sort_migrations(&mut migrations)?; + Ok(Self { migrations }) } /// Sorts the migrations by app name and migration name to ensure that the /// order of applying migrations is consistent and deterministic. Then /// determines the correct order of applying migrations based on the /// dependencies between them. - pub fn sort_migrations(migrations: &mut [T]) { - migrations.sort_by(|a, b| (a.app_name(), a.name()).cmp(&(b.app_name(), b.name()))); - // TODO: Determine the correct order based on the dependencies + pub fn sort_migrations(migrations: &mut [T]) -> Result<()> { + MigrationSorter::new(migrations) + .sort() + .map_err(MigrationEngineError::from)?; + Ok(()) } /// Runs the migrations. 
If a migration is already applied, it will be @@ -51,7 +65,9 @@ impl MigrationEngine { /// # Examples /// /// ``` - /// use flareon::db::migrations::{Field, Migration, MigrationEngine, Operation}; + /// use flareon::db::migrations::{ + /// Field, Migration, MigrationDependency, MigrationEngine, Operation, + /// }; /// use flareon::db::{Database, DatabaseField, Identifier}; /// use flareon::Result; /// @@ -60,6 +76,7 @@ impl MigrationEngine { /// impl Migration for MyMigration { /// const APP_NAME: &'static str = "todoapp"; /// const MIGRATION_NAME: &'static str = "m_0001_initial"; + /// const DEPENDENCIES: &'static [MigrationDependency] = &[]; /// const OPERATIONS: &'static [Operation] = &[Operation::create_model() /// .table_name(Identifier::new("todoapp__my_model")) /// .fields(&[ @@ -73,7 +90,7 @@ impl MigrationEngine { /// /// # #[tokio::main] /// # async fn main() -> Result<()> { - /// let engine = MigrationEngine::new([MyMigration]); + /// let engine = MigrationEngine::new([MyMigration])?; /// let database = Database::new("sqlite::memory:").await?; /// engine.run(&database).await?; /// # Ok(()) @@ -226,7 +243,7 @@ impl Operation { } => { let mut query = sea_query::Table::create().table(*table_name).to_owned(); for field in *fields { - query.col(ColumnDef::from(field)); + query.col(field.as_column_def(database)); } if *if_not_exists { query.if_not_exists(); @@ -236,7 +253,7 @@ impl Operation { OperationInner::AddField { table_name, field } => { let query = sea_query::Table::alter() .table(*table_name) - .add_column(ColumnDef::from(field)) + .add_column(field.as_column_def(database)) .to_owned(); database.execute_schema(query).await?; } @@ -326,6 +343,8 @@ pub struct Field { pub auto_value: bool, /// Whether the column can be null pub null: bool, + /// Whether the column has a unique constraint + pub unique: bool, } impl Field { @@ -337,6 +356,7 @@ impl Field { primary_key: false, auto_value: false, null: false, + unique: false, } } @@ -357,24 +377,45 @@ impl 
Field { self.null = true; self } -} -impl From<&Field> for ColumnDef { - fn from(column: &Field) -> Self { - let mut def = ColumnDef::new_with_type(column.name, column.ty.into()); - if column.primary_key { + #[must_use] + pub const fn set_null(mut self, value: bool) -> Self { + self.null = value; + self + } + + #[must_use] + pub const fn unique(mut self) -> Self { + self.unique = true; + self + } + + fn as_column_def(&self, mapper: &T) -> ColumnDef { + let mut def = + ColumnDef::new_with_type(self.name, mapper.sea_query_column_type_for(self.ty)); + if self.primary_key { def.primary_key(); } - if column.auto_value { + if self.auto_value { def.auto_increment(); } - if column.null { + if self.null { def.null(); + } else { + def.not_null(); + } + if self.unique { + def.unique_key(); } def } } +#[cfg_attr(test, mockall::automock)] +pub(super) trait ColumnTypeMapper { + fn sea_query_column_type_for(&self, column_type: ColumnType) -> sea_query::ColumnType; +} + macro_rules! unwrap_builder_option { ($self:ident, $field:ident) => { match $self.$field { @@ -480,12 +521,14 @@ impl AddFieldBuilder { pub trait Migration { const APP_NAME: &'static str; const MIGRATION_NAME: &'static str; + const DEPENDENCIES: &'static [MigrationDependency]; const OPERATIONS: &'static [Operation]; } pub trait DynMigration { fn app_name(&self) -> &str; fn name(&self) -> &str; + fn dependencies(&self) -> &[MigrationDependency]; fn operations(&self) -> &[Operation]; } @@ -498,6 +541,10 @@ impl DynMigration for T { Self::MIGRATION_NAME } + fn dependencies(&self) -> &[MigrationDependency] { + Self::DEPENDENCIES + } + fn operations(&self) -> &[Operation] { Self::OPERATIONS } @@ -512,6 +559,10 @@ impl DynMigration for &dyn DynMigration { DynMigration::name(*self) } + fn dependencies(&self) -> &[MigrationDependency] { + DynMigration::dependencies(*self) + } + fn operations(&self) -> &[Operation] { DynMigration::operations(*self) } @@ -526,6 +577,10 @@ impl DynMigration for Box { 
DynMigration::name(&**self) } + fn dependencies(&self) -> &[MigrationDependency] { + DynMigration::dependencies(&**self) + } + fn operations(&self) -> &[Operation] { DynMigration::operations(&**self) } @@ -549,6 +604,10 @@ impl DynMigration for MigrationWrapper { self.0.name() } + fn dependencies(&self) -> &[MigrationDependency] { + self.0.dependencies() + } + fn operations(&self) -> &[Operation] { self.0.operations() } @@ -581,14 +640,60 @@ impl From for sea_query::ColumnType { ColumnType::Time => Self::Time, ColumnType::Date => Self::Date, ColumnType::DateTime => Self::DateTime, - ColumnType::Timestamp => Self::Timestamp, - ColumnType::TimestampWithTimeZone => Self::TimestampWithTimeZone, + ColumnType::DateTimeWithTimeZone => Self::TimestampWithTimeZone, ColumnType::Text => Self::Text, ColumnType::Blob => Self::Blob, + ColumnType::String(len) => Self::String(StringLen::N(len)), } } } +/// A migration dependency: a relationship between two migrations that tells the +/// migration engine which migrations need to be applied before +/// others. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct MigrationDependency { + inner: MigrationDependencyInner, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +enum MigrationDependencyInner { + Migration { + app: &'static str, + migration: &'static str, + }, + Model { + app: &'static str, + model_name: &'static str, + }, +} + +impl MigrationDependency { + #[must_use] + const fn new(inner: MigrationDependencyInner) -> Self { + Self { inner } + } + + /// Creates a dependency on another migration. + /// + /// This ensures that the migration engine will apply the migration with + /// given app and migration name before the current migration. + #[must_use] + pub const fn migration(app: &'static str, migration: &'static str) -> Self { + Self::new(MigrationDependencyInner::Migration { app, migration }) + } + + /// Creates a dependency on a model. 
+ /// + /// This ensures that the migration engine will apply the migration that + /// creates the model with the given app and model name before the current + /// migration. + #[must_use] + pub const fn model(app: &'static str, model_name: &'static str) -> Self { + Self::new(MigrationDependencyInner::Model { app, model_name }) + } +} + #[derive(Debug)] #[model(table_name = "flareon__migrations", model_type = "internal")] struct AppliedMigration { @@ -616,14 +721,18 @@ const CREATE_APPLIED_MIGRATIONS_MIGRATION: Operation = Operation::create_model() #[cfg(test)] mod tests { + use flareon::test::TestDatabase; + use sea_query::ColumnSpec; + use super::*; - use crate::db::{ColumnType, Database, DatabaseField, Identifier}; + use crate::db::{ColumnType, DatabaseField, Identifier}; struct TestMigration; impl Migration for TestMigration { const APP_NAME: &'static str = "testapp"; const MIGRATION_NAME: &'static str = "m_0001_initial"; + const DEPENDENCIES: &'static [MigrationDependency] = &[]; const OPERATIONS: &'static [Operation] = &[Operation::create_model() .table_name(Identifier::new("testapp__test_model")) .fields(&[ @@ -635,12 +744,11 @@ mod tests { .build()]; } - #[tokio::test] - async fn test_migration_engine_run() { - let engine = MigrationEngine::new([TestMigration]); - let database = Database::new("sqlite::memory:").await.unwrap(); + #[flareon_macros::dbtest] + async fn test_migration_engine_run(test_db: &mut TestDatabase) { + let engine = MigrationEngine::new([TestMigration]).unwrap(); - let result = engine.run(&database).await; + let result = engine.run(&test_db.database()).await; assert!(result.is_ok()); } @@ -713,4 +821,59 @@ mod tests { assert_eq!(migration.name(), "m_0001_initial"); assert_eq!(migration.operations().len(), 1); } + + macro_rules! 
has_spec { + ($column_def:expr, $spec:pat) => { + $column_def + .get_column_spec() + .iter() + .any(|spec| matches!(spec, $spec)) + }; + } + + #[test] + fn test_field_to_column_def() { + let field = Field::new(Identifier::new("id"), ColumnType::Integer) + .primary_key() + .auto() + .null() + .unique(); + + let mut mapper = MockColumnTypeMapper::new(); + mapper + .expect_sea_query_column_type_for() + .return_const(sea_query::ColumnType::Integer); + let column_def = field.as_column_def(&mapper); + + assert_eq!(column_def.get_column_name(), "id"); + assert_eq!( + column_def.get_column_type(), + Some(&sea_query::ColumnType::Integer) + ); + assert!(has_spec!(column_def, ColumnSpec::PrimaryKey)); + assert!(has_spec!(column_def, ColumnSpec::AutoIncrement)); + assert!(has_spec!(column_def, ColumnSpec::Null)); + assert!(has_spec!(column_def, ColumnSpec::UniqueKey)); + } + + #[test] + fn test_field_to_column_def_without_options() { + let field = Field::new(Identifier::new("name"), ColumnType::Text); + + let mut mapper = MockColumnTypeMapper::new(); + mapper + .expect_sea_query_column_type_for() + .return_const(sea_query::ColumnType::Text); + let column_def = field.as_column_def(&mapper); + + assert_eq!(column_def.get_column_name(), "name"); + assert_eq!( + column_def.get_column_type(), + Some(&sea_query::ColumnType::Text) + ); + assert!(!has_spec!(column_def, ColumnSpec::PrimaryKey)); + assert!(!has_spec!(column_def, ColumnSpec::AutoIncrement)); + assert!(!has_spec!(column_def, ColumnSpec::Null)); + assert!(!has_spec!(column_def, ColumnSpec::UniqueKey)); + } } diff --git a/flareon/src/db/migrations/sorter.rs b/flareon/src/db/migrations/sorter.rs new file mode 100644 index 0000000..dc32bfc --- /dev/null +++ b/flareon/src/db/migrations/sorter.rs @@ -0,0 +1,469 @@ +use std::collections::HashMap; + +use flareon::db::migrations::MigrationDependency; +use thiserror::Error; + +use crate::db::migrations::{DynMigration, MigrationDependencyInner, OperationInner}; + +#[derive(Debug, 
Clone, PartialEq, Eq, Error)] +#[non_exhaustive] +pub enum MigrationSorterError { + #[error("Cycle detected in migrations")] + CycleDetected, + #[error("Dependency not found: {}", format_migration_dependency(.0))] + InvalidDependency(MigrationDependency), + #[error("Migration defined twice: {app_name}::{migration_name}")] + DuplicateMigration { + app_name: String, + migration_name: String, + }, + #[error("Migration creating model defined twice: {app_name}::{model_name}")] + DuplicateModel { + app_name: String, + model_name: String, + }, +} + +type Result = core::result::Result; + +fn format_migration_dependency(dependency: &MigrationDependency) -> String { + match dependency.inner { + MigrationDependencyInner::Migration { app, migration } => { + format!("migration {app}::{migration}") + } + MigrationDependencyInner::Model { app, model_name } => { + format!("model {app}::{model_name}") + } + } +} + +/// Sorts migrations topologically based on their dependencies. +#[derive(Debug)] +pub(super) struct MigrationSorter<'a, T> { + migrations: &'a mut [T], +} + +impl<'a, T: DynMigration> MigrationSorter<'a, T> { + #[must_use] + pub(super) fn new(migrations: &'a mut [T]) -> Self { + Self { migrations } + } + + pub(super) fn sort(&mut self) -> Result<()> { + // Sort by names to ensure that the order is deterministic + self.migrations + .sort_by(|a, b| (b.app_name(), b.name()).cmp(&(a.app_name(), a.name()))); + + self.toposort()?; + Ok(()) + } + + fn toposort(&mut self) -> Result<()> { + let lookup = Self::create_lookup_table(self.migrations)?; + let mut graph = Graph::new(self.migrations.len()); + + for (index, migration) in self.migrations.iter().enumerate() { + for dependency in migration.dependencies() { + let dependency_index = lookup + .get(&MigrationLookup::from(dependency)) + .ok_or(MigrationSorterError::InvalidDependency(*dependency))?; + graph.add_edge(*dependency_index, index); + } + } + + let mut sorted_indices = graph.toposort()?; + 
apply_permutation(self.migrations, &mut sorted_indices); + + Ok(()) + } + + fn create_lookup_table(migrations: &[T]) -> Result> { + let mut map = HashMap::with_capacity(migrations.len()); + + for (index, migration) in migrations.iter().enumerate() { + let app_and_name = MigrationLookup::ByAppAndName { + app: migration.app_name(), + name: migration.name(), + }; + if map.insert(app_and_name, index).is_some() { + return Err(MigrationSorterError::DuplicateMigration { + app_name: migration.app_name().to_owned(), + migration_name: migration.name().to_owned(), + }); + }; + + for operation in migration.operations() { + if let OperationInner::CreateModel { table_name, .. } = operation.inner { + let app_and_model = MigrationLookup::ByAppAndModel { + app: migration.app_name(), + model: table_name.0, + }; + if map.insert(app_and_model, index).is_some() { + return Err(MigrationSorterError::DuplicateModel { + app_name: migration.app_name().to_owned(), + model_name: table_name.0.to_owned(), + }); + } + } + } + } + + Ok(map) + } +} + +fn apply_permutation(migrations: &mut [T], order: &mut [usize]) { + for i in 0..order.len() { + let mut current = i; + let mut next = order[current]; + + while next != i { + // process the cycle + migrations.swap(current, next); + order[current] = current; + + current = next; + next = order[current]; + } + + order[current] = current; + } +} + +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +enum MigrationLookup<'a> { + ByAppAndName { app: &'a str, name: &'a str }, + ByAppAndModel { app: &'a str, model: &'a str }, +} + +impl From<&MigrationDependency> for MigrationLookup<'_> { + fn from(dependency: &MigrationDependency) -> Self { + match dependency.inner { + MigrationDependencyInner::Migration { app, migration } => { + MigrationLookup::ByAppAndName { + app, + name: migration, + } + } + MigrationDependencyInner::Model { app, model_name } => MigrationLookup::ByAppAndModel { + app, + model: model_name, + }, + } + } +} + +#[derive(Debug, Clone, PartialEq, 
Eq, Hash)] +struct Graph { + vertex_edges: Vec>, +} + +impl Graph { + #[must_use] + fn new(vertex_num: usize) -> Self { + Self { + vertex_edges: vec![Vec::new(); vertex_num], + } + } + + fn add_edge(&mut self, from: usize, to: usize) { + self.vertex_edges[from].push(to); + } + + #[must_use] + fn vertex_num(&self) -> usize { + self.vertex_edges.len() + } + + fn toposort(&mut self) -> Result> { + let mut visited = vec![VisitedStatus::NotVisited; self.vertex_num()]; + let mut sorted_indices_stack = Vec::with_capacity(self.vertex_num()); + + for index in 0..self.vertex_num() { + self.visit(index, &mut visited, &mut sorted_indices_stack)?; + } + + assert_eq!(sorted_indices_stack.len(), self.vertex_num()); + + sorted_indices_stack.reverse(); + Ok(sorted_indices_stack) + } + + fn visit( + &self, + index: usize, + visited: &mut Vec, + sorted_indices_stack: &mut Vec, + ) -> Result<()> { + match visited[index] { + VisitedStatus::Visited => return Ok(()), + VisitedStatus::Visiting => { + return Err(MigrationSorterError::CycleDetected); + } + VisitedStatus::NotVisited => {} + } + + visited[index] = VisitedStatus::Visiting; + + for &neighbor in &self.vertex_edges[index] { + self.visit(neighbor, visited, sorted_indices_stack)?; + } + + visited[index] = VisitedStatus::Visited; + sorted_indices_stack.push(index); + + Ok(()) + } +} + +#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)] +enum VisitedStatus { + NotVisited, + Visiting, + Visited, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db::migrations::{MigrationDependency, Operation}; + use crate::db::Identifier; + use crate::test::TestMigration; + + #[test] + fn graph_toposort() { + let mut graph = Graph::new(8); + graph.add_edge(0, 3); + graph.add_edge(1, 3); + graph.add_edge(1, 4); + graph.add_edge(2, 4); + graph.add_edge(2, 7); + graph.add_edge(3, 5); + graph.add_edge(3, 6); + graph.add_edge(3, 7); + graph.add_edge(4, 6); + + let sorted_indices = graph.toposort().unwrap(); + + assert_eq!(sorted_indices, vec![2, 
1, 4, 0, 3, 7, 6, 5]); + } + + #[test] + fn create_lookup_table() { + let migrations = vec![ + TestMigration::new( + "app1", + "migration1", + [], + [Operation::create_model() + .table_name(Identifier::new("model1")) + .fields(&[]) + .build()], + ), + TestMigration::new( + "app1", + "migration2", + [], + [Operation::create_model() + .table_name(Identifier::new("model2")) + .fields(&[]) + .build()], + ), + ]; + + let lookup = MigrationSorter::create_lookup_table(&migrations).unwrap(); + + assert_eq!(lookup.len(), 4); + assert!(lookup.contains_key(&MigrationLookup::ByAppAndName { + app: "app1", + name: "migration1" + })); + assert!(lookup.contains_key(&MigrationLookup::ByAppAndName { + app: "app1", + name: "migration2" + })); + assert!(lookup.contains_key(&MigrationLookup::ByAppAndModel { + app: "app1", + model: "model1" + })); + assert!(lookup.contains_key(&MigrationLookup::ByAppAndModel { + app: "app1", + model: "model2" + })); + } + + #[test] + fn sort() { + let mut migrations = vec![ + TestMigration::new("app1", "migration2", [], []), + TestMigration::new("app1", "migration1", [], []), + ]; + + let mut sorter = MigrationSorter::new(&mut migrations); + sorter.sort().unwrap(); + + assert_eq!(sorter.migrations[0].name(), "migration1"); + assert_eq!(sorter.migrations[1].name(), "migration2"); + } + + #[test] + fn toposort() { + let mut migrations = vec![ + TestMigration::new("app2", "migration_before", [], []), + TestMigration::new( + "app2", + "migration_after", + [MigrationDependency::migration("app2", "migration_before")], + [], + ), + TestMigration::new( + "app1", + "migration_before", + [MigrationDependency::migration("app2", "migration_before")], + [], + ), + TestMigration::new( + "app1", + "migration_after", + [ + MigrationDependency::migration("app1", "migration_before"), + MigrationDependency::migration("app2", "migration_after"), + ], + [], + ), + ]; + + let mut sorter = MigrationSorter::new(&mut migrations); + sorter.sort().unwrap(); + + assert_eq!( + 
(migrations[0].app_name(), migrations[0].name()), + ("app2", "migration_before") + ); + assert_eq!( + (migrations[1].app_name(), migrations[1].name()), + ("app1", "migration_before") + ); + assert_eq!( + (migrations[2].app_name(), migrations[2].name()), + ("app2", "migration_after") + ); + assert_eq!( + (migrations[3].app_name(), migrations[3].name()), + ("app1", "migration_after") + ); + } + + // migration names must be &'static str + const MIGRATION_NAMES: [&str; 100] = [ + "m0", "m1", "m2", "m3", "m4", "m5", "m6", "m7", "m8", "m9", "m10", "m11", "m12", "m13", + "m14", "m15", "m16", "m17", "m18", "m19", "m20", "m21", "m22", "m23", "m24", "m25", "m26", + "m27", "m28", "m29", "m30", "m31", "m32", "m33", "m34", "m35", "m36", "m37", "m38", "m39", + "m40", "m41", "m42", "m43", "m44", "m45", "m46", "m47", "m48", "m49", "m50", "m51", "m52", + "m53", "m54", "m55", "m56", "m57", "m58", "m59", "m60", "m61", "m62", "m63", "m64", "m65", + "m66", "m67", "m68", "m69", "m70", "m71", "m72", "m73", "m74", "m75", "m76", "m77", "m78", + "m79", "m80", "m81", "m82", "m83", "m84", "m85", "m86", "m87", "m88", "m89", "m90", "m91", + "m92", "m93", "m94", "m95", "m96", "m97", "m98", "m99", + ]; + + #[test] + fn toposort_big() { + const MIGRATION_NUM: usize = 100; + + let mut migrations = Vec::new(); + for i in 0..MIGRATION_NUM { + let deps = (0..i) + .map(|i| MigrationDependency::migration("app1", MIGRATION_NAMES[i])) + .collect::>(); + + migrations.push(TestMigration::new("app1", MIGRATION_NAMES[i], deps, [])); + } + + let mut sorter = MigrationSorter::new(&mut migrations); + sorter.sort().unwrap(); + + for (i, migration) in migrations.iter().enumerate() { + assert_eq!(migration.name(), MIGRATION_NAMES[i]); + } + } + + #[test] + fn cycle_detection() { + let mut migrations = vec![ + TestMigration::new( + "app1", + "migration1", + [MigrationDependency::migration("app1", "migration2")], + [Operation::create_model() + .table_name(Identifier::new("model1")) + .fields(&[]) + .build()], + ), + 
TestMigration::new( + "app1", + "migration2", + [MigrationDependency::migration("app1", "migration1")], + [Operation::create_model() + .table_name(Identifier::new("model2")) + .fields(&[]) + .build()], + ), + ]; + + let mut sorter = MigrationSorter::new(&mut migrations); + assert_eq!( + sorter.toposort().unwrap_err(), + MigrationSorterError::CycleDetected + ); + } + + #[test] + fn duplicate_migration() { + let mut migrations = vec![ + TestMigration::new("app1", "migration1", [], []), + TestMigration::new("app1", "migration1", [], []), + ]; + + let mut sorter = MigrationSorter::new(&mut migrations); + assert_eq!( + sorter.toposort().unwrap_err(), + MigrationSorterError::DuplicateMigration { + app_name: "app1".to_owned(), + migration_name: "migration1".to_owned() + } + ); + } + + #[test] + fn duplicate_model() { + let mut migrations = vec![ + TestMigration::new( + "app1", + "migration1", + [], + [Operation::create_model() + .table_name(Identifier::new("model1")) + .fields(&[]) + .build()], + ), + TestMigration::new( + "app1", + "migration2", + [], + [Operation::create_model() + .table_name(Identifier::new("model1")) + .fields(&[]) + .build()], + ), + ]; + + let mut sorter = MigrationSorter::new(&mut migrations); + assert_eq!( + sorter.toposort().unwrap_err(), + MigrationSorterError::DuplicateModel { + app_name: "app1".to_owned(), + model_name: "model1".to_owned() + } + ); + } +} diff --git a/flareon/src/db/sea_query_db.rs b/flareon/src/db/sea_query_db.rs new file mode 100644 index 0000000..fc08016 --- /dev/null +++ b/flareon/src/db/sea_query_db.rs @@ -0,0 +1,162 @@ +/// Implements the database backend for a specific engine using `SeaQuery`. +/// +/// Note that this macro doesn't implement certain engine-specific methods, and +/// they need to be implemented in a separate `impl` block. These methods are: +/// * `prepare_values` +/// * `sea_query_column_type_for` +macro_rules! 
impl_sea_query_db_backend { + ($db_name:ident : $sqlx_db_ty:ty, $pool_ty:ty, $row_name:ident, $value_ref_name:ident, $query_builder:expr) => { + #[derive(Debug)] + pub(super) struct $db_name { + db_connection: $pool_ty, + } + + impl $db_name { + pub(super) async fn new(url: &str) -> crate::db::Result { + let db_connection = <$pool_ty>::connect(url).await?; + + Ok(Self { db_connection }) + } + + pub(super) async fn close(&self) -> crate::db::Result<()> { + self.db_connection.close().await; + Ok(()) + } + + pub(super) async fn fetch_option( + &self, + statement: &T, + ) -> crate::db::Result> { + let (sql, values) = Self::build_sql(statement); + + let row = Self::sqlx_query_with(&sql, values) + .fetch_optional(&self.db_connection) + .await?; + Ok(row.map($row_name::new)) + } + + pub(super) async fn fetch_all( + &self, + statement: &T, + ) -> crate::db::Result> { + let (sql, values) = Self::build_sql(statement); + + let result = Self::sqlx_query_with(&sql, values) + .fetch_all(&self.db_connection) + .await? 
+ .into_iter() + .map($row_name::new) + .collect(); + Ok(result) + } + + pub(super) async fn execute_statement( + &self, + statement: &T, + ) -> crate::db::Result { + let (sql, mut values) = Self::build_sql(statement); + Self::prepare_values(&mut values); + + self.execute_sqlx(Self::sqlx_query_with(&sql, values)).await + } + + pub(super) async fn execute_schema( + &self, + statement: T, + ) -> crate::db::Result { + let sql = statement.build($query_builder); + log::debug!("Schema modification: {}", sql); + + self.execute_sqlx(sqlx::query(&sql)).await + } + + pub(super) async fn raw_with( + &self, + sql: &str, + values: sea_query_binder::SqlxValues, + ) -> crate::db::Result { + self.execute_sqlx(Self::sqlx_query_with(sql, values)).await + } + + async fn execute_sqlx<'a, A>( + &self, + sqlx_statement: sqlx::query::Query<'a, $sqlx_db_ty, A>, + ) -> crate::db::Result + where + A: 'a + sqlx::IntoArguments<'a, $sqlx_db_ty>, + { + let result = sqlx_statement.execute(&self.db_connection).await?; + let result = crate::db::StatementResult { + rows_affected: crate::db::RowsNum(result.rows_affected()), + }; + + log::debug!("Rows affected: {}", result.rows_affected.0); + Ok(result) + } + + fn build_sql(statement: &T) -> (String, sea_query_binder::SqlxValues) + where + T: sea_query_binder::SqlxBinder, + { + let (sql, values) = statement.build_sqlx($query_builder); + + (sql, values) + } + + fn sqlx_query_with( + sql: &str, + mut values: sea_query_binder::SqlxValues, + ) -> sqlx::query::Query<'_, $sqlx_db_ty, sea_query_binder::SqlxValues> { + Self::prepare_values(&mut values); + log::debug!("Query: `{}` (values: {:?})", sql, values); + + sqlx::query_with(sql, values) + } + } + + #[derive(derive_more::Debug)] + pub struct $row_name { + #[debug("...")] + inner: <$sqlx_db_ty as sqlx::Database>::Row, + } + + impl $row_name { + #[must_use] + fn new(inner: <$sqlx_db_ty as sqlx::Database>::Row) -> Self { + Self { inner } + } + } + + impl crate::db::SqlxRowRef for $row_name { + type 
ValueRef<'r> = $value_ref_name<'r>; + + fn get_raw(&self, index: usize) -> crate::db::Result> { + use sqlx::Row; + Ok($value_ref_name::new(self.inner.try_get_raw(index)?)) + } + } + + #[derive(derive_more::Debug)] + pub struct $value_ref_name<'r> { + #[debug("...")] + inner: <$sqlx_db_ty as sqlx::Database>::ValueRef<'r>, + } + + impl<'r> $value_ref_name<'r> { + #[must_use] + fn new(inner: <$sqlx_db_ty as sqlx::Database>::ValueRef<'r>) -> Self { + Self { inner } + } + } + + impl<'r> crate::db::SqlxValueRef<'r> for $value_ref_name<'r> { + type DB = $sqlx_db_ty; + + fn get_raw(self) -> ::ValueRef<'r> { + self.inner + } + } + }; +} + +pub(super) use impl_sea_query_db_backend; diff --git a/flareon/src/error.rs b/flareon/src/error.rs index c356fca..74b2a8b 100644 --- a/flareon/src/error.rs +++ b/flareon/src/error.rs @@ -68,9 +68,12 @@ impl From for askama::Error { impl_error_from_repr!(askama::Error); impl_error_from_repr!(crate::router::path::ReverseError); +#[cfg(feature = "db")] impl_error_from_repr!(crate::db::DatabaseError); impl_error_from_repr!(crate::forms::FormError); impl_error_from_repr!(crate::auth::AuthError); +#[cfg(feature = "json")] +impl_error_from_repr!(serde_json::Error); #[derive(Debug, Error)] #[non_exhaustive] @@ -85,7 +88,7 @@ pub(crate) enum ErrorRepr { source: Box, }, /// The request body had an invalid `Content-Type` header. - #[error("Invalid content type; expected {expected}, found {actual}")] + #[error("Invalid content type; expected `{expected}`, found `{actual}`")] InvalidContentType { expected: &'static str, actual: String, @@ -105,6 +108,7 @@ pub(crate) enum ErrorRepr { TemplateRender(#[from] askama::Error), /// An error occurred while communicating with the database. #[error("Database error: {0}")] + #[cfg(feature = "db")] DatabaseError(#[from] crate::db::DatabaseError), /// An error occurred while parsing a form. 
#[error("Failed to process a form: {0}")] @@ -112,6 +116,10 @@ pub(crate) enum ErrorRepr { /// An error occurred while trying to authenticate a user. #[error("Failed to authenticate user: {0}")] AuthenticationError(#[from] crate::auth::AuthError), + /// An error occurred while trying to serialize or deserialize JSON. + #[error("JSON error: {0}")] + #[cfg(feature = "json")] + JsonError(#[from] serde_json::Error), } #[cfg(test)] @@ -143,7 +151,7 @@ mod tests { assert_eq!( display, - "Invalid content type; expected application/json, found text/html" + "Invalid content type; expected `application/json`, found `text/html`" ); } diff --git a/flareon/src/headers.rs b/flareon/src/headers.rs index d3d9d89..aa11f0a 100644 --- a/flareon/src/headers.rs +++ b/flareon/src/headers.rs @@ -1,2 +1,4 @@ pub(crate) const HTML_CONTENT_TYPE: &str = "text/html; charset=utf-8"; pub(crate) const FORM_CONTENT_TYPE: &str = "application/x-www-form-urlencoded"; +#[cfg(feature = "json")] +pub(crate) const JSON_CONTENT_TYPE: &str = "application/json"; diff --git a/flareon/src/lib.rs b/flareon/src/lib.rs index 4c2fad1..64959fe 100644 --- a/flareon/src/lib.rs +++ b/flareon/src/lib.rs @@ -41,9 +41,11 @@ unused_import_braces, unused_qualifications )] +#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] extern crate self as flareon; +#[cfg(feature = "db")] pub mod db; mod error; pub mod forms; @@ -75,8 +77,7 @@ use axum::handler::HandlerWithoutStateExt; use bytes::Bytes; use derive_more::{Debug, Deref, Display, From}; pub use error::Error; -use flareon::config::DatabaseConfig; -use flareon::router::RouterService; +pub use flareon_macros::main; use futures_core::Stream; use futures_util::FutureExt; use http::request::Parts; @@ -85,15 +86,21 @@ use log::info; use request::Request; use router::{Route, Router}; use sync_wrapper::SyncWrapper; +use tower::util::BoxCloneService; use tower::Service; use crate::admin::AdminModelManager; +#[cfg(feature = "db")] +use crate::config::DatabaseConfig; use 
crate::config::ProjectConfig; +#[cfg(feature = "db")] use crate::db::migrations::{DynMigration, MigrationEngine}; +#[cfg(feature = "db")] use crate::db::Database; use crate::error::ErrorRepr; use crate::error_page::{ErrorPageTrigger, FlareonDiagnostics}; use crate::response::Response; +use crate::router::RouterService; /// A type alias for a result that can return a `flareon::Error`. pub type Result = std::result::Result; @@ -153,6 +160,7 @@ pub trait FlareonApp: Send + Sync { Router::empty() } + #[cfg(feature = "db")] fn migrations(&self) -> Vec> { vec![] } @@ -332,12 +340,13 @@ impl http_body::Body for Body { } } +pub type BoxedHandler = BoxCloneService; + /// A Flareon project, ready to be run. #[derive(Debug)] -// TODO add Middleware type? -pub struct FlareonProject { +pub struct FlareonProject { context: AppContext, - handler: S, + handler: BoxedHandler, } /// A part of [`FlareonProject`] that contains the shared context and configs @@ -348,6 +357,7 @@ pub struct AppContext { #[debug("...")] apps: Vec>, router: Arc, + #[cfg(feature = "db")] database: Option>, } @@ -357,12 +367,13 @@ impl AppContext { config: Arc, apps: Vec>, router: Arc, - database: Option>, + #[cfg(feature = "db")] database: Option>, ) -> Self { Self { config, apps, router, + #[cfg(feature = "db")] database, } } @@ -383,11 +394,13 @@ impl AppContext { } #[must_use] + #[cfg(feature = "db")] pub fn try_database(&self) -> Option<&Arc> { self.database.as_ref() } #[must_use] + #[cfg(feature = "db")] pub fn database(&self) -> &Database { self.try_database().expect( "Database missing. Did you forget to add the database when configuring FlareonProject?", @@ -415,6 +428,7 @@ impl FlareonProjectBuilder { config: Arc::new(ProjectConfig::default()), apps: vec![], router: Arc::new(Router::default()), + #[cfg(feature = "db")] database: None, }, urls: Vec::new(), @@ -465,7 +479,7 @@ impl FlareonProjectBuilder { } /// Builds the Flareon project instance. 
- pub async fn build(self) -> Result> { + pub async fn build(self) -> Result { self.into_builder_with_service().build().await } @@ -482,7 +496,11 @@ impl FlareonProjectBuilder { } } -impl> FlareonProjectBuilder { +impl FlareonProjectBuilder +where + S: Service + Send + Sync + Clone + 'static, + S::Future: Send, +{ #[must_use] pub fn middleware>( self, @@ -509,16 +527,20 @@ impl> FlareonProjectBuilder { } /// Builds the Flareon project instance. - pub async fn build(mut self) -> Result> { - let database = Self::init_database(self.context.config.database_config()).await?; - self.context.database = Some(database); + pub async fn build(mut self) -> Result { + #[cfg(feature = "db")] + { + let database = Self::init_database(self.context.config.database_config()).await?; + self.context.database = Some(database); + } Ok(FlareonProject { context: self.context, - handler: self.handler, + handler: BoxedHandler::new(self.handler), }) } + #[cfg(feature = "db")] async fn init_database(config: &DatabaseConfig) -> Result> { let database = Database::new(config.url()).await?; Ok(Arc::new(database)) @@ -531,19 +553,14 @@ impl Default for FlareonProjectBuilder { } } -impl FlareonProject<()> { +impl FlareonProject { #[must_use] pub fn builder() -> FlareonProjectBuilder { FlareonProjectBuilder::default() } -} -impl FlareonProject -where - S: Service + Send + Sync + Clone + 'static, -{ #[must_use] - pub fn into_context(self) -> (AppContext, S) { + pub fn into_context(self) -> (AppContext, BoxedHandler) { (self.context, self.handler) } } @@ -556,11 +573,7 @@ where /// # Errors /// /// This function returns an error if the server fails to start. 
-pub async fn run(project: FlareonProject, address_str: &str) -> Result<()> -where - S: Service + Send + Sync + Clone + 'static, - S::Future: Send, -{ +pub async fn run(project: FlareonProject, address_str: &str) -> Result<()> { let listener = tokio::net::TcpListener::bind(address_str) .await .map_err(|e| ErrorRepr::StartServer { source: e })?; @@ -581,19 +594,16 @@ where /// # Errors /// /// This function returns an error if the server fails to start. -pub async fn run_at(project: FlareonProject, listener: tokio::net::TcpListener) -> Result<()> -where - S: Service + Send + Sync + Clone + 'static, - S::Future: Send, -{ +pub async fn run_at(project: FlareonProject, listener: tokio::net::TcpListener) -> Result<()> { let (mut context, mut project_handler) = project.into_context(); + #[cfg(feature = "db")] if let Some(database) = &context.database { let mut migrations: Vec> = Vec::new(); for app in &context.apps { migrations.extend(app.migrations()); } - let migration_engine = MigrationEngine::new(migrations); + let migration_engine = MigrationEngine::new(migrations)?; migration_engine.run(database).await?; } @@ -606,6 +616,8 @@ where context.apps = apps; let context = Arc::new(context); + #[cfg(feature = "db")] + let context_cleanup = context.clone(); let handler = |axum_request: axum::extract::Request| async move { let request = request_axum_to_flareon(axum_request, Arc::clone(&context)); @@ -664,10 +676,21 @@ where if config::REGISTER_PANIC_HOOK { let _ = std::panic::take_hook(); } + #[cfg(feature = "db")] + if let Some(database) = &context_cleanup.database { + database.close().await?; + } Ok(()) } +pub async fn run_cli(project: FlareonProject) -> Result<()> { + // TODO: we want to have a (extensible) CLI interface soon, but for simplicity + // we just run the server now + run(project, "127.0.0.1:8080").await?; + Ok(()) +} + fn request_parts_for_diagnostics(request: Request) -> (Option, Request) { if config::DEBUG_MODE { let (parts, body) = request.into_parts(); @@ 
-692,11 +715,10 @@ pub(crate) fn prepare_request(request: &mut Request, context: Arc) { request.extensions_mut().insert(context); } -async fn pass_to_axum(request: Request, handler: &mut S) -> Result -where - S: Service + Send + Sync + Clone + 'static, - S::Future: Send, -{ +async fn pass_to_axum( + request: Request, + handler: &mut BoxedHandler, +) -> Result { poll_fn(|cx| handler.poll_ready(cx)).await?; let response = handler.call(request).await?; @@ -752,6 +774,7 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` async fn flareon_project_builder() { let project = FlareonProject::builder() .register_app_with_views(MockFlareonApp {}, "/app") @@ -763,6 +786,7 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` async fn flareon_project_router() { let project = FlareonProject::builder() .register_app_with_views(MockFlareonApp {}, "/app") diff --git a/flareon/src/private.rs b/flareon/src/private.rs index 80d7ee7..3ee22cc 100644 --- a/flareon/src/private.rs +++ b/flareon/src/private.rs @@ -7,3 +7,4 @@ pub use async_trait::async_trait; pub use bytes::Bytes; +pub use tokio; diff --git a/flareon/src/request.rs b/flareon/src/request.rs index 3708cc5..98e97b1 100644 --- a/flareon/src/request.rs +++ b/flareon/src/request.rs @@ -17,9 +17,12 @@ use std::sync::Arc; use async_trait::async_trait; use bytes::Bytes; +#[cfg(feature = "json")] +use flareon::headers::JSON_CONTENT_TYPE; use indexmap::IndexMap; use tower_sessions::Session; +#[cfg(feature = "db")] use crate::db::Database; use crate::error::ErrorRepr; use crate::headers::FORM_CONTENT_TYPE; @@ -57,6 +60,7 @@ pub trait RequestExt: private::Sealed { #[must_use] fn path_params_mut(&mut self) -> &mut PathParams; + #[cfg(feature = "db")] #[must_use] fn db(&self) -> &Database; @@ -76,11 +80,40 @@ pub trait RequestExt: private::Sealed { /// Throws an error if the request 
method is not GET or HEAD and the content /// type is not `application/x-www-form-urlencoded`. /// Throws an error if the request body could not be read. + async fn form_data(&mut self) -> Result; + + /// Get the request body as JSON and deserialize it into a type `T` + /// implementing `serde::de::DeserializeOwned`. /// - /// # Returns + /// The content type of the request must be `application/json`. /// - /// The request body as bytes. - async fn form_data(&mut self) -> Result; + /// # Errors + /// + /// Throws an error if the content type is not `application/json`. + /// Throws an error if the request body could not be read. + /// Throws an error if the request body could not be deserialized - either + /// because the JSON is invalid or because the deserialization to the target + /// structure failed. + /// + /// # Example + /// + /// ``` + /// use flareon::request::{Request, RequestExt}; + /// use flareon::response::{Response, ResponseExt}; + /// use serde::{Deserialize, Serialize}; + /// + /// #[derive(Serialize, Deserialize)] + /// struct MyData { + /// hello: String, + /// } + /// + /// async fn my_handler(mut request: Request) -> flareon::Result { + /// let data: MyData = request.json().await?; + /// Ok(Response::new_json(flareon::StatusCode::OK, &data)?) + /// } + /// ``` + #[cfg(feature = "json")] + async fn json(&mut self) -> Result; #[must_use] fn content_type(&self) -> Option<&http::HeaderValue>; @@ -116,6 +149,7 @@ impl RequestExt for Request { self.extensions_mut().get_or_insert_default::() } + #[cfg(feature = "db")] fn db(&self) -> &Database { self.context().database() } @@ -149,6 +183,16 @@ impl RequestExt for Request { } } + #[cfg(feature = "json")] + async fn json(&mut self) -> Result { + self.expect_content_type(JSON_CONTENT_TYPE)?; + + let body = std::mem::take(self.body_mut()); + let bytes = body.into_bytes().await?; + + Ok(serde_json::from_slice(&bytes)?) 
+ } + fn content_type(&self) -> Option<&http::HeaderValue> { self.headers().get(http::header::CONTENT_TYPE) } @@ -201,3 +245,55 @@ impl PathParams { pub(crate) fn query_pairs(bytes: &Bytes) -> impl Iterator, Cow)> { form_urlencoded::parse(bytes.as_ref()) } + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn test_form_data() { + let mut request = http::Request::builder() + .method(http::Method::POST) + .header(http::header::CONTENT_TYPE, FORM_CONTENT_TYPE) + .body(Body::fixed("hello=world")) + .unwrap(); + + let bytes = request.form_data().await.unwrap(); + assert_eq!(bytes, Bytes::from_static(b"hello=world")); + } + + #[cfg(feature = "json")] + #[tokio::test] + async fn test_json() { + let mut request = http::Request::builder() + .method(http::Method::POST) + .header(http::header::CONTENT_TYPE, JSON_CONTENT_TYPE) + .body(Body::fixed(r#"{"hello":"world"}"#)) + .unwrap(); + + let data: serde_json::Value = request.json().await.unwrap(); + assert_eq!(data, serde_json::json!({"hello": "world"})); + } + + #[test] + fn test_path_params() { + let mut path_params = PathParams::new(); + path_params.insert("name".into(), "world".into()); + + assert_eq!(path_params.get("name"), Some("world")); + assert_eq!(path_params.get("missing"), None); + } + + #[test] + fn test_query_pairs() { + let bytes = Bytes::from_static(b"hello=world&foo=bar"); + let pairs: Vec<_> = query_pairs(&bytes).collect(); + assert_eq!( + pairs, + vec![ + (Cow::from("hello"), Cow::from("world")), + (Cow::from("foo"), Cow::from("bar")) + ] + ); + } +} diff --git a/flareon/src/response.rs b/flareon/src/response.rs index c7ebfc7..64da482 100644 --- a/flareon/src/response.rs +++ b/flareon/src/response.rs @@ -13,6 +13,8 @@ //! 
``` use crate::headers::HTML_CONTENT_TYPE; +#[cfg(feature = "json")] +use crate::headers::JSON_CONTENT_TYPE; use crate::{Body, StatusCode}; const RESPONSE_BUILD_FAILURE: &str = "Failed to build response"; @@ -31,13 +33,45 @@ mod private { /// /// This trait is sealed since it doesn't make sense to be implemented for types /// outside the context of Flareon. -pub trait ResponseExt: private::Sealed { +pub trait ResponseExt: Sized + private::Sealed { #[must_use] fn builder() -> http::response::Builder; #[must_use] fn new_html(status: StatusCode, body: Body) -> Self; + /// Create a new JSON response. + /// + /// This function will create a new response with a content type of + /// `application/json` and a body that is the JSON-serialized version of the + /// provided instance of a type implementing `serde::Serialize`. + /// + /// # Errors + /// + /// This function will return an error if the data could not be serialized + /// to JSON. + /// + /// # Examples + /// + /// ``` + /// use flareon::response::{Response, ResponseExt}; + /// use flareon::{Body, StatusCode}; + /// use serde::Serialize; + /// + /// #[derive(Serialize)] + /// struct MyData { + /// hello: String, + /// } + /// + /// let data = MyData { + /// hello: String::from("world"), + /// }; + /// let response = Response::new_json(StatusCode::OK, &data)?; + /// # Ok::<(), flareon::Error>(()) + /// ``` + #[cfg(feature = "json")] + fn new_json(status: StatusCode, data: &T) -> crate::Result; + #[must_use] fn new_redirect>(location: T) -> Self; } @@ -59,6 +93,15 @@ impl ResponseExt for Response { .expect(RESPONSE_BUILD_FAILURE) } + #[cfg(feature = "json")] + fn new_json(status: StatusCode, data: &T) -> crate::Result { + Ok(http::Response::builder() + .status(status) + .header(http::header::CONTENT_TYPE, JSON_CONTENT_TYPE) + .body(Body::fixed(serde_json::to_string(data)?)) + .expect(RESPONSE_BUILD_FAILURE)) + } + #[must_use] fn new_redirect>(location: T) -> Self { http::Response::builder() @@ -74,6 +117,7 @@ mod 
tests { use super::*; use crate::headers::HTML_CONTENT_TYPE; use crate::response::{Response, ResponseExt}; + use crate::BodyInner; #[test] fn response_new_html() { @@ -86,6 +130,33 @@ mod tests { ); } + #[test] + #[cfg(feature = "json")] + fn response_new_json() { + #[derive(serde::Serialize)] + struct MyData { + hello: String, + } + + let data = MyData { + hello: String::from("world"), + }; + let response = Response::new_json(StatusCode::OK, &data).unwrap(); + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get(http::header::CONTENT_TYPE).unwrap(), + JSON_CONTENT_TYPE + ); + match &response.body().inner { + BodyInner::Fixed(fixed) => { + assert_eq!(fixed, r#"{"hello":"world"}"#); + } + _ => { + panic!("Expected fixed body"); + } + } + } + #[test] fn response_new_redirect() { let location = "http://example.com"; diff --git a/flareon/src/router/path.rs b/flareon/src/router/path.rs index 3d43176..4fe59a7 100644 --- a/flareon/src/router/path.rs +++ b/flareon/src/router/path.rs @@ -2,7 +2,6 @@ use std::collections::HashMap; use std::fmt::Display; use log::debug; -use regex::Regex; use thiserror::Error; #[derive(Debug, Clone)] @@ -15,30 +14,41 @@ impl PathMatcher { pub(crate) fn new>(path_pattern: T) -> Self { let path_pattern = path_pattern.into(); - let mut last_end = 0; + #[derive(Debug, Copy, Clone)] + enum State { + Literal { start: usize }, + Param { start: usize }, + } + let mut parts = Vec::new(); - let param_regex = Regex::new(":([^/]+)").expect("Invalid regex"); - for capture in param_regex.captures_iter(&path_pattern) { - let full_match = capture.get(0).expect("Could not get regex match"); - let start = full_match.start(); - if start > last_end { - parts.push(PathPart::Literal(path_pattern[last_end..start].to_string())); + let mut state = State::Literal { start: 0 }; + + for (index, ch) in path_pattern.chars().map(Some).chain([None]).enumerate() { + match (ch, state) { + (Some('/') | None, State::Param { start }) => { + let 
param_name = &path_pattern[start..index]; + assert!( + Self::is_param_name_valid(param_name), + "Invalid parameter name: `{param_name}`" + ); + + parts.push(PathPart::Param { + name: param_name.to_string(), + }); + state = State::Literal { start: index }; + } + (Some(':') | None, State::Literal { start }) => { + let literal = &path_pattern[start..index]; + if !literal.is_empty() { + parts.push(PathPart::Literal(literal.to_string())); + } + state = State::Param { start: index + 1 }; + } + (Some(':'), State::Param { .. }) => { + panic!("Consecutive parameters are not allowed"); + } + _ => {} } - - let name = capture - .get(1) - .expect("Could not get regex capture") - .as_str() - .to_owned(); - assert!( - Self::is_param_name_valid(&name), - "Invalid parameter name: `{name}`" - ); - parts.push(PathPart::Param { name }); - last_end = start + full_match.len(); - } - if last_end < path_pattern.len() { - parts.push(PathPart::Literal(path_pattern[last_end..].to_string())); } Self { parts } @@ -238,6 +248,12 @@ impl<'a> PathParam<'a> { mod tests { use super::*; + #[test] + fn reverse_param_map_default() { + let map = ReverseParamMap::default(); + assert_eq!(map.params.len(), 0); + } + #[test] fn path_parser_no_params() { let path_parser = PathMatcher::new("/users"); @@ -294,6 +310,30 @@ mod tests { ); } + #[test] + #[should_panic(expected = "Consecutive parameters are not allowed")] + fn path_parser_consecutive_params() { + let _ = PathMatcher::new("/users/:id:post_id"); + } + + #[test] + #[should_panic(expected = "Invalid parameter name: ``")] + fn path_parser_invalid_name_empty() { + let _ = PathMatcher::new("/users/:"); + } + + #[test] + #[should_panic(expected = "Invalid parameter name: `123`")] + fn path_parser_invalid_name_numeric() { + let _ = PathMatcher::new("/users/:123"); + } + + #[test] + #[should_panic(expected = "Invalid parameter name: `abc#$%`")] + fn path_parser_invalid_name_non_alphanumeric() { + let _ = PathMatcher::new("/users/:abc#$%"); + } + #[test] fn 
reverse_with_valid_params() { let path_parser = PathMatcher::new("/users/:id/posts/:post_id"); diff --git a/flareon/src/static_files.rs b/flareon/src/static_files.rs index 97e8a51..77d1451 100644 --- a/flareon/src/static_files.rs +++ b/flareon/src/static_files.rs @@ -257,6 +257,7 @@ mod tests { use crate::{FlareonApp, FlareonProject}; #[test] + #[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` fn static_files_add_and_get_file() { let mut static_files = StaticFiles::new(); static_files.add_file("test.txt", "This is a test file"); @@ -338,6 +339,7 @@ mod tests { } #[tokio::test] + #[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` async fn static_files_middleware_from_app_context() { struct App1; impl FlareonApp for App1 { diff --git a/flareon/src/test.rs b/flareon/src/test.rs index 3614bf6..db5498a 100644 --- a/flareon/src/test.rs +++ b/flareon/src/test.rs @@ -4,35 +4,35 @@ use std::future::poll_fn; use std::sync::Arc; use derive_more::Debug; -use flareon::{prepare_request, FlareonProject}; use tower::Service; use tower_sessions::{MemoryStore, Session}; +#[cfg(feature = "db")] use crate::auth::db::DatabaseUserBackend; use crate::config::ProjectConfig; -use crate::db::migrations::{DynMigration, MigrationEngine, MigrationWrapper}; +#[cfg(feature = "db")] +use crate::db::migrations::{ + DynMigration, MigrationDependency, MigrationEngine, MigrationWrapper, Operation, +}; +#[cfg(feature = "db")] use crate::db::Database; use crate::request::{Request, RequestExt}; use crate::response::Response; use crate::router::Router; -use crate::{AppContext, Body, Error, Result}; +use crate::{prepare_request, AppContext, Body, BoxedHandler, FlareonProject, Result}; /// A test client for making requests to a Flareon project. /// /// Useful for End-to-End testing Flareon projects. 
#[derive(Debug)] -pub struct Client { +pub struct Client { context: Arc, - handler: S, + handler: BoxedHandler, } -impl Client -where - S: Service + Send + Sync + Clone + 'static, - S::Future: Send, -{ +impl Client { #[must_use] - pub fn new(project: FlareonProject) -> Self { + pub fn new(project: FlareonProject) -> Self { let (context, handler) = project.into_context(); Self { context: Arc::new(context), @@ -63,8 +63,11 @@ pub struct TestRequestBuilder { url: String, session: Option, config: Option>, + #[cfg(feature = "db")] database: Option>, form_data: Option>, + #[cfg(feature = "json")] + json_data: Option, } impl TestRequestBuilder { @@ -112,11 +115,13 @@ impl TestRequestBuilder { self } + #[cfg(feature = "db")] pub fn database(&mut self, database: Arc) -> &mut Self { self.database = Some(database); self } + #[cfg(feature = "db")] pub fn with_db_auth(&mut self, db: Arc) -> &mut Self { let auth_backend = DatabaseUserBackend; let config = ProjectConfig::builder().auth_backend(auth_backend).build(); @@ -137,6 +142,12 @@ impl TestRequestBuilder { self } + #[cfg(feature = "json")] + pub fn json(&mut self, data: &T) -> &mut Self { + self.json_data = Some(serde_json::to_string(data).expect("Failed to serialize JSON")); + self + } + #[must_use] pub fn build(&mut self) -> http::Request { let mut request = http::Request::builder() @@ -149,6 +160,7 @@ impl TestRequestBuilder { self.config.clone().unwrap_or_default(), Vec::new(), Arc::new(Router::empty()), + #[cfg(feature = "db")] self.database.clone(), ); prepare_request(&mut request, Arc::new(app_context)); @@ -174,49 +186,230 @@ impl TestRequestBuilder { ); } + #[cfg(feature = "json")] + if let Some(json_data) = &self.json_data { + *request.body_mut() = Body::fixed(json_data.clone()); + request.headers_mut().insert( + http::header::CONTENT_TYPE, + http::HeaderValue::from_static("application/json"), + ); + } + request } } +#[cfg(feature = "db")] #[derive(Debug)] -pub struct TestDatabaseBuilder { +pub struct TestDatabase 
{ + database: Arc, + kind: TestDatabaseKind, migrations: Vec, } -impl Default for TestDatabaseBuilder { - fn default() -> Self { - Self::new() - } -} - -impl TestDatabaseBuilder { - #[must_use] - pub fn new() -> Self { +#[cfg(feature = "db")] +impl TestDatabase { + fn new(database: Database, kind: TestDatabaseKind) -> TestDatabase { Self { + database: Arc::new(database), + kind, migrations: Vec::new(), } } - #[must_use] + /// Create a new in-memory SQLite database for testing. + pub async fn new_sqlite() -> Result { + let database = Database::new("sqlite::memory:").await?; + Ok(Self::new(database, TestDatabaseKind::Sqlite)) + } + + /// Create a new PostgreSQL database for testing and connect to it. + /// + /// The database URL is read from the `POSTGRES_URL` environment variable. + /// Note that it shouldn't include the database name — the function will + /// create a new database for the test by connecting to the `postgres` + /// database. If no URL is provided, it defaults to + /// `postgresql://flareon:flareon@localhost`. + /// + /// The database is created with the name `test_flareon__{test_name}`. + /// Make sure that `test_name` is unique for each test so that the databases + /// don't conflict with each other. + /// + /// The database is dropped when `self.cleanup()` is called. Note that this + /// means that the database will not be dropped if the test panics. 
+ pub async fn new_postgres(test_name: &str) -> Result { + let db_url = std::env::var("POSTGRES_URL") + .unwrap_or_else(|_| "postgresql://flareon:flareon@localhost".to_string()); + let database = Database::new(format!("{db_url}/postgres")).await?; + + let test_database_name = format!("test_flareon__{test_name}"); + database + .raw(&format!("DROP DATABASE IF EXISTS {test_database_name}")) + .await?; + database + .raw(&format!("CREATE DATABASE {test_database_name}")) + .await?; + database.close().await?; + + let database = Database::new(format!("{db_url}/{test_database_name}")).await?; + + Ok(Self::new( + database, + TestDatabaseKind::Postgres { + db_url, + db_name: test_database_name, + }, + )) + } + + /// Create a new MySQL database for testing and connect to it. + /// + /// The database URL is read from the `MYSQL_URL` environment variable. + /// Note that it shouldn't include the database name — the function will + /// create a new database for the test by connecting to the `mysql` + /// database. If no URL is provided, it defaults to + /// `mysql://root:@localhost`. + /// + /// The database is created with the name `test_flareon__{test_name}`. + /// Make sure that `test_name` is unique for each test so that the databases + /// don't conflict with each other. + /// + /// The database is dropped when `self.cleanup()` is called. Note that this + /// means that the database will not be dropped if the test panics. 
+ pub async fn new_mysql(test_name: &str) -> Result { + let db_url = + std::env::var("MYSQL_URL").unwrap_or_else(|_| "mysql://root:@localhost".to_string()); + let database = Database::new(format!("{db_url}/mysql")).await?; + + let test_database_name = format!("test_flareon__{test_name}"); + database + .raw(&format!("DROP DATABASE IF EXISTS {test_database_name}")) + .await?; + database + .raw(&format!("CREATE DATABASE {test_database_name}")) + .await?; + database.close().await?; + + let database = Database::new(format!("{db_url}/{test_database_name}")).await?; + + Ok(Self::new( + database, + TestDatabaseKind::MySql { + db_url, + db_name: test_database_name, + }, + )) + } + pub fn add_migrations>( - mut self, + &mut self, migrations: V, - ) -> Self { + ) -> &mut Self { self.migrations .extend(migrations.into_iter().map(MigrationWrapper::new)); self } + #[cfg(feature = "db")] + pub fn with_auth(&mut self) -> &mut Self { + self.add_migrations(flareon::auth::db::migrations::MIGRATIONS.to_vec()); + self + } + + pub async fn run_migrations(&mut self) -> &mut Self { + if !self.migrations.is_empty() { + let engine = MigrationEngine::new(std::mem::take(&mut self.migrations)).unwrap(); + engine.run(&self.database()).await.unwrap(); + } + self + } + #[must_use] - pub fn with_auth(self) -> Self { - self.add_migrations(flareon::auth::db::migrations::MIGRATIONS.to_vec()) + pub fn database(&self) -> Arc { + self.database.clone() } + pub async fn cleanup(&self) -> Result<()> { + self.database.close().await?; + match &self.kind { + TestDatabaseKind::Sqlite => {} + TestDatabaseKind::Postgres { db_url, db_name } => { + let database = Database::new(format!("{db_url}/postgres")).await?; + + database.raw(&format!("DROP DATABASE {db_name}")).await?; + database.close().await?; + } + TestDatabaseKind::MySql { db_url, db_name } => { + let database = Database::new(format!("{db_url}/mysql")).await?; + + database.raw(&format!("DROP DATABASE {db_name}")).await?; + database.close().await?; + } + 
} + + Ok(()) + } +} + +#[cfg(feature = "db")] +impl std::ops::Deref for TestDatabase { + type Target = Database; + + fn deref(&self) -> &Self::Target { + &self.database + } +} + +#[cfg(feature = "db")] +#[derive(Debug, Clone)] +enum TestDatabaseKind { + Sqlite, + Postgres { db_url: String, db_name: String }, + MySql { db_url: String, db_name: String }, +} + +#[cfg(feature = "db")] +#[derive(Debug, Clone)] +pub struct TestMigration { + app_name: &'static str, + name: &'static str, + dependencies: Vec, + operations: Vec, +} + +#[cfg(feature = "db")] +impl TestMigration { #[must_use] - pub async fn build(self) -> Database { - let engine = MigrationEngine::new(self.migrations); - let database = Database::new("sqlite::memory:").await.unwrap(); - engine.run(&database).await.unwrap(); - database + pub fn new>, O: Into>>( + app_name: &'static str, + name: &'static str, + dependencies: D, + operations: O, + ) -> Self { + Self { + app_name, + name, + dependencies: dependencies.into(), + operations: operations.into(), + } + } +} + +#[cfg(feature = "db")] +impl DynMigration for TestMigration { + fn app_name(&self) -> &str { + self.app_name + } + + fn name(&self) -> &str { + self.name + } + + fn dependencies(&self) -> &[MigrationDependency] { + &self.dependencies + } + + fn operations(&self) -> &[Operation] { + &self.operations } } diff --git a/flareon/tests/auth.rs b/flareon/tests/auth.rs index d7d9eac..9b968c9 100644 --- a/flareon/tests/auth.rs +++ b/flareon/tests/auth.rs @@ -1,14 +1,12 @@ -use std::sync::Arc; - use flareon::auth::db::{DatabaseUser, DatabaseUserCredentials}; use flareon::auth::{AuthRequestExt, Password}; -use flareon::test::{TestDatabaseBuilder, TestRequestBuilder}; +use flareon::test::{TestDatabase, TestRequestBuilder}; -#[tokio::test] -async fn database_user() { - let db = Arc::new(TestDatabaseBuilder::new().with_auth().build().await); +#[flareon_macros::dbtest] +async fn database_user(test_db: &mut TestDatabase) { + 
test_db.with_auth().run_migrations().await; let mut request_builder = TestRequestBuilder::get("/"); - request_builder.with_db_auth(db.clone()); + request_builder.with_db_auth(test_db.database()); // Anonymous user let mut request = request_builder.clone().with_session().build(); @@ -16,9 +14,13 @@ async fn database_user() { assert!(!user.is_authenticated()); // Authenticated user - DatabaseUser::create_user(&*db, "testuser".to_string(), &Password::new("password123")) - .await - .unwrap(); + DatabaseUser::create_user( + &**test_db, + "testuser".to_string(), + &Password::new("password123"), + ) + .await + .unwrap(); let user = request .authenticate(&DatabaseUserCredentials::new( diff --git a/flareon/tests/db.rs b/flareon/tests/db.rs index 05d44dd..d365a2a 100644 --- a/flareon/tests/db.rs +++ b/flareon/tests/db.rs @@ -1,59 +1,61 @@ +#![cfg(feature = "fake")] +#![cfg_attr(miri, ignore)] + +use fake::rand::rngs::StdRng; +use fake::rand::SeedableRng; use fake::{Dummy, Fake, Faker}; use flareon::db::migrations::{Field, Operation}; use flareon::db::query::ExprEq; -use flareon::db::{model, query, Database, DatabaseField, Identifier, Model}; -use rand::rngs::StdRng; -use rand::SeedableRng; - -#[tokio::test] -async fn model_crud() { - let db = test_sqlite_db().await; +use flareon::db::{model, query, Database, DatabaseField, Identifier, LimitedString, Model}; +use flareon::test::TestDatabase; - migrate_test_model(&db).await; +#[flareon_macros::dbtest] +async fn model_crud(test_db: &mut TestDatabase) { + migrate_test_model(&*test_db).await; - assert_eq!(TestModel::objects().all(&db).await.unwrap(), vec![]); + assert_eq!(TestModel::objects().all(&**test_db).await.unwrap(), vec![]); let mut model = TestModel { id: 0, name: "test".to_owned(), }; - model.save(&db).await.unwrap(); - let objects = TestModel::objects().all(&db).await.unwrap(); + model.save(&**test_db).await.unwrap(); + let objects = TestModel::objects().all(&**test_db).await.unwrap(); assert_eq!(objects.len(), 1); 
assert_eq!(objects[0].name, "test"); TestModel::objects() .filter(::Fields::id.eq(1)) - .delete(&db) + .delete(&**test_db) .await .unwrap(); - assert_eq!(TestModel::objects().all(&db).await.unwrap(), vec![]); - - db.close().await.unwrap(); + assert_eq!(TestModel::objects().all(&**test_db).await.unwrap(), vec![]); } -#[tokio::test] -async fn model_macro_filtering() { - let db = test_sqlite_db().await; - - migrate_test_model(&db).await; +#[flareon_macros::dbtest] +async fn model_macro_filtering(test_db: &mut TestDatabase) { + migrate_test_model(&*test_db).await; - assert_eq!(TestModel::objects().all(&db).await.unwrap(), vec![]); + assert_eq!(TestModel::objects().all(&**test_db).await.unwrap(), vec![]); let mut model = TestModel { id: 0, name: "test".to_owned(), }; - model.save(&db).await.unwrap(); - let objects = query!(TestModel, $name == "test").all(&db).await.unwrap(); + model.save(&**test_db).await.unwrap(); + let objects = query!(TestModel, $name == "test") + .all(&**test_db) + .await + .unwrap(); assert_eq!(objects.len(), 1); assert_eq!(objects[0].name, "test"); - let objects = query!(TestModel, $name == "t").all(&db).await.unwrap(); + let objects = query!(TestModel, $name == "t") + .all(&**test_db) + .await + .unwrap(); assert!(objects.is_empty()); - - db.close().await.unwrap(); } #[derive(Debug, PartialEq)] @@ -83,12 +85,14 @@ macro_rules! 
all_fields_migration_field { Identifier::new(concat!("field_", stringify!($name))), <$ty as DatabaseField>::TYPE, ) + .set_null(<$ty as DatabaseField>::NULLABLE) }; ($ty:ty) => { Field::new( Identifier::new(concat!("field_", stringify!($ty))), <$ty as DatabaseField>::TYPE, ) + .set_null(<$ty as DatabaseField>::NULLABLE) }; } @@ -113,10 +117,14 @@ struct AllFieldsModel { field_f64: f64, field_date: chrono::NaiveDate, field_time: chrono::NaiveTime, + #[dummy(faker = "fake::chrono::Precision::<6>")] field_datetime: chrono::NaiveDateTime, + #[dummy(faker = "fake::chrono::Precision::<6>")] field_datetime_timezone: chrono::DateTime, field_string: String, field_blob: Vec, + field_option: Option, + field_limited_string: LimitedString<10>, } async fn migrate_all_fields_model(db: &Database) { @@ -146,33 +154,46 @@ const CREATE_ALL_FIELDS_MODEL: Operation = Operation::create_model() all_fields_migration_field!(datetime_timezone, chrono::DateTime), all_fields_migration_field!(string, String), all_fields_migration_field!(blob, Vec), + all_fields_migration_field!(option, Option), + all_fields_migration_field!(limited_string, LimitedString<10>), ]) .build(); -#[tokio::test] -async fn all_fields_model() { - let db = test_sqlite_db().await; - +#[flareon_macros::dbtest] +async fn all_fields_model(db: &mut TestDatabase) { migrate_all_fields_model(&db).await; - assert_eq!(AllFieldsModel::objects().all(&db).await.unwrap(), vec![]); + assert_eq!(AllFieldsModel::objects().all(&**db).await.unwrap(), vec![]); let r = &mut StdRng::seed_from_u64(123_785); let mut models = (0..100) .map(|_| Faker.fake_with_rng(r)) .collect::>(); for model in &mut models { - model.save(&db).await.unwrap(); + model.save(&**db).await.unwrap(); } - let mut models_from_db: Vec<_> = AllFieldsModel::objects().all(&db).await.unwrap(); + let mut models_from_db: Vec<_> = AllFieldsModel::objects().all(&**db).await.unwrap(); models_from_db.iter_mut().for_each(|model| model.id = 0); - - assert_eq!(models, models_from_db); 
- - db.close().await.unwrap(); + normalize_datetimes(&mut models); + normalize_datetimes(&mut models_from_db); + + assert_eq!(models.len(), models_from_db.len()); + for model in &models { + assert!( + models_from_db.contains(model), + "Could not find model {:?} in models_from_db: {:?}", + model, + models_from_db + ); + } } -async fn test_sqlite_db() -> Database { - Database::new("sqlite::memory:").await.unwrap() +/// Normalize the datetimes to UTC. +fn normalize_datetimes(data: &mut Vec) { + for model in data { + model.field_datetime_timezone = model.field_datetime_timezone.with_timezone( + &chrono::FixedOffset::east_opt(0).expect("UTC timezone is always valid"), + ); + } } diff --git a/flareon/tests/flareon_project.rs b/flareon/tests/flareon_project.rs index 3780bc8..967b369 100644 --- a/flareon/tests/flareon_project.rs +++ b/flareon/tests/flareon_project.rs @@ -9,6 +9,7 @@ async fn hello(_request: Request) -> flareon::Result { } #[tokio::test] +#[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` async fn flareon_project_router_sub_path() { struct App1; impl FlareonApp for App1 { diff --git a/flareon/tests/router.rs b/flareon/tests/router.rs index 5b9fae5..18e48ba 100644 --- a/flareon/tests/router.rs +++ b/flareon/tests/router.rs @@ -1,7 +1,7 @@ use bytes::Bytes; use flareon::request::{Request, RequestExt}; use flareon::response::{Response, ResponseExt}; -use flareon::router::{Route, Router, RouterService}; +use flareon::router::{Route, Router}; use flareon::test::Client; use flareon::{Body, FlareonApp, FlareonProject, StatusCode}; @@ -19,6 +19,7 @@ async fn parameterized(request: Request) -> flareon::Result { } #[tokio::test] +#[cfg_attr(miri, ignore)] // unsupported operation: can't call foreign function `sqlite3_open_v2` async fn test_index() { let mut client = Client::new(project().await); @@ -31,6 +32,7 @@ async fn test_index() { } #[tokio::test] +#[cfg_attr(miri, ignore)] // unsupported operation: can't call 
foreign function `sqlite3_open_v2` async fn path_params() { let mut client = Client::new(project().await); @@ -43,7 +45,7 @@ async fn path_params() { } #[must_use] -async fn project() -> FlareonProject { +async fn project() -> FlareonProject { struct RouterApp; impl FlareonApp for RouterApp { fn name(&self) -> &'static str { diff --git a/rustfmt.toml b/rustfmt.toml index 254e96e..b419fe5 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -2,7 +2,6 @@ format_code_in_doc_comments = true group_imports = "StdExternalCrate" imports_granularity = "module" normalize_comments = true -reorder_impl_items = true unstable_features = true use_field_init_shorthand = true wrap_comments = true