diff --git a/.cargo/config.toml b/.cargo/config.toml
new file mode 100644
index 00000000..35049cbc
--- /dev/null
+++ b/.cargo/config.toml
@@ -0,0 +1,2 @@
+[alias]
+xtask = "run --package xtask --"
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index c85f96ad..3620e8d0 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -80,3 +80,20 @@ jobs:
         run: cargo binstall --no-confirm --no-symlinks cargo-deny

       - run: cargo deny check
+
+  cargo-about:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Setup Rust
+        run: rustup update stable && rustup default stable && rustup component add clippy
+
+      - name: Get cargo-binstall
+        run: |
+          curl -L https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz | tar -zxf - && mv cargo-binstall $HOME/.cargo/bin/
+
+      - name: Install required cargo addons
+        run: cargo binstall --no-confirm --no-symlinks cargo-about
+
+      - run: mkdir target && cargo about generate about.hbs > target/license.html
diff --git a/.github/workflows/mdbook.yml b/.github/workflows/mdbook.yml
new file mode 100644
index 00000000..7c357ece
--- /dev/null
+++ b/.github/workflows/mdbook.yml
@@ -0,0 +1,32 @@
+name: Mdbook
+
+on:
+  push:
+    branches: [ "main" ]
+  pull_request:
+    branches: [ "main" ]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  test:
+    name: Test
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@master
+      - name: Install Rust
+        run: |
+          rustup set profile minimal
+          rustup toolchain install stable
+          rustup default stable
+      - name: Install latest mdbook
+        run: |
+          tag=$(curl 'https://api.github.com/repos/rust-lang/mdbook/releases/latest' | jq -r '.tag_name')
+          url="https://github.com/rust-lang/mdbook/releases/download/${tag}/mdbook-${tag}-x86_64-unknown-linux-gnu.tar.gz"
+          mkdir bin
+          curl -sSL $url | tar -xz --directory=bin
+          echo "$(pwd)/bin" >> $GITHUB_PATH
+      - name: Run tests
+        run: cd doc && mdbook test
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index a7ae791b..3ed637b0 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -31,7 +31,27 @@ defaults:
   shell: bash

 jobs:
-  upload-assets:
+
+  cargo-about:
+    if: github.repository_owner == 'VorpalBlade' && (startsWith(github.event.release.name, 'paketkoll-v') || startsWith(github.event.release.name, 'konfigkoll-v'))
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup Rust
+        run: rustup update stable && rustup default stable && rustup component add clippy
+      - name: Get cargo-binstall
+        run: |
+          curl -L https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz | tar -zxf - && mv cargo-binstall $HOME/.cargo/bin/
+      - name: Install required cargo addons
+        run: cargo binstall --no-confirm --no-symlinks cargo-about
+      - run: mkdir target && cargo about generate about.hbs > target/licenses.html
+      - name: Upload licenses.html
+        run: GITHUB_TOKEN="${token}" retry gh release upload "${tag}" target/licenses.html --clobber
+        env:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          tag: ${{ github.event.release.tag_name }}
+
+  upload-paketkoll:
     name: ${{ matrix.target }}
     if: github.repository_owner == 'VorpalBlade' && startsWith(github.event.release.name, 'paketkoll-v')
     runs-on: ubuntu-22.04
@@ -63,7 +83,40 @@ jobs:
         with:
           subject-path: "${{ steps.upload-rust-binary-action.outputs.archive }}.*"

-  upload-aur:
+  upload-konfigkoll:
+    name: ${{ matrix.target }}
+    if: github.repository_owner == 'VorpalBlade' && startsWith(github.event.release.name, 'konfigkoll-v')
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
+        include:
+          - target: aarch64-unknown-linux-musl
+          - target: armv7-unknown-linux-musleabihf
+          - target: i686-unknown-linux-musl
+          - target: riscv64gc-unknown-linux-gnu
+          - target: x86_64-unknown-linux-musl
+    timeout-minutes: 60
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Install Rust toolchain
+        uses: dtolnay/rust-toolchain@stable
+      - uses: taiki-e/install-action@cross
+      - uses: taiki-e/upload-rust-binary-action@v1.21.1
+        id: upload-rust-binary-action
+        with:
+          bin: konfigkoll, konfigkoll_rune
+          target: ${{ matrix.target }}
+          # Include version number.
+          archive: $bin-$tag-$target
+          token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Generate artifact attestation
+        uses: actions/attest-build-provenance@v1
+        with:
+          subject-path: "${{ steps.upload-rust-binary-action.outputs.archive }}.*"
+
+  upload-aur-paketkoll:
+    if: github.repository_owner == 'VorpalBlade' && startsWith(github.event.release.name, 'paketkoll-v')
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
@@ -85,3 +138,27 @@ jobs:
           commit_email: ${{ secrets.AUR_EMAIL }}
           ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
           commit_message: New upstream release (automatic update from GitHub Actions)
+
+  #upload-aur-konfigkoll:
+  #  if: github.repository_owner == 'VorpalBlade' && startsWith(github.event.release.name, 'konfigkoll-v')
+  #  runs-on: ubuntu-latest
+  #  steps:
+  #    - uses: actions/checkout@v4
+  #    - name: Get AUR repo
+  #      run: git clone https://aur.archlinux.org/konfigkoll.git aur
+  #    - name: Update PKGBUILD
+  #      run: |
+  #        sed -i '/^_pkgver/s/=.*$/='${RELEASE_TAG#refs/tags/konfigkoll-v}'/' "aur/PKGBUILD"
+  #        sed -i '/^pkgrel/s/=.*$/=1/' "aur/PKGBUILD"
+  #      env:
+  #        RELEASE_TAG: ${{ github.ref }}
+  #    - name: Publish AUR package
+  #      uses: KSXGitHub/github-actions-deploy-aur@v2.7.2
+  #      with:
+  #        pkgname: konfigkoll
+  #        pkgbuild: aur/PKGBUILD
+  #        updpkgsums: true
+  #        commit_username: ${{ secrets.AUR_USERNAME }}
+  #        commit_email: ${{ secrets.AUR_EMAIL }}
+  #        ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
+  #        commit_message: New upstream release (automatic update from GitHub Actions)
diff --git a/.github/workflows/site-release.yml b/.github/workflows/site-release.yml
new file mode 100644
index 00000000..040555e5
--- /dev/null
+++ b/.github/workflows/site-release.yml
@@ -0,0 +1,56 @@
+name: Website
+
+on:
+  push:
+    tags:
+      - v[0-9]+.*
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+  cancel-in-progress: true
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write # To push a branch
+      pages: write # To push to a GitHub Pages site
+      id-token: write # To update the deployment status
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Install latest mdbook
+        run: |
+          tag=$(curl 'https://api.github.com/repos/rust-lang/mdbook/releases/latest' | jq -r '.tag_name')
+          url="https://github.com/rust-lang/mdbook/releases/download/${tag}/mdbook-${tag}-x86_64-unknown-linux-gnu.tar.gz"
+          mkdir mdbook
+          curl -sSL $url | tar -xz --directory=./mdbook
+          echo `pwd`/mdbook >> $GITHUB_PATH
+      - run: mkdir -p target/site/
+      - name: Build Book
+        run: |
+          (cd doc && mdbook build -d ../target/site/book)
+      - name: Add static landing page
+        run: cp -r site/* target/site/
+
+      - name: Install Rust
+        run: rustup update stable && rustup default stable
+      - name: Cache builds
+        uses: Swatinem/rust-cache@v2.7.3
+        with:
+          key: website
+      - name: Build Rune API docs
+        run: cargo run --bin konfigkoll-rune -- doc --output target/site/api
+
+      - name: Setup Pages
+        uses: actions/configure-pages@v5
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v3
+        with:
+          # Upload book directory
+          path: 'target/site'
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v4
diff --git a/.gitignore b/.gitignore
index dab02fec..065e70c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,12 @@
 .gdb_history
 .idea
+*.code-workspace
 /.vscode
+/patches
 /target
+/test_config
 flamegraph.svg
 heaptrack.*
 memory-profiling_*
 perf.data
-perf.data.old
+perf.data.old
\ No newline at end of file
diff --git a/Cargo.lock b/Cargo.lock
index 92c31f51..c35741b6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -39,6 +39,18 @@ dependencies = [
 "memchr",
 ]

+[[package]]
+name = "aliasable"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd"
+
+[[package]]
+name = "allocator-api2"
+version = "0.2.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
+
 [[package]]
 name = "anstream"
 version = "0.6.14"
@@ -124,6 +136,18 @@ dependencies = [
 "rustc-demangle",
 ]

+[[package]]
+name = "base64"
+version = "0.21.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
 [[package]]
 name = "base64-simd"
 version = "0.8.0"
@@ -134,6 +158,15 @@ dependencies = [
 "vsimd",
 ]

+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -165,10 +198,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706"
 dependencies = [
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.7",
  "serde",
 ]

+[[package]]
+name = "bumpalo"
+version = "3.16.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
+
+[[package]]
+name = "byteorder"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
+
+[[package]]
+name = "bytes"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952"
+
 [[package]]
 name = "bzip2"
 version = "0.4.4"
@@ -190,6 +241,29 @@ dependencies = [
 "pkg-config",
 ]

+[[package]]
+name = "cached"
+version = "0.53.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4d73155ae6b28cf5de4cfc29aeb02b8a1c6dab883cb015d15cd514e42766846"
+dependencies = [
+ "ahash",
+ "directories",
+ "hashbrown 0.14.5",
+ "once_cell",
+ "rmp-serde",
+ "serde",
+ "sled",
+ "thiserror",
+ "web-time",
+]
+
+[[package]]
+name = "camino"
+version = "1.1.7"
+source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0ec6b951b160caa93cc0c7b209e5a3bff7aae9062213451ac99493cd844c239" + [[package]] name = "castaway" version = "0.2.3" @@ -201,15 +275,20 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.106" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "066fce287b1d4eafef758e89e09d724a24808a9196fe9756b8ca90e86d0719a2" +checksum = "2aba8f4e9906c7ce3c73463f62a7f0c65183ada1a2d47e397cc8810827f9694f" dependencies = [ "jobserver", "libc", - "once_cell", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cfg-if" version = "1.0.0" @@ -259,7 +338,7 @@ version = "4.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" dependencies = [ - "heck", + "heck 0.5.0", "proc-macro2", "quote", "syn", @@ -281,6 +360,12 @@ dependencies = [ "roff", ] +[[package]] +name = "clru" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbd0f76e066e64fdc5631e3bb46381254deab9ef1158292f27c8c57e3bf3fe59" + [[package]] name = "cmake" version = "0.1.50" @@ -290,27 +375,61 @@ dependencies = [ "cc", ] +[[package]] +name = "codespan-reporting" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e" +dependencies = [ + "termcolor", + "unicode-width", +] + [[package]] name = "colorchoice" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "compact_str" -version = "0.7.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f86b9c4c00838774a6d902ef931eff7470720c51d90c2e32cfe15dc304737b3f" +checksum = "6050c3a16ddab2e412160b31f2c871015704239bca62f72f6e5f0be631d3f644" dependencies = [ "castaway", "cfg-if", "itoa", + "rustversion", "ryu", "serde", "smallvec", "static_assertions", ] +[[package]] +name = "console" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.52.0", +] + [[package]] name = "const-random" version = "0.1.18" @@ -331,6 +450,31 @@ dependencies = [ "tiny-keccak", ] +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + [[package]] name = "crc32fast" version = "1.4.2" @@ -340,6 +484,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + [[package]] name = "crossbeam-deque" version = "0.8.5" @@ -383,9 +533,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -393,9 +543,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", @@ -407,9 +557,9 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.9" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", @@ -426,7 +576,7 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.10", ] [[package]] @@ -440,10 +590,19 @@ dependencies = [ "hashbrown 0.14.5", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.10", "rayon", ] +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", +] + [[package]] name = "derive_builder" version = "0.20.0" @@ -491,6 +650,15 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "directories" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35" +dependencies = [ + "dirs-sys", +] + [[package]] name = "dirs" version = "5.0.1" @@ -521,12 +689,30 @@ dependencies = [ "const-random", ] +[[package]] +name = "duct" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ab5718d1224b63252cd0c6f74f6480f9ffeb117438a2e0f5cf6d9a4798929c" +dependencies = [ + "libc", + "once_cell", + "os_pipe", + "shared_child", +] + [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "env_filter" version = "0.1.0" @@ -550,6 +736,12 @@ dependencies = [ "log", ] +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + [[package]] name = 
"errno" version = "0.3.9" @@ -566,6 +758,12 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" +[[package]] +name = "fastrand" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" + [[package]] name = "filetime" version = "0.2.23" @@ -604,6 +802,58 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fs2" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -614,6 +864,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "getopts" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +dependencies = [ + "unicode-width", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -646,8 +905,22 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "handlebars" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", ] [[package]] @@ -664,6 +937,16 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "heck" @@ -677,6 +960,15 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + [[package]] name = "humantime" version = "2.1.0" @@ -689,6 +981,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "ignore" version = "0.4.22" @@ -699,12 +1001,22 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata", + "regex-automata 0.4.7", "same-file", "walkdir", "winapi-util", ] +[[package]] +name = "indexmap" +version = "2.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +dependencies = [ + "equivalent", + "hashbrown 0.14.5", +] + [[package]] name = "indoc" version = "2.0.5" @@ -712,84 +1024,271 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] -name = "is_terminal_polyfill" -version = "1.70.0" +name = "instant" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] [[package]] -name = "itoa" -version = "1.0.11" +name = "is_terminal_polyfill" +version = "1.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" [[package]] -name = "jobserver" -version = "0.1.31" +name = "itertools" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" dependencies = [ - "libc", + "either", ] [[package]] -name = "lasso" -version = "0.7.2" +name = "itertools" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4644821e1c3d7a560fe13d842d13f587c07348a1a05d3a797152d41c90c56df2" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ - "ahash", - "dashmap 5.5.3", - "hashbrown 0.13.2", + "either", ] [[package]] -name = "libc" -version = "0.2.155" +name = "itoa" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] -name = "libmimalloc-sys" -version = "0.1.39" +name = "jni" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" dependencies = [ - "cc", - "libc", + 
"cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", + "windows-sys 0.45.0", ] [[package]] -name = "libredox" -version = "0.1.3" +name = "jni-sys" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" -dependencies = [ - "bitflags 2.6.0", - "libc", -] +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] -name = "libz-ng-sys" -version = "1.1.15" +name = "jobserver" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6409efc61b12687963e602df8ecf70e8ddacf95bc6576bcf16e3ac6328083c5" +checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" dependencies = [ - "cmake", "libc", ] [[package]] -name = "linux-raw-sys" -version = "0.4.14" +name = "js-sys" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] [[package]] -name = "lock_api" -version = "0.4.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +name = "konfigkoll" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "camino", + "clap", + "compact_str", + "directories", + "either", + "itertools 0.13.0", + "konfigkoll_core", + "konfigkoll_script", + "konfigkoll_types", + "mimalloc", + "ouroboros", + "paketkoll_cache", + "paketkoll_core", + "paketkoll_types", + "rayon", + "rune", + "tokio", + "tracing", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "konfigkoll_core" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "camino", + "clru", + "compact_str", + "console", + "duct", + "either", + "indoc", + "itertools 0.13.0", + "konfigkoll_types", + "libc", + "nix", + "paketkoll_types", + "paketkoll_utils", + "parking_lot 0.12.3", + "pretty_assertions", + "rayon", + "regex", + "smallvec", + "strum", + "tracing", +] + +[[package]] +name = "konfigkoll_hwinfo" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "indoc", + "itertools 0.13.0", + "pretty_assertions", + "rune", + "winnow 0.6.15", +] + +[[package]] +name = "konfigkoll_script" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "camino", + "compact_str", + "glob", + "indoc", + "itertools 0.13.0", + "konfigkoll_core", + "konfigkoll_hwinfo", + "konfigkoll_types", + "nix", + "paketkoll_core", + "paketkoll_types", + "paketkoll_utils", + "parking_lot 0.12.3", + "pretty_assertions", + "regex", + "rune", + "rune-modules", + "rust-ini", + "smallvec", + "sysinfo", + "tempfile", + "thiserror", + "tokio", + "tracing", + "winnow 0.6.15", +] + +[[package]] +name = "konfigkoll_types" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "bitflags 2.6.0", + "camino", + "compact_str", + "either", + "paketkoll_types", + "paketkoll_utils", + "strum", +] + +[[package]] +name = "lasso" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4644821e1c3d7a560fe13d842d13f587c07348a1a05d3a797152d41c90c56df2" +dependencies = [ + "ahash", + "dashmap 5.5.3", + "hashbrown 0.13.2", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.155" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libmimalloc-sys" +version = "0.1.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.6.0", + "libc", +] + +[[package]] +name = "libz-ng-sys" +version = "1.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6409efc61b12687963e602df8ecf70e8ddacf95bc6576bcf16e3ac6328083c5" +dependencies = [ + "cmake", + "libc", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -801,6 +1300,19 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +[[package]] +name = "lsp-types" +version = "0.94.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1" +dependencies = [ + "bitflags 1.3.2", + "serde", + "serde_json", + "serde_repr", + "url", +] + [[package]] name = "lzma-sys" version = "0.1.20" @@ -812,6 +1324,24 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "md-5" version = "0.10.6" @@ -846,6 +1376,17 @@ dependencies = [ "adler", ] +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + [[package]] name = "mtree2" version = "0.6.1" @@ -853,8 +1394,54 @@ dependencies = [ "bitflags 2.6.0", "faster-hex", "memchr", + "smallvec", +] + +[[package]] +name = "musli" +version = "0.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c21124dd24833900879114414b877f2136f4b7b7a3b49756ecc5c36eca332bb" +dependencies = [ + "musli-macros", +] + +[[package]] +name = "musli-common" +version = "0.0.42" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "178446623aa62978aa0f894b2081bc11ea77c2119ccfe35be428ab9ddb495dfc" +dependencies = [ + "musli", +] + +[[package]] +name = "musli-macros" +version = "0.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f1ab0e4ac2721bc4fa3528a6a2640c1c30c36c820f8c85159252fbf6c2fac24" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "musli-storage" +version = "0.0.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2fc1f80b166f611c462e1344220e9b3a9ad37c885e43039d5d2e6887445937c" +dependencies = [ + "musli", + "musli-common", ] +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + [[package]] name = "nix" version = "0.29.0" @@ -867,6 +1454,104 @@ dependencies = [ "libc", ] +[[package]] +name = "ntapi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + [[package]] name = "num_cpus" version = "1.16.0" @@ -877,6 +1562,15 @@ dependencies = 
[ "libc", ] +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + [[package]] name = "object" version = "0.36.1" @@ -891,6 +1585,32 @@ name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +dependencies = [ + "critical-section", + "portable-atomic", +] + +[[package]] +name = "onig" +version = "6.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f" +dependencies = [ + "bitflags 1.3.2", + "libc", + "once_cell", + "onig_sys", +] + +[[package]] +name = "onig_sys" +version = "69.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7" +dependencies = [ + "cc", + "pkg-config", +] [[package]] name = "option-ext" @@ -918,12 +1638,53 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "os_pipe" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29d73ba8daf8fac13b0501d1abeddcfe21ba7401ada61a819144b6c2a4f32209" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "ouroboros" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "944fa20996a25aded6b4795c6d63f10014a7a83f8be9828a11860b08c5fc4a67" +dependencies = [ + "aliasable", + "ouroboros_macro", + "static_assertions", +] + +[[package]] +name = "ouroboros_macro" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39b0deead1528fd0e5947a8546a9642a9777c25f6e1e26f34c97b204bbb465bd" +dependencies = [ + "heck 0.4.1", + "itertools 0.12.1", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn", +] + [[package]] name = "outref" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4030760ffd992bef45b0ae3f10ce1aba99e33464c90d14dd7c039884963ddc7a" +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "paketkoll" version = "0.2.3" @@ -931,20 +1692,32 @@ dependencies = [ "ahash", "anyhow", "clap", - "clap_complete", - "clap_mangen", "compact_str", "env_logger", "log", "mimalloc", "os_info", "paketkoll_core", + "paketkoll_types", "proc-exit", "rayon", "serde", "serde_json", ] +[[package]] +name = "paketkoll_cache" +version = "0.1.0" +dependencies = [ + "ahash", + "anyhow", + "cached", + "compact_str", + "dashmap 6.0.1", + "paketkoll_types", + "tracing", +] + [[package]] name = "paketkoll_core" version = "0.4.1" @@ -994,15 +1767,19 @@ dependencies = [ name = "paketkoll_types" version = "0.1.0" dependencies = [ + "ahash", "anyhow", "bitflags 2.6.0", "compact_str", + "dashmap 6.0.1", "derive_builder", "faster-hex", "lasso", "nix", "serde", "smallvec", + "strum", + "thiserror", ] [[package]] @@ -1014,6 +1791,41 @@ dependencies = [ "ring", ] +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 
0.8.6", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core 0.9.10", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", +] + [[package]] name = "parking_lot_core" version = "0.9.10" @@ -1022,11 +1834,68 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.2", + "redox_syscall 0.5.3", "smallvec", "windows-targets 0.52.6", ] +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pest_meta" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + [[package]] name = "phf" version = "0.11.2" @@ -1069,12 +1938,75 @@ dependencies = [ "siphasher", ] +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + [[package]] name = "pkg-config" version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" 
+[[package]] +name = "plist" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016" +dependencies = [ + "base64 0.22.1", + "indexmap", + "quick-xml", + "serde", + "time", +] + +[[package]] +name = "portable-atomic" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + [[package]] name = "pretty_assertions" version = "1.4.0" @@ -1082,7 +2014,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" dependencies = [ "diff", - "yansi", + "yansi 0.5.1", ] [[package]] @@ -1100,6 +2032,40 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "proc-macro2-diagnostics" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "version_check", + "yansi 1.0.1", +] + +[[package]] +name = "pulldown-cmark" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +dependencies = [ + "bitflags 2.6.0", + "getopts", + "memchr", + "unicase", +] + +[[package]] +name = "quick-xml" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +dependencies = [ + "memchr", +] + [[package]] name = "quote" version = "1.0.36" @@ -1115,6 +2081,18 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", "rand_core", ] @@ -1123,6 +2101,15 @@ name = "rand_core" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "raw-window-handle" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2ff9a1f06a88b01621b7ae906ef0211290d1c8a168a15542486a8f61c0833b9" [[package]] name = "rayon" @@ -1144,6 +2131,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -1155,9 +2151,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.2" +version = "0.5.3" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ "bitflags 2.6.0", ] @@ -1181,8 +2177,17 @@ checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", - "regex-automata", - "regex-syntax", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", ] [[package]] @@ -1193,35 +2198,224 @@ checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.4", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" + +[[package]] +name = "relative-path" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" +dependencies = [ + "serde", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rmp" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "228ed7c16fa39782c3b3468e974aec2795e9089153cd08ee2e9aefb3613334c4" +dependencies = [ + "byteorder", + "num-traits", + "paste", +] + +[[package]] +name = "rmp-serde" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e599a477cf9840e92f2cde9a7189e67b42c57532749bf90aea6ec10facd4db" +dependencies = [ + "byteorder", + "rmp", + "serde", +] + +[[package]] +name = "roff" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b833d8d034ea094b1ea68aa6d5c740e0d04bad9d16568d08ba6f76823a114316" + +[[package]] +name = "ropey" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93411e420bcd1a75ddd1dc3caf18c23155eda2c090631a85af21ba19e97093b5" +dependencies = [ + "smallvec", + "str_indices", +] + +[[package]] +name = "rune" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d21925ac4f8974395d0d9e43f96a34c778e71ed86fe96d0313b2211102537234" +dependencies = [ + "anyhow", + "base64 0.21.7", + "bincode", + "clap", + "codespan-reporting", + "futures-core", + "futures-util", + "handlebars", + "itoa", + "linked-hash-map", + "lsp-types", + "musli", + "musli-storage", + "num", + "once_cell", + "parking_lot 0.12.3", + "percent-encoding", + "pin-project", + "pulldown-cmark", + "rand", + "relative-path", + "ropey", + "rune-alloc", + "rune-core", + "rune-macros", + "rust-embed", + "ryu", + "semver", + "serde", + "serde-hashkey", + "serde_json", + "sha2", + 
"similar", + "syntect", + "tokio", + "toml", + "tracing", + "tracing-subscriber", + "url", + "webbrowser", +] + +[[package]] +name = "rune-alloc" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e85c26e19f7efb91c6e19afc68b008f04685fdb2852e96ce8fbd3cf4a0b4e76c" +dependencies = [ + "ahash", + "pin-project", + "rune-alloc-macros", + "serde", +] + +[[package]] +name = "rune-alloc-macros" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "810588952a8710959d35ad17c933804d60f96c3792f216277cda68c1a9887120" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "rune-core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d30fa78b6cb15d1560bb4cb18f4b99a9097b08ade3a5fc23e5ae7311f97c537b" +dependencies = [ + "byteorder", + "musli", + "rune-alloc", + "serde", + "twox-hash", +] + +[[package]] +name = "rune-macros" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1b91e53bae3804e4d72e2b04fa5d5108bd93e880ca597c0cae0fb0a662fe198" +dependencies = [ + "proc-macro2", + "quote", + "rune-core", + "syn", +] + +[[package]] +name = "rune-modules" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2be1152db68e40ef9b76c1eb2119509da9e11fc58756b5d6894c1d79da744f0f" +dependencies = [ + "rune", + "serde_json", + "tokio", + "toml", ] [[package]] -name = "regex-syntax" -version = "0.8.4" +name = "rust-embed" +version = "6.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" +checksum = "a36224c3276f8c4ebc8c20f158eca7ca4359c8db89991c4925132aaaf6702661" +dependencies = [ + "rust-embed-impl", + "rust-embed-utils", + "walkdir", +] [[package]] -name = "ring" -version = "0.17.8" +name = "rust-embed-impl" +version = "6.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "49b94b81e5b2c284684141a2fb9e2a31be90638caf040bf9afbc5a0416afe1ac" dependencies = [ - "cc", - "cfg-if", - "getrandom", - "libc", - "spin", - "untrusted", - "windows-sys 0.52.0", + "proc-macro2", + "quote", + "rust-embed-utils", + "syn", + "walkdir", ] [[package]] -name = "roff" -version = "0.2.1" +name = "rust-embed-utils" +version = "7.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b833d8d034ea094b1ea68aa6d5c740e0d04bad9d16568d08ba6f76823a114316" +checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74" +dependencies = [ + "sha2", + "walkdir", +] [[package]] name = "rust-ini" @@ -1280,6 +2474,15 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "semver" +version = "1.0.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +dependencies = [ + "serde", +] + [[package]] name = "serde" version = "1.0.204" @@ -1289,6 +2492,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde-hashkey" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c13a90d3c31ebd0b83e38600c8117083ec4c4e1a7a0cab364e79e19706ade04e" +dependencies = [ + "serde", +] + [[package]] name = 
"serde_derive" version = "1.0.204" @@ -1311,12 +2523,96 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" +dependencies = [ + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shared_child" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0d94659ad3c2137fef23ae75b03d5241d633f8acded53d672decfa0e6e0caef" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "similar" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" +dependencies = [ + "bstr", +] + [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "sled" +version = "0.34.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f96b4737c2ce5987354855aed3797279def4ebf734436c6aa4552cf8e169935" +dependencies = [ + "crc32fast", + "crossbeam-epoch", + "crossbeam-utils", + "fs2", + "fxhash", + "libc", + "log", + "parking_lot 0.11.2", +] + [[package]] name = "smallvec" version = "1.13.2" @@ -1341,89 +2637,303 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" +[[package]] +name = "str_indices" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9557cb6521e8d009c51a8666f09356f4b817ba9ba0981a305bd86aee47bd35c" + [[package]] name = "strsim" version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "rustversion", + "syn", 
+] + +[[package]] +name = "syn" +version = "2.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syntect" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874dcfa363995604333cf947ae9f751ca3af4522c60886774c4963943b4746b1" +dependencies = [ + "bincode", + "bitflags 1.3.2", + "flate2", + "fnv", + "once_cell", + "onig", + "plist", + "regex-syntax 0.8.4", + "serde", + "serde_derive", + "serde_json", + "thiserror", + "walkdir", + "yaml-rust", +] + +[[package]] +name = "sysinfo" +version = "0.30.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a5b4ddaee55fb2bea2bf0e5000747e5f5c0de765e5a5ff87f4cd106439f4bb3" +dependencies = [ + "cfg-if", + "core-foundation-sys", + "libc", + "ntapi", + "once_cell", + "rayon", + "windows", +] + +[[package]] +name = "systemd_tmpfiles" +version = "0.1.1" +dependencies = [ + "base64-simd", + "bitflags 2.6.0", + "compact_str", + "dirs", + "indoc", + "libc", + "memchr", + "nix", + "pretty_assertions", + "smallvec", + "strum", + "thiserror", + "winnow 0.6.15", +] + +[[package]] +name = "tar" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "thiserror" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +dependencies = [ + "num-conv", + "time-core", 
+] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinyvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] -name = "strum" -version = "0.26.3" +name = "tokio" +version = "1.38.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +checksum = "eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" dependencies = [ - "strum_macros", + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot 0.12.3", + "pin-project-lite", + "signal-hook-registry", + "tokio-macros", + "windows-sys 0.48.0", ] [[package]] -name = "strum_macros" -version = "0.26.4" +name = "tokio-macros" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ - "heck", "proc-macro2", "quote", - "rustversion", "syn", ] [[package]] -name = "syn" -version = "2.0.69" +name = "toml" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201fcda3845c23e8212cd466bfebf0bd20694490fc0356ae8e428e0824a915a6" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", ] [[package]] -name = "systemd_tmpfiles" -version = "0.1.1" +name = "toml_datetime" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ - "base64-simd", - "bitflags 2.6.0", - "compact_str", - "dirs", - "indoc", - "libc", - "memchr", - "nix", - "pretty_assertions", - "smallvec", - "strum", - "thiserror", - "winnow", + "serde", ] [[package]] -name = "tar" -version = "0.4.41" +name = "toml_edit" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "filetime", - "libc", - "xattr", + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.5.40", ] [[package]] -name = "thiserror" -version = "1.0.63" +name = "tracing" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "thiserror-impl", + "pin-project-lite", + "tracing-attributes", + "tracing-core", ] [[package]] -name = "thiserror-impl" -version = "1.0.63" +name = "tracing-attributes" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", @@ -1431,12 +2941,43 @@ dependencies = [ ] [[package]] -name = "tiny-keccak" -version = "2.0.2" +name = "tracing-core" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ - "crunchy", + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "parking_lot 0.12.3", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -1445,30 +2986,94 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "static_assertions", +] + [[package]] name = "typenum" version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + [[package]] name = "unicode-ident" version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-width" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + [[package]] name = "untrusted" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "url" +version = "2.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + [[package]] name = "utf8parse" version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + [[package]] name = "version_check" version = "0.9.4" @@ -1497,6 +3102,113 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webbrowser" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db67ae75a9405634f5882791678772c94ff5f16a66535aae186e26aa0841fc8b" +dependencies = [ + "core-foundation", + "home", + "jni", + "log", + "ndk-context", + "objc", + "raw-window-handle", + "url", + "web-sys", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + 
[[package]] name = "winapi-util" version = "0.1.8" @@ -1506,6 +3218,40 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +dependencies = [ + "windows-core", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.48.0" @@ -1524,6 +3270,21 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -1555,6 +3316,12 @@ dependencies = [ "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -1567,6 +3334,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -1579,6 +3352,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -1597,6 +3376,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -1609,6 +3394,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -1621,6 +3412,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -1633,6 +3430,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -1647,9 +3450,18 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.14" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374ec40a2d767a3c1b4972d9475ecd557356637be906f2cb3f7fe17a6eb5e22f" +checksum = "557404e450152cd6795bb558bca69e43c585055f4606e3bcae5894fc6dac9ba0" dependencies = [ "memchr", ] @@ -1665,6 +3477,21 @@ dependencies = [ "rustix", ] +[[package]] +name = "xtask" +version = "0.1.0" +dependencies = [ + "anyhow", + "camino", + "clap", + "clap_complete", + "clap_mangen", + "env_logger", + "konfigkoll", + "log", + "paketkoll", +] + [[package]] name = "xz2" version = "0.1.7" @@ -1674,12 +3501,27 @@ dependencies = [ "lzma-sys", ] +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "yansi" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "zerocopy" version = "0.7.35" diff --git a/Cargo.toml b/Cargo.toml index 0f2afa5f..1d77eb16 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,14 +10,20 @@ base64-simd = "0.8.0" bitflags = "2.6.0" bstr = "1.9.1" bzip2 = "0.4.4" +cached = { version = "0.53.1", default-features = false } +camino = "1.1.7" cfg-if = "1.0.0" clap = "4.5.9" clap_complete = "4.5.8" clap_mangen = "0.2.22" -compact_str = "0.7.1" +clru = "0.6.2" +compact_str = "0.8.0" +console = "0.15.8" dashmap = "6.0.1" derive_builder = "0.20.0" +directories = "5.0.1" dirs = "5.0.1" +duct = "0.13.7" either = "1.13.0" env_logger = "0.11.3" faster-hex = { version = "0.9.0", default-features = false } 
@@ -28,6 +34,7 @@ flume = { version = "0.11.0", default-features = false } glob = "0.3.1" ignore = "0.4.22" indoc = "2.0.5" +itertools = "0.13.0" lasso = "0.7.2" libc = "0.2.155" log = "0.4.22" @@ -37,21 +44,35 @@ mimalloc = "0.1.43" nix = { version = "0.29.0", default-features = false } num_cpus = "1.16.0" os_info = { version = "3.8.2", default-features = false } +ouroboros = "0.18.4" +parking_lot = "0.12.3" phf = "0.11.2" pretty_assertions = "1.4.0" proc-exit = "2.0.1" rayon = "1.10.0" regex = "1.10.5" ring = "0.17.8" +rune = "0.13.4" +rune-modules = "0.13.4" rust-ini = "0.21.0" scopeguard = "1.2.0" serde = "1.0.204" serde_json = "1.0.120" -smallvec = "1.13.2" -strum = "0.26.3" +smallvec = { version = "1.13.2", features = [ + "const_generics", + "const_new", + "union", +] } +strum = { version = "0.26.3", features = ["derive"] } +sysinfo = "0.30.13" tar = "0.4.41" +tempfile = "3.10.1" thiserror = "1.0.63" -winnow = "0.6.14" +tokio = "1.38.1" +tracing = "0.1.40" +tracing-log = "0.2.0" +tracing-subscriber = "0.3.18" +winnow = "0.6.15" xz2 = "0.1.7" zstd = "0.13.2" @@ -63,6 +84,8 @@ doc_markdown = "warn" needless_pass_by_value = "warn" redundant_closure_for_method_calls = "warn" semicolon_if_nothing_returned = "warn" +undocumented_unsafe_blocks = "warn" +unnecessary_safety_doc = "warn" unwrap_used = "warn" wildcard_imports = "warn" @@ -72,15 +95,29 @@ split-debuginfo = "unpacked" [profile.release] codegen-units = 1 -lto = "fat" +lto = "thin" opt-level = 2 [profile.profiling] debug = 2 inherits = "release" +lto = false -[profile.dev.package.ring] -opt-level = 2 +[profile.dev.package] +# Needed for reasonable performance +flate2.opt-level = 2 +libz-ng-sys.opt-level = 2 +md-5.opt-level = 2 +proc-macro2.opt-level = 2 +quote.opt-level = 2 +ring.opt-level = 2 +rune-macros.opt-level = 2 +serde_derive.opt-level = 2 +syn.opt-level = 2 +zstd-safe.opt-level = 2 +zstd-sys.opt-level = 2 +zstd.opt-level = 2 -[profile.dev.package.md-5] -opt-level = 2 +#[patch.crates-io] +## Rune +#rune = { path = "patches/rune/crates/rune" } diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..af7058c1 --- /dev/null +++ b/Makefile @@ -0,0 +1,53 @@ +# This makefile exists to allow for an install target, since it seems +# cargo install is too basic to handle installing support files + +CARGO_FLAGS ?= +DESTDIR ?= +PREFIX ?= /usr/local +BINDIR ?= $(PREFIX)/bin +DATADIR ?= $(PREFIX)/share +BASHDIR ?= $(DATADIR)/bash-completion/completions +ZSHDIR ?= $(DATADIR)/zsh/site-functions +FISHDIR ?= $(DATADIR)/fish/vendor_completions.d +MANDIR ?= $(DATADIR)/man/man1 + +PROGS := target/release/paketkoll target/release/konfigkoll target/release/konfigkoll-rune target/release/xtask + +all: $(PROGS) + +target/release/paketkoll: build-cargo +target/release/konfigkoll: build-cargo +target/release/konfigkoll-rune: build-cargo +target/release/xtask: build-cargo + +build-cargo: + # Let cargo figure out if a build is needed + cargo build --locked --release $(CARGO_FLAGS) + +test: + cargo test --locked --release $(CARGO_FLAGS) + +install: install-paketkoll install-konfigkoll + +install-paketkoll: target/release/paketkoll target/release/xtask install-dirs + install $< $(DESTDIR)$(BINDIR) + ./target/release/xtask man --output $(DESTDIR)$(MANDIR) paketkoll + ./target/release/xtask completions --output target/completions paketkoll + install -Dm644 target/completions/paketkoll.bash $(DESTDIR)$(BASHDIR)/paketkoll + install -Dm644 target/completions/paketkoll.fish $(DESTDIR)$(FISHDIR)/paketkoll.fish + install -Dm644 
target/completions/_paketkoll $(DESTDIR)$(ZSHDIR)/_paketkoll + + +install-konfigkoll: target/release/konfigkoll target/release/konfigkoll-rune target/release/xtask install-dirs + install $< $(DESTDIR)$(BINDIR) + install target/release/konfigkoll-rune $(DESTDIR)$(BINDIR) + ./target/release/xtask man --output $(DESTDIR)$(MANDIR) konfigkoll + ./target/release/xtask completions --output target/completions konfigkoll + install -Dm644 target/completions/konfigkoll.bash $(DESTDIR)$(BASHDIR)/konfigkoll + install -Dm644 target/completions/konfigkoll.fish $(DESTDIR)$(FISHDIR)/konfigkoll.fish + install -Dm644 target/completions/_konfigkoll $(DESTDIR)$(ZSHDIR)/_konfigkoll + +install-dirs: + install -d $(DESTDIR)$(BINDIR) $(DESTDIR)$(BASHDIR) $(DESTDIR)$(ZSHDIR) $(DESTDIR)$(FISHDIR) $(DESTDIR)$(MANDIR) + +.PHONY: all build-cargo test install install-paketkoll install-konfigkoll install-dirs $(PROGS) diff --git a/README.md b/README.md index 25ac6b9c..e7a636e8 100644 --- a/README.md +++ b/README.md @@ -1,180 +1,24 @@ -# Paketkoll - -[ [lib.rs] ] [ [crates.io] ] [ [AUR] ] - -This is a Rust replacement for `debsums` (on Debian/Ubuntu/...) and `paccheck` -(on Arch Linux and derivatives). It is much faster than those thanks to using -all your CPU cores in parallel. (It is also much much faster than `pacman -Qkk` -which is much slower than `paccheck` even.) - -What it does is compare installed files to what the package manager installed and -report any discrepancies. - -* On Arch Linux it will report changed mode, owner, group, mtimes, symlink target, - file content (sha256) or missing files. -* On Debian it will only report if file content differs for regular files. That - is the only information available on Debian unfortunately (the md5sum). - -Additional features: - -* There is a flag to include or not include "config files" (those marked as such - by the package manager, which is not all files in `/etc` as one might think). -* On Arch Linux you can pass `--trust-mtime` to not check the contents of files - where the mtime matches. This makes the check ultra-fast. -* Doesn't depend on any distro specific libraries for interacting with the package - database. We do our own parsing. This makes it possible to be way faster - (parallelism!) and also to make a cross platform binary that will run on either - distro without any dependencies apart from libc. -* You can also use this to find unmanaged files (not installed by the package - manager) using `paketkoll check-unexpected`, though some work is required, - since there are many legitimately unmanaged files. You may need to find a set - of `--ignore` flags suitable for your system. Only some simple basics ignores - are built in (`/proc`, `/sys`, `/home`, etc). - -Caveats: - -* This is not a drop-in replacement for either debsums nor paccheck, since - command line flags and output format differs. Additionally, debsums have some - extra features that this doesn't, such as filtering out files removed by localepurge. -* This uses much more memory than `paccheck` (3x). This is largely unavoidable due - to memory-speed tradeoffs, though there is room for *some* improvements still. -* paketkoll will not report quite the same errors as `paccheck`. For example, if - it finds that the size differs, it will not bother computing the checksums, - since they can never match. - -## Benchmarks - -Note: CPU time is actually comparable to the original tools (slightly better in -general). 
But due to parallelism the wall time is *way* better, especially -without `--trust-mtime` (where the runtime is quite small to begin with). - -* All of the runs were performed on warm disk cache. -* Distro-installed versions of paccheck and debsums were used. -* Musl builds built using cross was used across the board for best portability. -* The same build flags as used for binary releases in this were used (opt level 2, fat LTO) - -### Arch Linux (x64-64 AMD desktop) - -* CPU: AMD Ryzen 5 5600X 6-Core Processor (6 cores, 12 threads) -* RAM: 32 GB, 2 DIMMs DDR4, 3600 MHz -* Disk: NVME Gen4 (WD Black SN850 1TB) -* Kernel: 6.7.5-arch1-1 -* `pacman -Q | wc -l` indicates 2211 packages installed - -When only checking file properties and trusting mtime (these should be the most similar options): - -```console -$ hyperfine -i -N --warmup 1 "paketkoll --trust-mtime check" "paccheck --file-properties --quiet" -Benchmark 1: paketkoll --trust-mtime - Time (mean ± σ): 249.4 ms ± 4.8 ms [User: 1194.5 ms, System: 1216.2 ms] - Range (min … max): 242.1 ms … 259.7 ms 12 runs - -Benchmark 2: paccheck --file-properties --quiet - Time (mean ± σ): 2.561 s ± 0.020 s [User: 1.504 s, System: 1.053 s] - Range (min … max): 2.527 s … 2.598 s 10 runs - - Warning: Ignoring non-zero exit code. - -Summary - paketkoll --trust-mtime ran - 10.27 ± 0.21 times faster than paccheck --file-properties --quiet -``` - -The speedup isn't quite as impressive when checking the checksums also, but it is still large: - -```console -$ hyperfine -i -N --warmup 1 "paketkoll check" "paccheck --sha256sum --quiet" -Benchmark 1: paketkoll - Time (mean ± σ): 9.986 s ± 1.329 s [User: 17.368 s, System: 19.087 s] - Range (min … max): 8.196 s … 11.872 s 10 runs - -Benchmark 2: paccheck --sha256sum --quiet - Time (mean ± σ): 68.976 s ± 0.339 s [User: 16.661 s, System: 17.816 s] - Range (min … max): 68.413 s … 69.604 s 10 runs - - Warning: Ignoring non-zero exit code. - -Summary - paketkoll ran - 6.91 ± 0.92 times faster than paccheck --sha256sum --quiet -``` - -* Many and large packages installed -* 6 cores, 12 thread means a decent speed up from multi-threading is possible. -* I don't know what paccheck was doing such that it took 68 seconds but didn't use very much CPU. Presumably waiting for IO? - -### Debian (ARM64 Raspberry Pi) - -* Raspberry Pi 5 (8 GB RAM) -* CPU: Cortex-A76 (4 cores, 4 threads) -* Disk: USB boot from SATA SSD in USB 3.0 enclosure: Samsung SSD 850 PRO 512GB -* Kernel: 6.1.0-rpi8-rpi-2712 -* `dpkg-query -l | grep ii | wc -l` indicates 749 packages installed - -```console -$ hyperfine -i -N --warmup 1 "paketkoll check" "debsums -c" -Benchmark 1: paketkoll - Time (mean ± σ): 2.664 s ± 0.102 s [User: 3.937 s, System: 1.116 s] - Range (min … max): 2.543 s … 2.813 s 10 runs - -Benchmark 2: debsums -c - Time (mean ± σ): 8.893 s ± 0.222 s [User: 5.453 s, System: 1.350 s] - Range (min … max): 8.637 s … 9.199 s 10 runs - - Warning: Ignoring non-zero exit code. - -Summary - 'paketkoll' ran - 3.34 ± 0.15 times faster than 'debsums -c' -``` - -* There aren't a ton of packages installed on this system (it is acting as a headless server). This means that neither command is terribly slow. -* A Pi only has 4 cores also, which limits the maximum possible speedup. 
- -### Ubuntu 22.04 (x86-64 Intel laptop) - -* CPU: 12th Gen Intel(R) Core(TM) i9-12950HX (8 P-cores with 16 threads + 8 E-cores with 8 threads) -* RAM: 64 GB, 2 DIMMs DDR4, 3600 MHz -* Disk: NVME Gen4 (WD SN810 2 TB) -* Kernel: 6.5.0-17-generic (HWE kernel) -* `dpkg-query -l | grep ii | wc -l` indicates 4012 packages installed - -```console -$ hyperfine -i -N --warmup 1 "paketkoll check" "debsums -c" -Benchmark 1: paketkoll - Time (mean ± σ): 5.341 s ± 0.174 s [User: 42.553 s, System: 33.049 s] - Range (min … max): 5.082 s … 5.586 s 10 runs - -Benchmark 2: debsums -c - Time (mean ± σ): 92.839 s ± 7.332 s [User: 47.664 s, System: 15.697 s] - Range (min … max): 82.872 s … 103.710 s 10 runs - - Warning: Ignoring non-zero exit code. - -Summary - paketkoll ran - 17.38 ± 1.49 times faster than debsums -c -``` - -## Future improvements - -Most future improvements will happen in the [`paketkoll_core`](crates/paketkoll_core) -crate, to make it suitable for another project idea I have (basically that project -needs this as a library). - -I consider the program itself mostly feature complete. The main changes would be -bug fixes and possibly supporting additional Linux distributions and package managers. - -## MSRV (Minimum Supported Rust Version) policy - -The MSRV may be bumped as needed. It is guaranteed that this program will at -least build on the current stable Rust release. An MSRV change is not considered -a breaking change and as such may change even in a patch version. - -## What does the name mean? - -paketkoll is Swedish for "package check", though the translation to English isn't perfect ("ha koll på" means "keep an eye on" for example). - -[crates.io]: https://crates.io/crates/paketkoll -[lib.rs]: https://lib.rs/crates/paketkoll -[AUR]: https://aur.archlinux.org/packages/paketkoll +# Paketkoll and konfigkoll + +This repository is home to two projects: + +* Paketkoll:\ + A Rust replacement for `debsums` (on Debian/Ubuntu/...) and `paccheck` + (on Arch Linux and derivatives). It is much faster than those thanks to using + all your CPU cores in parallel. (It is also much much faster than `pacman -Qkk` + which is much slower than `paccheck` even.)\ + \ + Additionally it has some other commands such as finding what package owns a file, + etc. This program is pretty much done. See + [the README for paketkoll](crates/paketkoll/README.md) for more information. +* Konfigkoll:\ + A personal system configuration manager. This is for "Oh no, I have too many + computers and want to sync my configuration files between them using git". + It differs from ansible and similar (designed for sysadmins). This is [chezmoi] + for the whole computer. It is heavily inspired by [aconfmgr], but supports more + than just Arch Linux (specifically Debian and derivatives as well).\ + **This program is very much a work in progress.**\ + See [the README for konfigkoll](crates/konfigkoll/README.md) for more information. + +[chezmoi]: https://github.com/twpayne/chezmoi +[aconfmgr]: https://github.com/CyberShadow/aconfmgr diff --git a/Rune.toml b/Rune.toml new file mode 100644 index 00000000..e69de29b diff --git a/about.hbs b/about.hbs new file mode 100644 index 00000000..d2df889a --- /dev/null +++ b/about.hbs @@ -0,0 +1,70 @@ + + + + + + + +
+
+

Third Party Licenses

+

This page lists the licenses of the projects used in konfigkoll and paketkoll.

+
+ +

Overview of licenses:

+ + +

All license text:

+ +
+ + + diff --git a/about.toml b/about.toml new file mode 100644 index 00000000..55be61c6 --- /dev/null +++ b/about.toml @@ -0,0 +1,23 @@ +accepted = [ + "MPL-2.0", + "Apache-2.0", + "MIT", + "BSD-2-Clause", + "BSD-3-Clause", + "ISC", + "OpenSSL", + "CC0-1.0", + "Unicode-DFS-2016", +] +targets = [ + "aarch64-unknown-linux-musl", + "armv7-unknown-linux-musleabihf", + "i686-unknown-linux-musl", + "riscv64gc-unknown-linux-gnu", + "x86_64-unknown-linux-musl", +] +ignore-dev-dependencies = true +private.ignore = true +workarounds = [ + "ring", +] diff --git a/clippy.toml b/clippy.toml index 154626ef..65c188a2 100644 --- a/clippy.toml +++ b/clippy.toml @@ -1 +1,2 @@ allow-unwrap-in-tests = true +check-private-items = true diff --git a/crates/konfigkoll/Cargo.toml b/crates/konfigkoll/Cargo.toml new file mode 100644 index 00000000..3b56bf90 --- /dev/null +++ b/crates/konfigkoll/Cargo.toml @@ -0,0 +1,74 @@ +[package] +categories = [ + "command-line-utilities", + "filesystem", + "os::linux-apis", + "config", +] +description = "Konfigkoll is a configuration management tool for Arch Linux and Debian (and derivatives)" +edition = "2021" +keywords = ["apt", "arch-linux", "debian", "pacman", "config-management"] +license = "MPL-2.0" +name = "konfigkoll" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[[bin]] +name = "konfigkoll" +path = "src/main.rs" + +[[bin]] +name = "konfigkoll-rune" +path = "src/bin/rune.rs" + +[features] +# Default features +default = ["arch_linux", "debian", "vendored"] + +# Include the Arch Linux backend +arch_linux = ["konfigkoll_script/arch_linux"] + +# Include support for the Debian backend +debian = ["konfigkoll_script/debian"] + +# Vendor C/C++ dependencies instead of linking them dynamically +vendored = ["paketkoll_core/vendored"] + +[dependencies] +ahash.workspace = true +anyhow = { workspace = true, features = ["backtrace"] } +camino.workspace = true +clap = { workspace = true, features = ["derive"] } +compact_str.workspace = true +directories.workspace = true +either.workspace = true +itertools.workspace = true +konfigkoll_core = { version = "0.1.0", path = "../konfigkoll_core" } +konfigkoll_script = { version = "0.1.0", path = "../konfigkoll_script" } +konfigkoll_types = { version = "0.1.0", path = "../konfigkoll_types" } +ouroboros.workspace = true +paketkoll_cache = { version = "0.1.0", path = "../paketkoll_cache" } +paketkoll_core = { version = "0.4.1", path = "../paketkoll_core" } +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types" } +rayon.workspace = true +rune = { workspace = true, features = ["cli"] } +tokio = { workspace = true, features = [ + "macros", + "parking_lot", + "process", + "rt", + "sync", +] } +tracing-log.workspace = true +tracing-subscriber = { workspace = true, features = ["env-filter", "parking_lot"] } +tracing.workspace = true + +[target.'cfg(target_env = "musl")'.dependencies] +# The allocator on musl is attrociously slow, so we use a custom one. +# Jemalloc doesn't work reliably on Aarch64 due to varying page size, so use +# the slightly slower mimalloc instead. +mimalloc.workspace = true + +[lints] +workspace = true diff --git a/crates/konfigkoll/README.md b/crates/konfigkoll/README.md new file mode 100644 index 00000000..0d946868 --- /dev/null +++ b/crates/konfigkoll/README.md @@ -0,0 +1,56 @@ +# Konfigkoll + +[Documentation] [ [lib.rs] ] [ [crates.io] ] [ [AUR] ] + +Konfigkoll is a work in progress cross distro configuration manager. 
It aims to solve the problem
+"I have too many computers and want to keep the system configs in sync", rather than
+"I am a sysadmin and want to manage a fleet". As such it is a *personal* system configuration manager.
+
+The design of konfigkoll is heavily inspired by the excellent [Aconfmgr](https://github.com/CyberShadow/aconfmgr),
+but with a few key differences:
+
+* Aconfmgr is Arch Linux specific, while konfigkoll aims to be cross distro
+  (currently Arch Linux + work in progress support for Debian & derivatives).
+* Aconfmgr is written in Bash, and is rather slow. Konfigkoll is written in Rust, and is much faster.\
+  As an example, applying my personal config with aconfmgr on my system takes about 30 seconds, while konfigkoll
+  takes about 2 seconds for the equivalent config. (This is assuming `--trust-mtime`; both are
+  significantly slowed down if checksums are verified for every file.)
+* Aconfmgr uses Bash as the configuration language, while konfigkoll uses [Rune].
+
+Please see [the documentation](https://vorpalblade.github.io/paketkoll/book#konfigkoll) for more information.
+
+## Installed binaries
+
+This crate consists of two binaries:
+
+### konfigkoll
+
+This is the main binary you will be interacting with.
+
+### konfigkoll-rune
+
+This is a helper binary for [konfigkoll] that provides support functions for Rune
+(the embedded scripting language used by konfigkoll), such as:
+
+* Documentation generation
+* LSP language server
+* Formatting of rune files
+* Syntax checking
+* etc
+
+## MSRV (Minimum Supported Rust Version) policy
+
+The MSRV may be bumped as needed. It is guaranteed that this program will at
+least build on the current stable Rust release. An MSRV change is not considered
+a breaking change and as such may change even in a patch version.
+
+## What does the name mean?
+
+konfigkoll is Swedish for "config check/tracking", though
+the translation to English isn't perfect ("ha koll på" means "keep an eye on"
+for example). Some nuance is lost in the translation!
+
+[Documentation]: https://vorpalblade.github.io/paketkoll/book
+[crates.io]: https://crates.io/crates/konfigkoll
+[lib.rs]: https://lib.rs/crates/konfigkoll
+[AUR]: https://aur.archlinux.org/packages/konfigkoll
diff --git a/crates/konfigkoll/data/template/_gitignore b/crates/konfigkoll/data/template/_gitignore
new file mode 100644
index 00000000..02865404
--- /dev/null
+++ b/crates/konfigkoll/data/template/_gitignore
@@ -0,0 +1,8 @@
+*.dpkg-*
+*.old
+*.pacnew
+*.pacorig
+*.pacsave
+*.ucf-*
+*~
+/unsorted.rn
diff --git a/crates/konfigkoll/data/template/main.rn b/crates/konfigkoll/data/template/main.rn
new file mode 100644
index 00000000..403cb257
--- /dev/null
+++ b/crates/konfigkoll/data/template/main.rn
@@ -0,0 +1,73 @@
+// This is the main script for konfigkoll
+
+/// System configuration
+///
+/// Parameters:
+/// - props: A persistent properties object that the script can use to store
+///   data between phases
+/// - settings: Settings for konfigkoll
+pub async fn phase_system_discovery(props, settings) {
+    let sysinfo = sysinfo::SysInfo::new();
+    let os_id = sysinfo.os_id();
+    println!("Configuring for host {} (distro: {})", sysinfo.host_name()?, os_id);
+
+    // We need to enable the backends that we want to use
+    match os_id {
+        "arch" => {
+            settings.enable_pkg_backend("pacman")?;
+            settings.set_file_backend("pacman")?
+        },
+        "debian" => {
+            settings.enable_pkg_backend("apt")?;
+            settings.set_file_backend("apt")?
+        },
+        "ubuntu" => {
+            settings.enable_pkg_backend("apt")?;
+            settings.set_file_backend("apt")?
+        },
+        _ => return Err("Unsupported OS")?,
+    }
+    // Also enable flatpak
+    settings.enable_pkg_backend("flatpak")?;
+
+    Ok(())
+}
+
+/// Ignored paths
+pub async fn phase_ignores(props, cmds) {
+    // Note! Some ignores are built in to konfigkoll, so you don't need to add them here:
+    // These are things like /dev, /proc, /sys, /home etc. See the documentation for
+    // the current list of built-in ignores.
+
+    // Ignore some common paths
+    cmds.ignore_path("/var/cache")?;
+    cmds.ignore_path("/var/spool")?;
+    // It is generally best to ignore the state directories of package managers,
+    // as they are managed separately.
+    cmds.ignore_path("/var/lib/flatpak")?;
+    cmds.ignore_path("/var/lib/pacman")?;
+    cmds.ignore_path("/var/lib/apt")?;
+    cmds.ignore_path("/var/lib/dpkg")?;
+    // Add more paths to ignore here
+    Ok(())
+}
+
+/// Early package phase; this is for packages that are needed by the script
+/// itself (e.g. if we need to call out to a command from that package)
+pub async fn phase_script_dependencies(props, cmds) {
+    Ok(())
+}
+
+/// Main phase; this is where the bulk of your configuration should go
+///
+/// It is recommended to use the "save" sub-command to create an initial
+/// `unsorted.rn` file that you can then copy the parts you want from into here.
+///
+/// A tip is to use `konfigkoll -p dry-run save` the first few times to not
+/// *actually* save all the files; this helps you figure out what ignores to add
+/// above in `phase_ignores()` without copying a ton of files. Once you are happy
+/// with the ignores, you can remove the `-p dry-run` part.
+pub async fn phase_main(props, cmds, package_managers) {
+
+    Ok(())
+}
\ No newline at end of file
diff --git a/crates/konfigkoll/data/template/unsorted.rn b/crates/konfigkoll/data/template/unsorted.rn
new file mode 100644
index 00000000..99ee0aae
--- /dev/null
+++ b/crates/konfigkoll/data/template/unsorted.rn
@@ -0,0 +1,9 @@
+//! This file will be overwritten when you use `konfigkoll save`
+//!
+//! It is recommended to use the "save" sub-command to create an initial
+//! `unsorted.rn` file that you can then copy the parts you want from into `main.rn`.
+//!
+//! A tip is to use `konfigkoll -p dry-run save` the first few times to not
+//! *actually* save all the files; this helps you figure out what ignores to add
+//! in `phase_ignores()` (in `main.rn`) without copying a ton of files. Once you are happy
+//! with the ignores, you can remove the `-p dry-run` part.
diff --git a/crates/konfigkoll/src/apply.rs b/crates/konfigkoll/src/apply.rs
new file mode 100644
index 00000000..1d4d561a
--- /dev/null
+++ b/crates/konfigkoll/src/apply.rs
@@ -0,0 +1,46 @@
+//! Code for applying the configuration to the system.
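+//!
+//! The applicator is built in layers: an in-process (or no-op) applicator at
+//! the core, optionally wrapped in an interactive confirmation layer depending
+//! on the paranoia setting. See `create_applicator` below.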
+
+use std::{collections::BTreeMap, sync::Arc};
+
+use either::Either;
+use konfigkoll::cli::Paranoia;
+use konfigkoll_core::apply::Applicator;
+use paketkoll_types::{
+    backend::{Backend, Files, PackageBackendMap, PackageMap},
+    intern::Interner,
+};
+
+#[allow(clippy::too_many_arguments)]
+pub(crate) fn create_applicator(
+    confirmation: Paranoia,
+    force_dry_run: bool,
+    backend_map: &PackageBackendMap,
+    interner: &Arc<Interner>,
+    package_maps: &BTreeMap<Backend, Arc<PackageMap>>,
+    files_backend: &Arc<dyn Files>,
+    diff_command: Vec<String>,
+    pager_command: Vec<String>,
+) -> Box<dyn Applicator> {
+    // TODO: This is where privilege separation would happen (well, one of the locations)
+    let inner_applicator = if force_dry_run {
+        Either::Left(konfigkoll_core::apply::NoopApplicator::default())
+    } else {
+        Either::Right(konfigkoll_core::apply::InProcessApplicator::new(
+            backend_map.clone(),
+            interner,
+            package_maps,
+            files_backend,
+        ))
+    };
+    // Create applicator based on paranoia setting
+    let applicator: Box<dyn Applicator> = match confirmation {
+        Paranoia::Yolo => Box::new(inner_applicator),
+        Paranoia::Ask => Box::new(konfigkoll_core::apply::InteractiveApplicator::new(
+            inner_applicator,
+            diff_command,
+            pager_command,
+        )),
+        Paranoia::DryRun => Box::new(konfigkoll_core::apply::NoopApplicator::default()),
+    };
+    applicator
+}
diff --git a/crates/konfigkoll/src/bin/rune.rs b/crates/konfigkoll/src/bin/rune.rs
new file mode 100644
index 00000000..c9b1331a
--- /dev/null
+++ b/crates/konfigkoll/src/bin/rune.rs
@@ -0,0 +1,22 @@
+//! This is a helper binary for konfigkoll that provides Rune support functions
+//! such as:
+//!
+//! * Documentation generation
+//! * LSP language server
+//! * Formatting of rune files
+//! * Syntax checking
+use konfigkoll_script::ScriptEngine;
+
+#[cfg(target_env = "musl")]
+use mimalloc::MiMalloc;
+
+#[cfg(target_env = "musl")]
+#[cfg_attr(target_env = "musl", global_allocator)]
+static GLOBAL: MiMalloc = MiMalloc;
+
+fn main() {
+    rune::cli::Entry::new()
+        .about(format_args!("konfigkoll rune cli"))
+        .context(&mut |_opts| ScriptEngine::create_context())
+        .run();
+}
diff --git a/crates/konfigkoll/src/cli.rs b/crates/konfigkoll/src/cli.rs
new file mode 100644
index 00000000..9ab23fa5
--- /dev/null
+++ b/crates/konfigkoll/src/cli.rs
@@ -0,0 +1,52 @@
+use camino::Utf8PathBuf;
+use clap::{Parser, Subcommand};
+
+#[derive(Debug, Parser)]
+#[command(version, about, long_about = None)]
+#[command(propagate_version = true)]
+#[clap(disable_help_subcommand = true)]
+pub struct Cli {
+    /// Path to config directory (if not the current directory)
+    #[arg(long, short = 'c')]
+    pub config_path: Option<Utf8PathBuf>,
+    /// Trust mtime (don't check checksum if mtime matches; not supported on Debian)
+    #[arg(long)]
+    pub trust_mtime: bool,
+    /// Describe how much to ask for confirmation
+    #[arg(long, short = 'p', default_value = "ask")]
+    pub confirmation: Paranoia,
+    /// For debugging: force a dry run applicator
+    #[arg(long, hide = true, default_value = "false")]
+    pub debug_force_dry_run: bool,
+    /// Operation to perform
+    #[command(subcommand)]
+    pub command: Commands,
+}
+
+#[derive(Debug, Subcommand)]
+pub enum Commands {
+    /// Create a new template config directory
+    Init {},
+    /// Save config to unsorted.rn script (for you to merge into your config)
+    Save {},
+    /// Check package files and search for unexpected files
+    Apply {},
+    /// Check for syntax errors and other issues
+    Check {},
+    /// Diff a specific path
+    Diff {
+        /// Path to diff
+        path: Utf8PathBuf,
+    },
+}
+
+#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash, clap::ValueEnum)]
+pub enum Paranoia {
+    /// Don't ask, just do it
+    Yolo,
+    /// Ask for groups of changes
+    #[default]
+    Ask,
+    /// Dry run, don't do anything
+    DryRun,
+}
diff --git a/crates/konfigkoll/src/fs_scan.rs b/crates/konfigkoll/src/fs_scan.rs
new file mode 100644
index 00000000..f70f7ae4
--- /dev/null
+++ b/crates/konfigkoll/src/fs_scan.rs
@@ -0,0 +1,80 @@
+//! Scan the file system
+
+use std::sync::Arc;
+
+use anyhow::Context;
+use compact_str::CompactString;
+use konfigkoll_types::FsInstruction;
+use ouroboros::self_referencing;
+use paketkoll_core::config::{
+    CheckAllFilesConfiguration, CommonFileCheckConfiguration, ConfigFiles,
+};
+use paketkoll_core::file_ops::{
+    canonicalize_file_entries, create_path_map, mismatching_and_unexpected_files,
+};
+use paketkoll_types::backend::Files;
+use paketkoll_types::files::FileEntry;
+use paketkoll_types::files::PathMap;
+use paketkoll_types::intern::Interner;
+
+#[self_referencing]
+pub(crate) struct ScanResult {
+    pub files: Vec<FileEntry>,
+    #[borrows(files)]
+    #[covariant]
+    pub path_map: PathMap<'this>,
+}
+
+#[tracing::instrument(skip_all)]
+pub(crate) fn scan_fs(
+    interner: &Arc<Interner>,
+    backend: &Arc<dyn Files>,
+    ignores: &[CompactString],
+    trust_mtime: bool,
+) -> anyhow::Result<(ScanResult, Vec<FsInstruction>)> {
+    tracing::debug!("Scanning filesystem");
+    let mut fs_instructions_sys = vec![];
+    let mut files = backend.files(interner).with_context(|| {
+        format!(
+            "Failed to collect information from backend {}",
+            backend.name()
+        )
+    })?;
+    if backend.may_need_canonicalization() {
+        tracing::debug!("Canonicalizing file entries");
+        canonicalize_file_entries(&mut files);
+    }
+    // Drop mutability
+    let files = files;
+
+    tracing::debug!("Building path map");
+    let scan_result = ScanResultBuilder {
+        files,
+        path_map_builder: |files| create_path_map(files.as_slice()),
+    }
+    .build();
+
+    tracing::debug!("Checking for unexpected files");
+    let common_config = CommonFileCheckConfiguration::builder()
+        .trust_mtime(trust_mtime)
+        .config_files(ConfigFiles::Include)
+        .build()?;
+    let unexpected_config = CheckAllFilesConfiguration::builder()
+        .canonicalize_paths(backend.may_need_canonicalization())
+        .ignored_paths(ignores.to_owned())
+        .build()?;
+
+    let issues = mismatching_and_unexpected_files(
+        scan_result.borrow_files(),
+        scan_result.borrow_path_map(),
+        &common_config,
+        &unexpected_config,
+    )?;
+
+    // Convert issues to an instruction stream
+    fs_instructions_sys
+        .extend(konfigkoll_core::conversion::convert_issues_to_fs_instructions(issues)?);
+    // Ensure instructions are sorted
+    fs_instructions_sys.sort();
+    Ok((scan_result, fs_instructions_sys))
+}
diff --git a/crates/konfigkoll/src/lib.rs b/crates/konfigkoll/src/lib.rs
new file mode 100644
index 00000000..1e227bf7
--- /dev/null
+++ b/crates/konfigkoll/src/lib.rs
@@ -0,0 +1,4 @@
+//! This is only a bin+lib for technical reasons. Do not use this as a library.
+ +#[doc(hidden)] +pub mod cli; diff --git a/crates/konfigkoll/src/main.rs b/crates/konfigkoll/src/main.rs new file mode 100644 index 00000000..585f38c3 --- /dev/null +++ b/crates/konfigkoll/src/main.rs @@ -0,0 +1,385 @@ +use ahash::AHashSet; +use anyhow::Context; +use apply::create_applicator; +use camino::Utf8Path; +use camino::Utf8PathBuf; +use clap::Parser; +use compact_str::CompactString; +use itertools::Itertools; +use konfigkoll::cli::Cli; +use konfigkoll::cli::Commands; +use konfigkoll::cli::Paranoia; +use konfigkoll_core::apply::apply_files; +use konfigkoll_core::apply::apply_packages; +use konfigkoll_core::diff::show_fs_instr_diff; +use konfigkoll_core::state::DiffGoal; +use konfigkoll_script::Phase; +use paketkoll_cache::FilesCache; +use paketkoll_core::backend::ConcreteBackend; +use paketkoll_core::paketkoll_types::intern::Interner; +use paketkoll_types::backend::Files; +use paketkoll_types::backend::PackageBackendMap; +use paketkoll_types::backend::Packages; +use std::io::BufWriter; +use std::io::Write; +use std::sync::Arc; + +mod apply; +mod fs_scan; +mod pkgs; +mod save; + +#[cfg(target_env = "musl")] +use mimalloc::MiMalloc; + +#[cfg(target_env = "musl")] +#[cfg_attr(target_env = "musl", global_allocator)] +static GLOBAL: MiMalloc = MiMalloc; + +#[tokio::main(flavor = "current_thread")] +async fn main() -> anyhow::Result<()> { + // Set up logging with tracing + let filter = tracing_subscriber::EnvFilter::builder() + .with_default_directive(tracing::level_filters::LevelFilter::INFO.into()) + .from_env()?; + let subscriber = tracing_subscriber::fmt::Subscriber::builder() + .with_env_filter(filter) + .finish(); + tracing::subscriber::set_global_default(subscriber)?; + // Compatibility for log crate + tracing_log::LogTracer::init()?; + + let cli = Cli::parse(); + + let config_path = match cli.config_path { + Some(v) => v, + None => std::env::current_dir()?.try_into()?, + }; + + if let Commands::Init {} = cli.command { + init_directory(&config_path)?; + return Ok(()); + } + + let mut script_engine = konfigkoll_script::ScriptEngine::new_with_files(&config_path)?; + + match cli.command { + Commands::Init {} | Commands::Save {} | Commands::Apply {} | Commands::Diff { .. 
} => (), + Commands::Check {} => { + println!("Scripts loaded successfully"); + return Ok(()); + } + } + + // Script: Do system discovery and configuration + script_engine.run_phase(Phase::SystemDiscovery).await?; + + let proj_dirs = directories::ProjectDirs::from("", "", "konfigkoll") + .context("Failed to get directories for disk cache")?; + + // Create backends + tracing::info!("Creating backends"); + let interner = Arc::new(Interner::new()); + let file_backend_id = script_engine + .state() + .settings() + .file_backend() + .ok_or_else(|| anyhow::anyhow!("A file backend must be set"))?; + let pkg_backend_ids = script_engine + .state() + .settings() + .enabled_pkg_backends() + .collect_vec(); + let backend_cfg = paketkoll_core::backend::BackendConfiguration::builder() + .build() + .context("Failed to build backend config")?; + let backends_pkg: Arc = Arc::new( + pkg_backend_ids + .iter() + .map(|b| { + let b: ConcreteBackend = (*b) + .try_into() + .context("Backend is not supported by current build")?; + let backend = b + .create_packages(&backend_cfg, &interner) + .with_context(|| format!("Failed to create backend {b}"))?; + let b: Arc = Arc::from(backend); + Ok(b) + }) + .map(|b| b.map(|b| (b.as_backend_enum(), b))) + .collect::>()?, + ); + + let backend_files: Arc = { + let b: ConcreteBackend = file_backend_id + .try_into() + .context("Backend is not supported by current build")?; + let backend = b + .create_files(&backend_cfg, &interner) + .with_context(|| format!("Failed to create backend {b}"))?; + let backend = FilesCache::from_path(backend, proj_dirs.cache_dir()) + .context("Failed to create disk cache")?; + Arc::new(backend) + }; + + // Load installed packages + tracing::info!("Starting package loading background job"); + let package_loader = { + let interner = interner.clone(); + let backends_pkg = backends_pkg.clone(); + tokio::task::spawn_blocking(move || pkgs::load_packages(&interner, &backends_pkg)) + }; + // Script: Get FS ignores + script_engine.run_phase(Phase::Ignores).await?; + + // Do FS scan + tracing::info!("Starting filesystem scan background job"); + let fs_instructions_sys = { + let ignores: Vec = script_engine + .state() + .commands() + .fs_ignores + .iter() + .cloned() + .collect(); + let trust_mtime = cli.trust_mtime; + let interner = interner.clone(); + let backends_files = backend_files.clone(); + tokio::task::spawn_blocking(move || { + fs_scan::scan_fs(&interner, &backends_files, &ignores, trust_mtime) + }) + }; + + // Script: Do early package phase + script_engine.run_phase(Phase::ScriptDependencies).await?; + + tracing::info!("Waiting for package loading results..."); + let (pkgs_sys, package_maps) = package_loader.await??; + tracing::info!("Got package loading results"); + + // Create the set of package managers for use by the script + script_engine.state_mut().setup_package_managers( + &backends_pkg, + file_backend_id, + &backend_files, + &package_maps, + &interner, + ); + + // Apply early packages (if any) + if let Commands::Apply {} = cli.command { + tracing::info!("Applying early packages (if any are missing)"); + let mut applicator = create_applicator( + cli.confirmation, + cli.debug_force_dry_run, + &backends_pkg, + &interner, + &package_maps, + &backend_files, + script_engine.state().settings().diff(), + script_engine.state().settings().pager(), + ); + let pkg_diff = pkgs::package_diff(&pkgs_sys, &script_engine); + let pkgs_changes = pkg_diff.filter_map(|v| match v { + itertools::EitherOrBoth::Both(_, _) => None, + itertools::EitherOrBoth::Left(_) 
=> None, + itertools::EitherOrBoth::Right((id, instr)) => Some((id, instr.clone())), + }); + apply_packages(applicator.as_mut(), pkgs_changes, &package_maps, &interner)?; + } + + // Script: Do main phase + script_engine.run_phase(Phase::Main).await?; + + // Make sure FS actions are sorted + script_engine.state_mut().commands_mut().fs_actions.sort(); + + tracing::info!("Waiting for file system scan results..."); + let (fs_scan_result, fs_instructions_sys) = fs_instructions_sys.await??; + tracing::info!("Got file system scan results"); + + // Compare expected to system + let mut script_fs = konfigkoll_core::state::FsEntries::default(); + let mut sys_fs = konfigkoll_core::state::FsEntries::default(); + let fs_actions = std::mem::take(&mut script_engine.state_mut().commands_mut().fs_actions); + script_fs.apply_instructions(fs_actions.into_iter(), true); + sys_fs.apply_instructions(fs_instructions_sys.into_iter(), false); + + // Packages are so much easier + let pkg_diff = pkgs::package_diff(&pkgs_sys, &script_engine); + + // At the end, decide what we want to do with the results + match cli.command { + Commands::Save {} => { + tracing::info!("Saving changes"); + // Split out additions and removals + let mut fs_additions = + konfigkoll_core::state::diff(&DiffGoal::Save, script_fs, sys_fs)?.collect_vec(); + fs_additions.sort(); + let mut pkg_additions = vec![]; + let mut pkg_removals = vec![]; + pkg_diff.for_each(|v| match v { + itertools::EitherOrBoth::Both(_, _) => (), + itertools::EitherOrBoth::Left((id, instr)) => { + pkg_additions.push((id, instr.clone())); + } + itertools::EitherOrBoth::Right((id, instr)) => { + pkg_removals.push((id, instr.inverted())); + } + }); + + // Open output file (for appending) in config dir + let output_path = config_path.join("unsorted.rn"); + let mut output = BufWriter::new( + std::fs::OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .open(&output_path) + .with_context(|| format!("Failed to open output file {}", output_path))?, + ); + output.write_all("// This file is generated by konfigkoll\n".as_bytes())?; + output.write_all( + "// You will need to merge the changes you want into your own actual config\n" + .as_bytes(), + )?; + output.write_all("pub fn unsorted_additions(props, cmds) {\n".as_bytes())?; + konfigkoll_core::save::save_packages(&mut output, pkg_additions.into_iter())?; + let files_path = config_path.join("files"); + let sensitive_configs: AHashSet = script_engine + .state() + .settings() + .sensitive_configs() + .collect(); + konfigkoll_core::save::save_fs_changes( + &mut output, + |path, contents| { + if sensitive_configs.contains(path) { + tracing::warn!( + "{} has changes, but it is marked sensitive, won't auto-save", + path + ); + return Ok(()); + } + match cli.confirmation == Paranoia::DryRun { + true => save::noop_file_data_saver(path), + false => save::file_data_saver(&files_path, path, contents), + } + }, + fs_additions.iter(), + )?; + output.write_all("}\n".as_bytes())?; + + output.write_all("\n// These are entries in your config that are not applied to the current system\n".as_bytes())?; + output.write_all( + "// Note that these may not correspond *exactly* to what is in your config\n" + .as_bytes(), + )?; + output.write_all("// (e.g. 
write and copy will get mixed up).\n".as_bytes())?; + output.write_all("pub fn unsorted_removals(props, cmds) {\n".as_bytes())?; + konfigkoll_core::save::save_packages(&mut output, pkg_removals.into_iter())?; + output.write_all("}\n".as_bytes())?; + } + Commands::Apply {} => { + tracing::info!("Applying changes"); + let mut fs_changes = konfigkoll_core::state::diff( + &DiffGoal::Apply(backend_files.clone(), fs_scan_result.borrow_path_map()), + sys_fs, + script_fs, + )? + .collect_vec(); + fs_changes.sort(); + + let pkgs_changes = pkg_diff.filter_map(|v| match v { + itertools::EitherOrBoth::Both(_, _) => None, + itertools::EitherOrBoth::Left((id, instr)) => Some((id, instr.inverted())), + itertools::EitherOrBoth::Right((id, instr)) => Some((id, instr.clone())), + }); + + let mut applicator = create_applicator( + cli.confirmation, + cli.debug_force_dry_run, + &backends_pkg, + &interner, + &package_maps, + &backend_files, + script_engine.state().settings().diff(), + script_engine.state().settings().pager(), + ); + + // Split into early / late file changes based on settings + let early_configs: AHashSet = + script_engine.state().settings().early_configs().collect(); + let mut early_fs_changes = vec![]; + let mut late_fs_changes = vec![]; + for change in fs_changes { + if early_configs.contains(&change.path) { + early_fs_changes.push(change); + } else { + late_fs_changes.push(change); + } + } + + // Apply early file system + apply_files(applicator.as_mut(), early_fs_changes.iter())?; + + // Apply packages + apply_packages(applicator.as_mut(), pkgs_changes, &package_maps, &interner)?; + + // Apply rest of file system + apply_files(applicator.as_mut(), late_fs_changes.iter())?; + } + Commands::Diff { path } => { + tracing::info!("Computing diff"); + let mut fs_changes = konfigkoll_core::state::diff( + &DiffGoal::Apply(backend_files.clone(), fs_scan_result.borrow_path_map()), + sys_fs, + script_fs, + )? + .collect_vec(); + fs_changes.sort(); + let diff_cmd = script_engine.state().settings().diff(); + let pager_cmd = script_engine.state().settings().pager(); + for change in fs_changes { + if change.path.starts_with(&path) { + show_fs_instr_diff(&change, &diff_cmd, &pager_cmd)?; + } + } + } + Commands::Check {} | Commands::Init {} => unreachable!(), + } + + Ok(()) +} + +fn init_directory(config_path: &Utf8Path) -> anyhow::Result<()> { + std::fs::create_dir_all(config_path).context("Failed to create config directory")?; + std::fs::create_dir_all(config_path.join("files"))?; + + // Create skeleton main script + let main_script = config_path.join("main.rn"); + if !main_script.exists() { + std::fs::write(&main_script, include_bytes!("../data/template/main.rn"))?; + } + // Create skeleton unsorted script + let unsorted_script = config_path.join("unsorted.rn"); + if !unsorted_script.exists() { + std::fs::write( + &unsorted_script, + include_bytes!("../data/template/unsorted.rn"), + )?; + } + // Gitignore + let gitignore = config_path.join(".gitignore"); + if !gitignore.exists() { + std::fs::write(&gitignore, include_bytes!("../data/template/_gitignore"))?; + } + + // Add an empty Rune.toml + let runetoml = config_path.join("Rune.toml"); + if !runetoml.exists() { + std::fs::write(&runetoml, b"")?; + } + + Ok(()) +} diff --git a/crates/konfigkoll/src/pkgs.rs b/crates/konfigkoll/src/pkgs.rs new file mode 100644 index 00000000..68874dc2 --- /dev/null +++ b/crates/konfigkoll/src/pkgs.rs @@ -0,0 +1,72 @@ +//! 
Package scanning functions + +use std::{collections::BTreeMap, sync::Arc}; + +use anyhow::Context; +use itertools::Itertools; +use konfigkoll_types::PkgInstructions; +use paketkoll_types::{ + backend::{Backend, PackageBackendMap, PackageMap, PackageMapMap}, + intern::Interner, +}; +use rayon::prelude::*; + +#[tracing::instrument(skip_all)] +pub(crate) fn load_packages( + interner: &Arc, + backends_pkg: &PackageBackendMap, +) -> anyhow::Result<(PkgInstructions, PackageMapMap)> { + let mut pkgs_sys = BTreeMap::new(); + let mut package_maps: BTreeMap> = BTreeMap::new(); + let backend_maps: Vec<_> = backends_pkg + .values() + .par_bridge() + .map(|backend| { + let backend_pkgs = backend + .packages(interner) + .with_context(|| { + format!( + "Failed to collect information from backend {}", + backend.name() + ) + }) + .map(|backend_pkgs| { + let pkg_map = Arc::new(paketkoll_types::backend::packages_to_package_map( + backend_pkgs.clone(), + )); + let pkg_instructions = + konfigkoll_core::conversion::convert_packages_to_pkg_instructions( + backend_pkgs.into_iter(), + backend.as_backend_enum(), + interner, + ); + (pkg_map, pkg_instructions) + }); + (backend, backend_pkgs) + }) + .collect(); + for (backend, backend_pkgs) in backend_maps.into_iter() { + let (backend_pkgs_map, pkg_instructions) = backend_pkgs?; + package_maps.insert(backend.as_backend_enum(), backend_pkgs_map); + pkgs_sys.extend(pkg_instructions.into_iter()); + } + + Ok((pkgs_sys, package_maps)) +} + +type PackagePair<'a> = ( + &'a konfigkoll_types::PkgIdent, + &'a konfigkoll_types::PkgInstruction, +); + +/// Get a diff of packages +pub(crate) fn package_diff<'input>( + sorted_pkgs_sys: &'input PkgInstructions, + script_engine: &'input konfigkoll_script::ScriptEngine, +) -> impl Iterator, PackagePair<'input>>> { + let pkg_actions = &script_engine.state().commands().package_actions; + let left = sorted_pkgs_sys.iter(); + let right = pkg_actions.iter().sorted(); + + konfigkoll_core::diff::comm(left, right) +} diff --git a/crates/konfigkoll/src/save.rs b/crates/konfigkoll/src/save.rs new file mode 100644 index 00000000..fdbc56bb --- /dev/null +++ b/crates/konfigkoll/src/save.rs @@ -0,0 +1,36 @@ +//! Code to save config + +use std::io::Write; + +use anyhow::Context; +use camino::Utf8Path; +use konfigkoll_core::utils::safe_path_join; +use konfigkoll_types::FileContents; + +/// Copy files to the config directory, under the "files/". 
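+/// For example, `/etc/fstab` is saved as `files/etc/fstab` inside the config directory.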
+pub(crate) fn file_data_saver( + files_path: &Utf8Path, + path: &Utf8Path, + contents: &FileContents, +) -> Result<(), anyhow::Error> { + tracing::info!("Saving file data for {}", path); + let full_path = safe_path_join(files_path, path); + std::fs::create_dir_all(full_path.parent().with_context(|| { + format!("Impossible error: joined path should always be below config dir: {full_path}") + })?)?; + match contents { + FileContents::Literal { checksum: _, data } => { + let mut file = std::fs::File::create(&full_path)?; + file.write_all(data)?; + } + FileContents::FromFile { checksum: _, path } => { + std::fs::copy(path, &full_path)?; + } + } + Ok(()) +} + +pub(crate) fn noop_file_data_saver(path: &Utf8Path) -> Result<(), anyhow::Error> { + tracing::info!("Would save file data for {}", path); + Ok(()) +} diff --git a/crates/konfigkoll_core/Cargo.toml b/crates/konfigkoll_core/Cargo.toml new file mode 100644 index 00000000..e7a5b092 --- /dev/null +++ b/crates/konfigkoll_core/Cargo.toml @@ -0,0 +1,41 @@ +[package] +description = "Core functionality for Konfigkoll" +edition = "2021" +license = "MPL-2.0" +name = "konfigkoll_core" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[dependencies] +ahash.workspace = true +anyhow.workspace = true +camino.workspace = true +clru.workspace = true +compact_str.workspace = true +console.workspace = true +duct.workspace = true +either.workspace = true +itertools.workspace = true +konfigkoll_types = { version = "0.1.0", path = "../konfigkoll_types" } +libc.workspace = true +nix = { workspace = true, features = ["user"] } +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types" } +paketkoll_utils = { version = "0.1.0", path = "../paketkoll_utils" } +parking_lot.workspace = true +rayon.workspace = true +regex.workspace = true +smallvec.workspace = true +strum = { workspace = true, features = ["derive"] } +tracing.workspace = true + +[lints] +workspace = true + +[dev-dependencies] +indoc.workspace = true +pretty_assertions.workspace = true + +[[example]] +name = "multi_confirm_demo" +path = "examples/multi_confirm_demo.rs" diff --git a/crates/konfigkoll_core/README.md b/crates/konfigkoll_core/README.md new file mode 100644 index 00000000..03af943b --- /dev/null +++ b/crates/konfigkoll_core/README.md @@ -0,0 +1,5 @@ +# konfigkoll_core + +Core library of konfigkoll. + +**Warning**: This is not a stable API for public consumption. diff --git a/crates/konfigkoll_core/examples/multi_confirm_demo.rs b/crates/konfigkoll_core/examples/multi_confirm_demo.rs new file mode 100644 index 00000000..93b00af5 --- /dev/null +++ b/crates/konfigkoll_core/examples/multi_confirm_demo.rs @@ -0,0 +1,19 @@ +use console::Style; +use konfigkoll_core::confirm::MultiOptionConfirm; + +fn main() -> anyhow::Result<()> { + let mut builder = MultiOptionConfirm::builder(); + builder + .prompt("Are you sure?") + .option('y', "Yes") + .option('n', "No") + .option('d', "show Diff") + .prompt_style(Style::new().green()) + .options_style(Style::new().cyan()) + .default_option_style(Style::new().cyan().underlined()) + .default('N'); + let confirm = builder.build(); + let result = confirm.prompt()?; + dbg!(result); + Ok(()) +} diff --git a/crates/konfigkoll_core/src/apply.rs b/crates/konfigkoll_core/src/apply.rs new file mode 100644 index 00000000..7e50bdc8 --- /dev/null +++ b/crates/konfigkoll_core/src/apply.rs @@ -0,0 +1,518 @@ +//!
Apply a stream of instructions to the current system + +use std::{collections::BTreeMap, fs::Permissions, os::unix::fs::PermissionsExt, sync::Arc}; + +use ahash::AHashMap; +use anyhow::Context; +use either::Either; +use itertools::Itertools; +use konfigkoll_types::{FsInstruction, FsOp, FsOpDiscriminants, PkgIdent, PkgInstruction, PkgOp}; +use paketkoll_types::{ + backend::{Backend, Files, OriginalFileQuery, PackageBackendMap, PackageMap, PackageMapMap}, + intern::{Interner, PackageRef}, +}; + +use crate::{ + confirm::MultiOptionConfirm, + diff::show_fs_instr_diff, + utils::{IdKey, NameToNumericResolveCache}, +}; +use console::style; + +/// Applier of system changes +/// +/// Different implementors of this trait handle things like: +/// * Privilege separation +/// * Interactive confirmation +/// * Actual applying to the system +pub trait Applicator { + /// Apply package changes + fn apply_pkgs<'instructions>( + &mut self, + backend: Backend, + install: &[&'instructions str], + mark_explicit: &[&'instructions str], + uninstall: &[&'instructions str], + ) -> anyhow::Result<()>; + + /// Apply file changes + fn apply_files(&mut self, instructions: &[&FsInstruction]) -> anyhow::Result<()>; +} + +impl Applicator for Either +where + L: Applicator, + R: Applicator, +{ + fn apply_pkgs<'instructions>( + &mut self, + backend: Backend, + install: &[&'instructions str], + mark_explicit: &[&'instructions str], + uninstall: &[&'instructions str], + ) -> anyhow::Result<()> { + match self { + Either::Left(inner) => inner.apply_pkgs(backend, install, mark_explicit, uninstall), + Either::Right(inner) => inner.apply_pkgs(backend, install, mark_explicit, uninstall), + } + } + + fn apply_files(&mut self, instructions: &[&FsInstruction]) -> anyhow::Result<()> { + match self { + Either::Left(inner) => inner.apply_files(instructions), + Either::Right(inner) => inner.apply_files(instructions), + } + } +} + +/// Apply with no privilege separation +#[derive(Debug)] +pub struct InProcessApplicator { + package_backends: PackageBackendMap, + file_backend: Arc, + interner: Arc, + package_maps: BTreeMap>, + id_resolver: NameToNumericResolveCache, +} + +impl InProcessApplicator { + pub fn new( + package_backends: PackageBackendMap, + interner: &Arc, + package_maps: &BTreeMap>, + file_backend: &Arc, + ) -> Self { + Self { + package_backends, + file_backend: file_backend.clone(), + interner: Arc::clone(interner), + package_maps: package_maps.clone(), + id_resolver: NameToNumericResolveCache::new(), + } + } +} + +impl Applicator for InProcessApplicator { + fn apply_pkgs<'instructions>( + &mut self, + backend: Backend, + install: &[&'instructions str], + mark_explicit: &[&'instructions str], + uninstall: &[&'instructions str], + ) -> anyhow::Result<()> { + tracing::info!( + "Proceeding with installing {:?} and uninstalling {:?} with backend {:?}", + install, + uninstall, + backend + ); + let backend = self + .package_backends + .get(&backend) + .ok_or_else(|| anyhow::anyhow!("Unknown backend: {:?}", backend))?; + + tracing::info!("Installing packages..."); + backend.transact(install, &[], true)?; + tracing::info!("Marking packages explicit..."); + backend.mark(&[], mark_explicit)?; + tracing::info!("Attempting to mark unwanted packages as dependencies..."); + match backend.mark(uninstall, &[]) { + Ok(()) => { + tracing::info!("Successfully marked unwanted packages as dependencies"); + tracing::info!("Removing unused packages..."); + backend.remove_unused(true)?; + } + 
Err(paketkoll_types::backend::PackageManagerError::UnsupportedOperation(_)) => { + tracing::info!("Marking unwanted packages as dependencies not supported, using uninstall instead"); + backend.transact(&[], uninstall, true)?; + } + Err(e) => return Err(e.into()), + } + + Ok(()) + } + + fn apply_files(&mut self, instructions: &[&FsInstruction]) -> anyhow::Result<()> { + let pkg_map = self + .package_maps + .get(&self.file_backend.as_backend_enum()) + .ok_or_else(|| { + anyhow::anyhow!( + "No package map for file backend {:?}", + self.file_backend.as_backend_enum() + ) + })?; + for instr in instructions { + tracing::info!("Applying: {}: {}", instr.path, instr.op); + match &instr.op { + FsOp::Remove => { + let existing = std::fs::symlink_metadata(&instr.path); + if let Ok(metadata) = existing { + if metadata.is_dir() { + match std::fs::remove_dir(&instr.path) { + Ok(_) => (), + Err(err) => match err.raw_os_error() { + Some(libc::ENOTEMPTY) => { + Err(err).context("Failed to remove directory: it is not empty (possibly it contains some ignored files). You will have to investigate and resolve this yourself, since we don't want to delete things we shouldn't.")?; + } + Some(_) | None => { + Err(err).context("Failed to remove directory")?; + } + }, + } + } else { + std::fs::remove_file(&instr.path)?; + } + } + } + FsOp::CreateDirectory => { + std::fs::create_dir_all(&instr.path)?; + } + FsOp::CreateFile(contents) => match contents { + konfigkoll_types::FileContents::Literal { checksum: _, data } => { + std::fs::write(&instr.path, data)?; + } + konfigkoll_types::FileContents::FromFile { checksum: _, path } => { + std::fs::copy(path, &instr.path)?; + } + }, + FsOp::CreateSymlink { target } => { + std::os::unix::fs::symlink(target, &instr.path)?; + } + FsOp::CreateFifo => { + // Since we split out mode in general, we don't know what to put here. + // Use empty, and let later instructions set it correctly. + nix::unistd::mkfifo(instr.path.as_std_path(), nix::sys::stat::Mode::empty())?; + } + FsOp::CreateBlockDevice { major, minor } => { + // Like with fifo, we don't know mode yet. + nix::sys::stat::mknod( + instr.path.as_std_path(), + nix::sys::stat::SFlag::S_IFBLK, + nix::sys::stat::Mode::empty(), + nix::sys::stat::makedev(*major, *minor), + )?; + } + FsOp::CreateCharDevice { major, minor } => { + // Like with fifo, we don't know mode yet. + nix::sys::stat::mknod( + instr.path.as_std_path(), + nix::sys::stat::SFlag::S_IFCHR, + nix::sys::stat::Mode::empty(), + nix::sys::stat::makedev(*major, *minor), + )?; + } + FsOp::SetMode { mode } => { + let perms = Permissions::from_mode(mode.as_raw()); + std::fs::set_permissions(&instr.path, perms)?; + } + FsOp::SetOwner { owner } => { + let uid = nix::unistd::Uid::from_raw( + self.id_resolver.lookup(&IdKey::User(owner.clone()))?, + ); + nix::unistd::chown(instr.path.as_std_path(), Some(uid), None)?; + } + FsOp::SetGroup { group } => { + let gid = nix::unistd::Gid::from_raw( + self.id_resolver.lookup(&IdKey::Group(group.clone()))?, + ); + nix::unistd::chown(instr.path.as_std_path(), None, Some(gid))?; + } + FsOp::Restore => { + // Get package: + let owners = self + .file_backend + .owning_packages(&[instr.path.as_std_path()].into(), &self.interner) + .with_context(|| format!("Failed to find owner for {}", instr.path))?; + let package = owners + .get(instr.path.as_std_path()) + .with_context(|| format!("Failed to find owner for {}", instr.path))? 
+ .ok_or_else(|| anyhow::anyhow!("No owner for {}", instr.path))?; + let package = package.to_str(&self.interner); + // Get original contents: + let queries = [OriginalFileQuery { + package: package.into(), + path: instr.path.as_str().into(), + }]; + let original_contents = + self.file_backend + .original_files(&queries, pkg_map, &self.interner)?; + // Apply + for query in queries { + let contents = original_contents.get(&query).ok_or_else(|| { + anyhow::anyhow!("No original contents for {:?}", query) + })?; + std::fs::write(&instr.path, contents)?; + } + } + FsOp::Comment => (), + } + } + Ok(()) + } +} + +/// An applicator that asks for confirmation before applying changes +#[derive(Debug)] +pub struct InteractiveApplicator { + inner: Inner, + pkg_confirmer: MultiOptionConfirm, + fs_confirmer: MultiOptionConfirm, + interactive_confirmer: MultiOptionConfirm, + diff_command: Vec, + pager_command: Vec, +} + +impl InteractiveApplicator { + pub fn new(inner: Inner, diff_command: Vec, pager_command: Vec) -> Self { + let mut prompt_builder = MultiOptionConfirm::builder(); + prompt_builder + .prompt("Do you want to apply these changes?") + .option('y', "Yes") + .option('n', "No") + .option('d', "show Diff"); + let pkg_confirmer = prompt_builder.build(); + prompt_builder.option('i', "Interactive (change by change)"); + let fs_confirmer = prompt_builder.build(); + + let mut prompt_builder = MultiOptionConfirm::builder(); + prompt_builder + .prompt("Apply changes to this file?") + .option('y', "Yes") + .option('a', "Abort") + .option('s', "Skip") + .option('d', "show Diff"); + let interactive_confirmer = prompt_builder.build(); + + Self { + inner, + pkg_confirmer, + fs_confirmer, + interactive_confirmer, + diff_command, + pager_command, + } + } +} + +impl Applicator for InteractiveApplicator { + fn apply_pkgs<'instructions>( + &mut self, + backend: Backend, + install: &[&'instructions str], + mark_explicit: &[&'instructions str], + uninstall: &[&'instructions str], + ) -> anyhow::Result<()> { + tracing::info!( + "Will install {:?}, mark {:?} as explicit and uninstall {:?} with backend {backend}", + install.len(), + mark_explicit.len(), + uninstall.len(), + ); + + loop { + match self.pkg_confirmer.prompt()? { + 'y' => { + tracing::info!("Applying changes"); + return self + .inner + .apply_pkgs(backend, install, mark_explicit, uninstall); + } + 'n' => { + tracing::info!("Aborting"); + return Err(anyhow::anyhow!("User aborted")); + } + 'd' => { + println!("With package manager {backend}:"); + for pkg in install { + println!(" {} {}", style("+").green(), pkg); + } + for pkg in mark_explicit { + println!(" {} {} (mark explicit)", style("E").green(), pkg); + } + for pkg in uninstall { + println!(" {} {}", style("-").red(), pkg); + } + } + _ => return Err(anyhow::anyhow!("Unexpected branch (internal error)")), + } + } + } + + fn apply_files(&mut self, instructions: &[&FsInstruction]) -> anyhow::Result<()> { + tracing::info!("Will apply {} file instructions", instructions.len()); + loop { + match self.fs_confirmer.prompt()? 
{ + 'y' => { + tracing::info!("Applying changes"); + return self.inner.apply_files(instructions); + } + 'n' => { + tracing::info!("Aborting"); + return Err(anyhow::anyhow!("User aborted")); + } + 'd' => { + println!("With file system:"); + for instr in instructions { + println!(" {}: {}", style(instr.path.as_str()).blue(), instr.op); + } + } + 'i' => { + for instr in instructions { + self.interactive_apply_single_file(instr)?; + } + return Ok(()); + } + _ => return Err(anyhow::anyhow!("Unexpected branch (internal error)")), + } + } + } +} + +impl InteractiveApplicator { + fn interactive_apply_single_file( + &mut self, + instr: &&FsInstruction, + ) -> Result<(), anyhow::Error> { + println!( + "Under consideration: {} with change {}", + style(instr.path.as_str()).blue(), + instr.op + ); + loop { + match self.interactive_confirmer.prompt()? { + 'y' => { + tracing::info!("Applying change to {}", instr.path); + return self.inner.apply_files(&[instr]); + } + 'a' => { + tracing::info!("Aborting"); + return Err(anyhow::anyhow!("User aborted")); + } + 's' => { + tracing::info!("Skipping {}", instr.path); + return Ok(()); + } + 'd' => { + show_fs_instr_diff( + instr, + self.diff_command.as_slice(), + self.pager_command.as_slice(), + )?; + } + _ => return Err(anyhow::anyhow!("Unexpected branch (internal error)")), + }; + } + } +} + +/// Just print, don't actually apply. +#[derive(Debug, Default)] +pub struct NoopApplicator {} + +impl Applicator for NoopApplicator { + fn apply_pkgs<'instructions>( + &mut self, + backend: Backend, + install: &[&'instructions str], + mark_explicit: &[&'instructions str], + uninstall: &[&'instructions str], + ) -> anyhow::Result<()> { + tracing::info!( + "Would install {:?}, mark {:?} explicit and uninstall {:?} with backend {:?}", + install.len(), + mark_explicit.len(), + uninstall.len(), + backend + ); + + for pkg in install { + tracing::info!(" + {}", pkg); + } + for pkg in mark_explicit { + tracing::info!(" {} (mark explicit)", pkg); + } + for pkg in uninstall { + tracing::info!(" - {}", pkg); + } + Ok(()) + } + + fn apply_files(&mut self, instructions: &[&FsInstruction]) -> anyhow::Result<()> { + tracing::info!("Would apply {} file instructions", instructions.len()); + for instr in instructions { + tracing::info!(" {}: {}", instr.path, instr.op); + } + Ok(()) + } +} + +pub fn apply_files<'instructions>( + applicator: &mut dyn Applicator, + instructions: impl Iterator, +) -> anyhow::Result<()> { + // Sort and group by type of operation, to make changes easier to review + let instructions = instructions + .sorted_by(|a, b| a.op.cmp(&b.op).then_with(|| a.path.cmp(&b.path))) + .collect_vec(); + let chunked_instructions = instructions + .iter() + .chunk_by(|e| FsOpDiscriminants::from(&e.op)); + // Process each chunk separately + for (_discr, chunk) in chunked_instructions.into_iter() { + let chunk = chunk.cloned().collect_vec(); + // Removing things has to be sorted reverse, so we remove contents before the directory they are containers of + let chunk = match chunk[0].op { + FsOp::Remove => chunk.into_iter().rev().collect_vec(), + _ => chunk, + }; + applicator.apply_files(chunk.as_slice())?; + } + Ok(()) +} + +#[derive(Default)] +struct PackageOperations<'a> { + install: Vec<&'a str>, + mark_as_manual: Vec<&'a str>, + uninstall: Vec<&'a str>, +} + +/// Apply package changes +pub fn apply_packages<'instructions>( + applicator: &mut dyn Applicator, + instructions: impl Iterator, + package_maps: &PackageMapMap, + interner: &Interner, +) -> anyhow::Result<()> { + // Sort 
into backends + let mut sorted = AHashMap::new(); + for (pkg, instr) in instructions { + let backend = pkg.package_manager; + let entry = sorted + .entry(backend) + .or_insert_with(PackageOperations::default); + let sub_map = package_maps + .get(&backend) + .ok_or_else(|| anyhow::anyhow!("No package map for backend {:?}", backend))?; + // Deal with the case where a package is installed as a dependency and we want it explicit + let pkg_ref = PackageRef::get_or_intern(interner, pkg.identifier.as_str()); + let has_pkg = sub_map.get(&pkg_ref).is_some(); + match (instr.op, has_pkg) { + (PkgOp::Install, true) => entry.mark_as_manual.push(pkg.identifier.as_str()), + (PkgOp::Install, false) => entry.install.push(pkg.identifier.as_str()), + (PkgOp::Uninstall, _) => entry.uninstall.push(pkg.identifier.as_str()), + } + } + + // Apply with applicator + for (backend, operations) in sorted { + applicator.apply_pkgs( + backend, + &operations.install, + &operations.mark_as_manual, + &operations.uninstall, + )?; + } + Ok(()) +} diff --git a/crates/konfigkoll_core/src/confirm.rs b/crates/konfigkoll_core/src/confirm.rs new file mode 100644 index 00000000..f2a0bd04 --- /dev/null +++ b/crates/konfigkoll_core/src/confirm.rs @@ -0,0 +1,196 @@ +//! Allows asking for confirmation in the CLI + +use std::io::Write; + +use ahash::AHashSet; +use compact_str::{CompactString, ToCompactString}; +use console::{Key, Style, Term}; +use itertools::Itertools; + +/// A simple multiple choice prompt. Will look something like: +/// +/// ```text +/// Are you sure? [Yes/No/show Diff] +/// ``` +/// +/// Trigger letters: +/// * Must be unique +/// * Must be available as a unique code point in both upper and lower case. +/// * The convention is to put the trigger letter in uppercase in the string for the option. +#[derive(Debug, Clone)] +pub struct MultiOptionConfirm { + prompt: CompactString, + default: Option<char>, + options: AHashSet<char>, +} + +impl MultiOptionConfirm { + /// Create a builder for this type + pub fn builder() -> MultiOptionConfirmBuilder { + MultiOptionConfirmBuilder::new() + } + + /// Run the prompt and return the user choice + pub fn prompt(&self) -> anyhow::Result<char> { + let mut term = Term::stdout(); + loop { + term.write_all(self.prompt.as_bytes())?; + let key = term.read_key()?; + match key { + Key::Char(c) => term.write_line(format!("{c}").as_str())?, + _ => term.write_line("")?, + } + + match key { + console::Key::Enter => { + if let Some(default) = self.default { + return Ok(default); + } else { + term.write_line("Please select an option (this prompt has no default)")?; + } + } + console::Key::Char(c) => { + let lower_case: AHashSet<_> = c.to_lowercase().collect(); + let found = self.options.intersection(&lower_case).count() > 0; + if found { + return Ok(c); + } else { + term.write_line("Invalid option, try again")?; + } + } + console::Key::Escape => { + term.write_line("Aborted")?; + anyhow::bail!("User aborted with Escape"); + } + console::Key::CtrlC => { + term.write_line("Aborted")?; + anyhow::bail!("User aborted with Ctrl-C"); + } + _ => { + term.write_line("Unknown input, try again")?; + } + } + } + } +} + +/// Builder for [`MultiOptionConfirm`]. +/// +/// Use [`MultiOptionConfirm::builder()`] to create a new instance. +/// +/// The default style uses colours and highlights the default option with bold.
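+///
+/// Illustrative usage (condensed from `examples/multi_confirm_demo.rs` in this crate; styling calls omitted):
+///
+/// ```no_run
+/// use konfigkoll_core::confirm::MultiOptionConfirm;
+///
+/// let mut builder = MultiOptionConfirm::builder();
+/// builder
+///     .prompt("Are you sure?")
+///     .option('y', "Yes")
+///     .option('n', "No")
+///     .option('d', "show Diff")
+///     .default('n');
+/// let confirm = builder.build();
+/// let choice = confirm.prompt().expect("failed to read a choice from the terminal");
+/// println!("User chose: {choice}");
+/// ```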
+#[derive(Debug, Clone)] +pub struct MultiOptionConfirmBuilder { + prompt: Option, + default: Option, + prompt_style: Style, + options_style: Style, + default_option_style: Style, + options: Vec<(char, CompactString)>, +} + +impl MultiOptionConfirmBuilder { + fn new() -> Self { + Self { + prompt: None, + default: None, + prompt_style: Style::new().green(), + options_style: Style::new().cyan(), + default_option_style: Style::new().cyan().bold(), + options: Vec::new(), + } + } + + /// Set prompt to use. Required. + pub fn prompt(&mut self, prompt: &str) -> &mut Self { + self.prompt = Some(prompt.to_compact_string()); + self + } + + /// Set default choice. Optional. + pub fn default(&mut self, default: char) -> &mut Self { + self.default = Some( + default + .to_lowercase() + .next() + .expect("Letter is not available as lower case"), + ); + self + } + + /// Add an option. At least two are required. + pub fn option(&mut self, key: char, value: &str) -> &mut Self { + self.options.push(( + key.to_lowercase() + .next() + .expect("Letter is not available as lower case"), + value.to_compact_string(), + )); + self + } + + /// Set style for question part of the prompt. + pub fn prompt_style(&mut self, style: Style) -> &mut Self { + self.prompt_style = style; + self + } + + /// Set style for the options. + pub fn options_style(&mut self, style: Style) -> &mut Self { + self.options_style = style; + self + } + + /// Set style for the default option. + pub fn default_option_style(&mut self, style: Style) -> &mut Self { + self.default_option_style = style; + self + } + + fn render_prompt(&self) -> CompactString { + let mut prompt = self + .prompt_style + .apply_to(&self.prompt.as_ref().expect("A prompt must be set")) + .to_compact_string(); + + prompt.push_str( + self.options_style + .apply_to(" [") + .to_compact_string() + .as_str(), + ); + let formatted = self.options.iter().map(|(key, description)| { + if Some(*key) == self.default { + self.default_option_style + .apply_to(description) + .to_compact_string() + } else { + self.options_style.apply_to(description).to_compact_string() + } + }); + let options = Itertools::intersperse( + formatted, + self.options_style.apply_to("/").to_compact_string(), + ) + .collect::(); + prompt.push_str(options.as_str()); + prompt.push_str( + self.options_style + .apply_to("] ") + .to_compact_string() + .as_str(), + ); + prompt + } + + pub fn build(&self) -> MultiOptionConfirm { + if self.options.len() < 2 { + panic!("At least two options are required"); + } + MultiOptionConfirm { + prompt: self.render_prompt(), + default: self.default, + options: self.options.iter().map(|(key, _)| *key).collect(), + } + } +} diff --git a/crates/konfigkoll_core/src/conversion.rs b/crates/konfigkoll_core/src/conversion.rs new file mode 100644 index 00000000..158de668 --- /dev/null +++ b/crates/konfigkoll_core/src/conversion.rs @@ -0,0 +1,426 @@ +//! 
Conversion from paketkoll issues into konfigkoll instruction stream + +use std::{ + fs::File, + io::{BufReader, Read, Seek}, + os::unix::fs::{FileTypeExt, MetadataExt}, + sync::atomic::AtomicU32, +}; + +use anyhow::Context; +use camino::Utf8Path; +use compact_str::format_compact; +use konfigkoll_types::{ + FileContents, FsInstruction, FsOp, PkgIdent, PkgInstruction, PkgInstructions, PkgOp, +}; +use paketkoll_types::{ + backend::Backend, + files::{Checksum, Gid, Mode, Uid}, + intern::{Interner, PackageRef}, + issue::Issue, + package::{InstallReason, PackageInterned}, +}; +use paketkoll_utils::{checksum::sha256_readable, MODE_MASK}; +use parking_lot::Mutex; +use rayon::prelude::*; + +use crate::utils::{IdKey, NumericToNameResolveCache}; + +pub fn convert_issues_to_fs_instructions( + issues: Vec<(Option<PackageRef>, Issue)>, +) -> anyhow::Result<Vec<FsInstruction>> { + tracing::debug!("Starting conversion of {} issues", issues.len()); + let error_count = AtomicU32::new(0); + let id_resolver = Mutex::new(NumericToNameResolveCache::new()); + + let converted: Vec<FsInstruction> = issues + .into_par_iter() + .map(|issue| { + let mut results = vec![]; + let (_pkg, issue) = issue; + match convert_issue(&issue, &mut results, &id_resolver) { + Ok(()) => (), + Err(err) => { + tracing::error!( + "Error converting issue: {err:?} for {}", + issue.path().display() + ); + error_count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + } + } + results + }) + .flatten() + .collect(); + + tracing::debug!("Conversion done, length: {}", converted.len()); + let error_count = error_count.load(std::sync::atomic::Ordering::Relaxed); + if error_count > 0 { + anyhow::bail!("{error_count} errors were encountered while converting, see log"); + } + + Ok(converted) +} + +fn convert_issue( + issue: &Issue, + results: &mut Vec<FsInstruction>, + id_resolver: &Mutex<NumericToNameResolveCache>, +) -> Result<(), anyhow::Error> { + let path: &Utf8Path = issue.path().try_into()?; + for kind in issue.kinds() { + match kind { + paketkoll_types::issue::IssueKind::Missing => results.push(FsInstruction { + path: path.into(), + op: FsOp::Remove, + comment: None, + }), + paketkoll_types::issue::IssueKind::Exists + | paketkoll_types::issue::IssueKind::Unexpected => { + results.extend(from_fs(path, id_resolver)?); + } + paketkoll_types::issue::IssueKind::PermissionDenied => { + anyhow::bail!("Permission denied on {:?}", issue.path()); + } + paketkoll_types::issue::IssueKind::TypeIncorrect { + actual: _, + expected: _, + } => { + results.push(FsInstruction { + path: path.into(), + op: FsOp::Remove, + comment: Some(format_compact!("Removed due to type conflict")), + }); + results.extend(from_fs(path, id_resolver)?); + } + paketkoll_types::issue::IssueKind::SizeIncorrect { ..
} => { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateFile( + fs_load_contents(path, None) + .with_context(|| format!("Failed to read {path:?}"))?, + ), + comment: None, + }); + } + paketkoll_types::issue::IssueKind::ChecksumIncorrect { + actual, + expected: _, + } => { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateFile( + fs_load_contents(path, Some(actual)) + .with_context(|| format!("Failed to read {path:?}"))?, + ), + comment: None, + }); + } + paketkoll_types::issue::IssueKind::SymlinkTarget { + actual, + expected: _, + } => { + let actual: &Utf8Path = actual.as_path().try_into()?; + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateSymlink { + target: actual.into(), + }, + comment: None, + }); + } + paketkoll_types::issue::IssueKind::WrongOwner { + actual, + expected: _, + } => results.push(FsInstruction { + path: path.into(), + op: FsOp::SetOwner { + owner: id_resolver.lock().lookup(&IdKey::User(*actual))?, + }, + comment: None, + }), + paketkoll_types::issue::IssueKind::WrongGroup { + actual, + expected: _, + } => results.push(FsInstruction { + path: path.into(), + op: FsOp::SetGroup { + group: id_resolver.lock().lookup(&IdKey::Group(*actual))?, + }, + comment: None, + }), + paketkoll_types::issue::IssueKind::WrongMode { + actual, + expected: _, + } => results.push(FsInstruction { + path: path.into(), + op: FsOp::SetMode { mode: *actual }, + comment: None, + }), + paketkoll_types::issue::IssueKind::WrongDeviceNodeId { + actual: (dev_type, major, minor), + expected: _, + } => results.push(FsInstruction { + path: path.into(), + op: match dev_type { + paketkoll_types::files::DeviceType::Block => FsOp::CreateBlockDevice { + major: *major, + minor: *minor, + }, + paketkoll_types::files::DeviceType::Char => FsOp::CreateCharDevice { + major: *major, + minor: *minor, + }, + }, + comment: None, + }), + paketkoll_types::issue::IssueKind::MetadataError(_) => todo!(), + paketkoll_types::issue::IssueKind::FsCheckError(_) => todo!(), + _ => todo!(), + }; + } + Ok(()) +} + +/// Create all required instructions for a file on the file system +fn from_fs( + path: &Utf8Path, + id_resolver: &Mutex, +) -> anyhow::Result> { + let metadata = path + .symlink_metadata() + .with_context(|| anyhow::anyhow!("Failed to get metadata"))?; + + let mut results = vec![]; + + if metadata.is_file() { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateFile( + fs_load_contents(path, None).with_context(|| format!("Failed to load {path}"))?, + ), + comment: None, + }); + } else if metadata.is_dir() { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateDirectory, + comment: None, + }); + } else if metadata.file_type().is_symlink() { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateSymlink { + target: std::fs::read_link(path) + .with_context(|| anyhow::anyhow!("Failed to read symlink target"))? 
+ .try_into()?, + }, + comment: None, + }); + } else if metadata.file_type().is_fifo() { + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateFifo, + comment: None, + }); + } else if metadata.file_type().is_block_device() { + let rdev = metadata.rdev(); + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateBlockDevice { + // SAFETY: rdev is a valid device number + major: unsafe { libc::major(rdev) } as u64, + // SAFETY: rdev is a valid device number + minor: unsafe { libc::minor(rdev) } as u64, + }, + comment: None, + }); + } else if metadata.file_type().is_char_device() { + let rdev = metadata.rdev(); + results.push(FsInstruction { + path: path.into(), + op: FsOp::CreateCharDevice { + // SAFETY: rdev is a valid device number + major: unsafe { libc::major(rdev) } as u64, + // SAFETY: rdev is a valid device number + minor: unsafe { libc::minor(rdev) } as u64, + }, + comment: None, + }); + } else if metadata.file_type().is_socket() { + // Socket files can only be created by a running program and gets + // removed on program end. We can't do anything with them. + tracing::warn!("Ignoring socket file: {:?}", path); + return Ok(results.into_iter()); + } else { + anyhow::bail!("Unsupported file type: {:?}", path); + } + + // Set metadata + if !metadata.is_symlink() { + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetMode { + mode: Mode::new(metadata.mode() & MODE_MASK), + }, + comment: None, + }); + } + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetOwner { + owner: id_resolver + .lock() + .lookup(&IdKey::User(Uid::new(metadata.uid())))?, + }, + comment: None, + }); + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetGroup { + group: id_resolver + .lock() + .lookup(&IdKey::Group(Gid::new(metadata.gid())))?, + }, + comment: None, + }); + + Ok(results.into_iter()) +} + +/// Load real contents from file system +fn fs_load_contents(path: &Utf8Path, checksum: Option<&Checksum>) -> anyhow::Result { + let mut reader = BufReader::new(File::open(path)?); + // Always use sha256, recompute if we were given an MD5. + // This is needed to normalise the checksums for diffing later on. 
+ let checksum = match checksum { + Some(c @ Checksum::Sha256(_)) => c.clone(), + Some(_) | None => sha256_readable(&mut reader)?, + }; + let size = path.metadata()?.size(); + // I don't like this, but I don't see much of a better option to avoid running out of memory + if size > 1024 * 1024 { + Ok(FileContents::FromFile { + checksum, + path: path.into(), + }) + } else { + reader.rewind()?; + let mut buf = Vec::with_capacity(size as usize); + reader.read_to_end(&mut buf)?; + Ok(FileContents::Literal { + checksum, + data: buf.into_boxed_slice(), + }) + } +} + +pub fn convert_packages_to_pkg_instructions( + packages: impl Iterator, + package_manager: Backend, + interner: &Interner, +) -> PkgInstructions { + let mut results = PkgInstructions::default(); + + for package in packages { + // We only consider explicitly installed packages + if package.reason == Some(InstallReason::Dependency) { + continue; + } + let identifier = if package.ids.is_empty() { + package.name.to_str(interner).into() + } else { + package.ids[0].to_str(interner).into() + }; + results.insert( + PkgIdent { + package_manager, + identifier, + }, + PkgInstruction { + op: PkgOp::Install, + comment: package.desc.clone(), + }, + ); + } + + results +} + +#[cfg(test)] +mod tests { + use itertools::Itertools; + use paketkoll_types::package::PackageInstallStatus; + + use super::*; + + #[test] + fn test_convert_packages_to_pkg_instructions() { + let interner = Interner::new(); + let packages = vec![ + PackageInterned { + name: PackageRef::get_or_intern(&interner, "foo"), + version: "1.0".into(), + desc: Some("A package".into()), + depends: vec![], + provides: vec![], + reason: Some(InstallReason::Explicit), + status: PackageInstallStatus::Installed, + ids: smallvec::smallvec![], + architecture: None, + }, + PackageInterned { + name: PackageRef::get_or_intern(&interner, "bar"), + version: "1.0".into(), + desc: Some("Another package".into()), + depends: vec![], + provides: vec![], + reason: Some(InstallReason::Dependency), + status: PackageInstallStatus::Installed, + ids: smallvec::smallvec![], + architecture: None, + }, + PackageInterned { + name: PackageRef::get_or_intern(&interner, "quux"), + architecture: None, + version: "2.0".into(), + desc: Some("Yet another package".into()), + depends: vec![], + provides: vec![], + reason: Some(InstallReason::Explicit), + status: PackageInstallStatus::Installed, + ids: smallvec::smallvec![PackageRef::get_or_intern(&interner, "quux/x86-64")], + }, + ]; + + let instructions = + convert_packages_to_pkg_instructions(packages.into_iter(), Backend::Apt, &interner); + + assert_eq!(instructions.len(), 2); + assert_eq!( + instructions.iter().sorted().collect::>(), + vec![ + ( + &PkgIdent { + package_manager: Backend::Apt, + identifier: "foo".into() + }, + &PkgInstruction { + op: PkgOp::Install, + comment: Some("A package".into()) + } + ), + ( + &PkgIdent { + package_manager: Backend::Apt, + identifier: "quux/x86-64".into() + }, + &PkgInstruction { + op: PkgOp::Install, + comment: Some("Yet another package".into()) + } + ) + ] + ); + } +} diff --git a/crates/konfigkoll_core/src/diff.rs b/crates/konfigkoll_core/src/diff.rs new file mode 100644 index 00000000..2e36d9de --- /dev/null +++ b/crates/konfigkoll_core/src/diff.rs @@ -0,0 +1,209 @@ +//! Diff two sets of instructions +//! +//! 
This module implements a generic algorithm similar to comm(1) + +use std::{ + iter::FusedIterator, + os::unix::fs::{MetadataExt, PermissionsExt}, +}; + +use camino::{Utf8Path, Utf8PathBuf}; +use console::style; +use itertools::{EitherOrBoth, Itertools}; +use konfigkoll_types::{FsInstruction, FsOp}; +use paketkoll_utils::MODE_MASK; + +/// Compare two sorted slices of items +pub fn comm(left: L, right: R) -> impl FusedIterator> +where + L: Iterator, + R: Iterator, + L::Item: Ord, + L::Item: PartialEq, +{ + left.merge_join_by(right, Ord::cmp) +} + +pub fn show_fs_instr_diff( + instr: &FsInstruction, + diff_command: &[String], + pager_command: &[String], +) -> Result<(), anyhow::Error> { + match &instr.op { + FsOp::CreateFile(contents) => { + show_file_diff(&instr.path, contents, diff_command, pager_command)?; + } + FsOp::Remove => { + println!( + "{}: Would apply action: {}", + instr.path, + style(&instr.op).red() + ); + } + FsOp::CreateDirectory + | FsOp::CreateFifo + | FsOp::CreateBlockDevice { .. } + | FsOp::CreateCharDevice { .. } => { + println!( + "{}: Would apply action: {}", + instr.path, + style(&instr.op).green() + ); + } + FsOp::CreateSymlink { target } => { + // Get old target + let old_target = match std::fs::read_link(&instr.path) { + Ok(target) => Utf8PathBuf::from_path_buf(target) + .map_err(|p| anyhow::anyhow!("Failed to convert path to UTF-8: {:?}", p))? + .to_string(), + Err(error) => match error.kind() { + std::io::ErrorKind::NotFound => "".to_string(), + _ => return Err(error.into()), + }, + }; + // Show diff + println!( + "{}: Would change symlink target: {} -> {}", + instr.path, + style(old_target).red(), + style(target).green() + ); + } + FsOp::SetMode { mode } => { + // Get old + let old_mode = std::fs::symlink_metadata(&instr.path) + .map(|m| m.permissions().mode() & MODE_MASK) + .unwrap_or(0); + // Show diff + println!( + "{}: Would change mode: {} -> {}", + instr.path, + style(format!("{:o}", old_mode)).red(), + style(format!("{:o}", mode.as_raw())).green() + ); + } + FsOp::SetOwner { owner } => { + // Get old UID + let old_uid = std::fs::symlink_metadata(&instr.path) + .map(|m| m.uid()) + .unwrap_or(0); + // Resolve to old user + let old_user = nix::unistd::User::from_uid(nix::unistd::Uid::from_raw(old_uid))? + .map(|u| u.name) + .unwrap_or_else(|| "".to_string()); + // Resolve new owner to new UID + let new_uid = nix::unistd::User::from_name(owner.as_str())? + .map(|u| u.uid.as_raw()) + .map(|uid| format!("{}", uid)) + .unwrap_or_else(|| "".to_string()); + // Show diff + println!( + "{}: Would change owner: {} ({}) -> {} ({})", + instr.path, + style(old_user).red(), + style(old_uid).red(), + style(owner).green(), + style(new_uid).green() + ); + } + FsOp::SetGroup { group } => { + // Get old GID + let old_gid = std::fs::symlink_metadata(&instr.path) + .map(|m| m.gid()) + .unwrap_or(0); + // Resolve to old group + let old_group = nix::unistd::Group::from_gid(nix::unistd::Gid::from_raw(old_gid))? + .map(|g| g.name) + .unwrap_or_else(|| "".to_string()); + // Resolve new group to new GID + let new_gid = nix::unistd::Group::from_name(group.as_str())? + .map(|g| g.gid.as_raw()) + .map(|gid| format!("{}", gid)) + .unwrap_or_else(|| "".to_string()); + // Show diff + println!( + "{}: Would change group: {} ({}) -> {} ({})", + instr.path, + style(old_group).red(), + style(old_gid).red(), + style(group).green(), + style(new_gid).green() + ); + } + FsOp::Restore { .. 
} => { + println!( + "{}: Would restore to original package manager state", + style(&instr.path).color256(202) + ); + } + FsOp::Comment => (), + }; + Ok(()) +} + +fn show_file_diff( + sys_path: &Utf8Path, + contents: &konfigkoll_types::FileContents, + diff_command: &[String], + pager_command: &[String], +) -> anyhow::Result<()> { + let diff = match contents { + konfigkoll_types::FileContents::Literal { checksum: _, data } => duct::cmd( + &diff_command[0], + diff_command[1..] + .iter() + .chain(&[sys_path.to_string(), "/dev/stdin".into()]), + ) + .stdin_bytes(data.clone()), + konfigkoll_types::FileContents::FromFile { checksum: _, path } => duct::cmd( + &diff_command[0], + diff_command[1..] + .iter() + .chain(&[sys_path.to_string(), path.to_string()]), + ), + } + .unchecked(); + let pipeline = diff.pipe(duct::cmd(&pager_command[0], pager_command[1..].iter())); + match pipeline.run() { + Ok(output) => { + if !output.status.success() { + tracing::warn!( + "Diff or pager exited with non-zero status: {}", + output.status + ); + } + Ok(()) + } + Err(err) => { + tracing::error!( + "Diff or pager exited with: {}, kind: {}, OS code {:?}", + err, + err.kind(), + err.raw_os_error() + ); + Ok(()) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_comm() { + let left = [1, 2, 3, 4, 5, 8]; + let right = [3, 4, 5, 6, 7]; + + let mut comm_iter = comm(left.into_iter(), right.into_iter()); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Left(1))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Left(2))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Both(3, 3))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Both(4, 4))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Both(5, 5))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Right(6))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Right(7))); + assert_eq!(comm_iter.next(), Some(EitherOrBoth::Left(8))); + assert_eq!(comm_iter.next(), None); + } +} diff --git a/crates/konfigkoll_core/src/lib.rs b/crates/konfigkoll_core/src/lib.rs new file mode 100644 index 00000000..45eeee10 --- /dev/null +++ b/crates/konfigkoll_core/src/lib.rs @@ -0,0 +1,11 @@ +//! Core library of konfigkoll +//! +//! **Warning**: This is not a stable API for public consumption. +pub mod apply; +pub mod confirm; +pub mod conversion; +pub mod diff; +pub mod line_edit; +pub mod save; +pub mod state; +pub mod utils; diff --git a/crates/konfigkoll_core/src/line_edit.rs b/crates/konfigkoll_core/src/line_edit.rs new file mode 100644 index 00000000..790d1f66 --- /dev/null +++ b/crates/konfigkoll_core/src/line_edit.rs @@ -0,0 +1,677 @@ +//! A simple streaming line editor (inspired by sed, but simplified) + +use std::{borrow::Cow, cell::RefCell, fmt::Debug, rc::Rc, str::Lines}; + +use compact_str::CompactString; +use regex::Regex; + +/// A program consists of a bunch of commands and can be applied to a string line by line. +/// +/// Like sed the basic algorithm is to repeatedly (until the input is consumed): +/// 1. Read a line into a "pattern space" buffer +/// 2. For each instruction in the program: +/// 1. Check if selector matches the current line number and/or line contents +/// 2. Apply action on the pattern space +/// 3. Append the pattern space to the output buffer +/// 4. Clear the pattern space +/// +/// This means that the instructions will operate on the pattern space +/// *as changed by any previous instructions* in the program. 
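+///
+/// A minimal sketch of a single-instruction program (mirroring the unit tests at the end of this file):
+///
+/// ```
+/// use konfigkoll_core::line_edit::{Action, EditProgram, Selector};
+/// use regex::Regex;
+///
+/// let mut program = EditProgram::new();
+/// program.add(
+///     Selector::All,
+///     false,
+///     Action::RegexReplace {
+///         regex: Regex::new("^foo$").unwrap(),
+///         replacement: "bar".into(),
+///         replace_all: false,
+///     },
+/// );
+/// assert_eq!(program.apply("foo\nbar\nbaz"), "bar\nbar\nbaz\n");
+/// ```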
+#[derive(Debug, Clone)] +pub struct EditProgram { + instructions: Vec, + print_default: bool, +} + +impl Default for EditProgram { + fn default() -> Self { + Self { + instructions: Default::default(), + print_default: true, + } + } +} + +impl EditProgram { + /// Create a new empty program. + pub fn new() -> Self { + Self::default() + } + + /// Add a new instruction to the program. + pub fn add(&mut self, selector: Selector, selector_invert: bool, action: Action) -> &mut Self { + self.instructions.push(Instruction { + selector, + selector_invert, + action, + }); + self + } + + /// Disable the default implicit action of putting the current pattern space into the output. + pub fn disable_default_printing(&mut self) -> &mut Self { + self.print_default = false; + self + } + + /// Helper to implement `NextLine` command + fn advance_line<'lines>( + &self, + pattern_space: &mut String, + output: &mut String, + line: &mut &'lines str, + lines: &mut Lines<'lines>, + line_number: &mut usize, + ) -> bool { + if self.print_default { + output.push_str(pattern_space); + output.push('\n'); + } + pattern_space.clear(); + if let Some(line_) = lines.next() { + *line = line_; + *line_number += 1; + pattern_space.push_str(line); + true + } else { + false + } + } + + /// Apply this program to the given input string. + pub fn apply(&self, input: &str) -> String { + let mut output = String::new(); + let mut line_number = 0; + let mut pattern_space = String::new(); + let mut lines = input.lines(); + 'input: while let Some(line) = lines.next() { + line_number += 1; + pattern_space.push_str(line); + + let prog_action = self.execute_program( + &mut pattern_space, + &mut line_number, + line, + &mut lines, + &mut output, + ); + match prog_action { + ProgramAction::Done => (), + ProgramAction::Stop => break 'input, + ProgramAction::StopAndPrint => { + print_rest_of_input(&mut output, &mut pattern_space, &mut lines); + break 'input; + } + ProgramAction::ShortCircuit => continue 'input, + } + if self.print_default { + output.push_str(&pattern_space); + output.push('\n'); + } + pattern_space.clear(); + } + // Run end of file match: + pattern_space.clear(); + for instr in &self.instructions { + if let Selector::Eof = instr.selector { + match instr.action.apply(&mut pattern_space, &mut output) { + ActionResult::Continue => (), + ActionResult::ShortCircuit => break, + ActionResult::Stop => break, + ActionResult::StopAndPrint => { + print_rest_of_input(&mut output, &mut pattern_space, &mut lines); + break; + } + ActionResult::NextLine => { + tracing::error!("NextLine not allowed in EOF selector"); + } + ActionResult::Subprogram(_) => todo!(), + } + } + } + if !pattern_space.is_empty() { + let pattern_space = if let Some(stripped) = pattern_space.strip_prefix('\n') { + stripped + } else { + &pattern_space + }; + output.push_str(pattern_space); + if !pattern_space.ends_with('\n') { + output.push('\n'); + } + } + output + } + + fn execute_program<'lines>( + &self, + pattern_space: &mut String, + line_number: &mut usize, + mut line: &'lines str, + lines: &mut Lines<'lines>, + output: &mut String, + ) -> ProgramAction { + for instr in &self.instructions { + if instr.matches(LineNo::Line(*line_number), line) { + match instr.action.apply(pattern_space, output) { + ActionResult::Continue => (), + ActionResult::ShortCircuit => return ProgramAction::ShortCircuit, + ActionResult::Stop => return ProgramAction::Stop, + ActionResult::StopAndPrint => return ProgramAction::StopAndPrint, + ActionResult::NextLine => { + 
self.advance_line(pattern_space, output, &mut line, lines, line_number); + } + ActionResult::Subprogram(sub) => { + let result = sub.borrow().execute_program( + pattern_space, + line_number, + line, + lines, + output, + ); + match result { + ProgramAction::Done => (), + ProgramAction::Stop => return ProgramAction::Stop, + ProgramAction::StopAndPrint => return ProgramAction::StopAndPrint, + // TODO: Is this the sensible semantics? + ProgramAction::ShortCircuit => return ProgramAction::ShortCircuit, + } + } + } + } + } + ProgramAction::Done + } +} + +fn print_rest_of_input(output: &mut String, pattern_space: &mut String, lines: &mut Lines<'_>) { + output.push_str(&*pattern_space); + pattern_space.clear(); + output.push('\n'); + for line in lines.by_ref() { + output.push_str(line); + output.push('\n'); + } +} + +#[derive(Debug)] +enum ProgramAction { + Done, + Stop, + StopAndPrint, + ShortCircuit, +} + +/// An instruction consists of a selector and an action. +#[derive(Debug, Clone)] +struct Instruction { + selector: Selector, + selector_invert: bool, + action: Action, +} + +impl Instruction { + fn matches(&self, line_no: LineNo, line: &str) -> bool { + let matches = self.selector.matches(line_no, line); + if self.selector_invert { + !matches + } else { + matches + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum LineNo { + Line(usize), + Eof, +} + +#[derive(Clone)] +#[non_exhaustive] +pub enum Selector { + /// Match all lines + All, + /// End of file (useful to insert lines at the very end) + Eof, + /// Match a specific line number (1-indexed) + Line(usize), + /// A range of line numbers (1-indexed, inclusive) + Range(usize, usize), + /// A regex to match the line + Regex(Regex), + /// A custom function, passed the line number and current line + #[allow(clippy::type_complexity)] + Function(Rc bool>), +} + +impl Selector { + fn matches(&self, line_no: LineNo, line: &str) -> bool { + match self { + Selector::All => true, + Selector::Eof => line_no == LineNo::Eof, + Selector::Line(v) => line_no == LineNo::Line(*v), + Selector::Range(l, u) => match line_no { + LineNo::Line(line_no) => line_no >= *l && line_no <= *u, + _ => false, + }, + Selector::Regex(re) => re.is_match(line), + Selector::Function(func) => match line_no { + LineNo::Line(line_no) => func(line_no, line), + _ => false, + }, + } + } +} + +impl Debug for Selector { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::All => write!(f, "All"), + Self::Eof => write!(f, "Eof"), + Self::Line(arg0) => f.debug_tuple("Line").field(arg0).finish(), + Self::Range(arg0, arg1) => f.debug_tuple("Range").field(arg0).field(arg1).finish(), + Self::Regex(arg0) => f.debug_tuple("Regex").field(arg0).finish(), + Self::Function(_) => f.debug_tuple("Function").finish(), + } + } +} + +#[derive(Clone)] +#[non_exhaustive] +pub enum Action { + /// Copy the current line to the output. Only needed when auto-print is disabled. 
+ Print, + /// Delete the current line and short circuit the rest of the program (immediately go to the next line) + Delete, + /// Replace pattern space with next line (will print unless auto-print is disabled) + NextLine, + /// Stop processing the input and program and terminate early (do not print rest of file) + Stop, + /// Stop processing the input and program and terminate early (auto-print rest of file) + StopAndPrint, + /// Insert a new line *before* the current line + InsertBefore(CompactString), + /// Insert a new line *after* the current line + InsertAfter(CompactString), + /// Replace the entire current string with the given string + Replace(CompactString), + /// Do a regex search and replace in the current line + /// + /// Capture groups in the replacement string works as with [`Regex::replace`]. + RegexReplace { + regex: Regex, + replacement: CompactString, + replace_all: bool, + }, + /// A sub-program that is executed. Will share pattern space with parent program + Subprogram(Rc>), + /// Call a custom function to determine the new line + #[allow(clippy::type_complexity)] + Function(Rc Cow<'_, str>>), +} + +impl Action { + fn apply(&self, pattern_space: &mut String, output: &mut String) -> ActionResult { + match self { + Action::Print => { + output.push_str(pattern_space); + output.push('\n'); + } + Action::Delete => { + pattern_space.clear(); + return ActionResult::ShortCircuit; + } + Action::Stop => return ActionResult::Stop, + Action::StopAndPrint => return ActionResult::StopAndPrint, + Action::InsertBefore(s) => { + let old_pattern_space = std::mem::take(pattern_space); + *pattern_space = s.to_string(); + pattern_space.push('\n'); + pattern_space.push_str(&old_pattern_space); + } + Action::InsertAfter(s) => { + pattern_space.push('\n'); + pattern_space.push_str(s); + } + Action::Replace(s) => { + *pattern_space = s.to_string(); + } + Action::RegexReplace { + regex, + replacement, + replace_all, + } => { + let ret = if *replace_all { + regex.replace_all(pattern_space, replacement.as_str()) + } else { + regex.replace(pattern_space, replacement.as_str()) + }; + match ret { + Cow::Borrowed(_) => (), + Cow::Owned(new_val) => *pattern_space = new_val, + }; + } + Action::Function(func) => { + let new_val = func(pattern_space); + *pattern_space = new_val.into_owned(); + } + Action::NextLine => return ActionResult::NextLine, + Action::Subprogram(prog) => return ActionResult::Subprogram(prog.clone()), + } + ActionResult::Continue + } +} + +impl Debug for Action { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Print => write!(f, "Print"), + Self::Delete => write!(f, "Delete"), + Self::Stop => write!(f, "Stop"), + Self::StopAndPrint => write!(f, "StopAndPrint"), + Self::InsertBefore(arg0) => f.debug_tuple("InsertBefore").field(arg0).finish(), + Self::InsertAfter(arg0) => f.debug_tuple("InsertAfter").field(arg0).finish(), + Self::Replace(arg0) => f.debug_tuple("Replace").field(arg0).finish(), + Self::RegexReplace { + regex, + replacement, + replace_all, + } => f + .debug_struct("RegexReplace") + .field("regex", regex) + .field("replacement", replacement) + .field("replace_all", &replace_all) + .finish(), + Self::Function(_) => f.debug_tuple("Function").finish(), + Self::NextLine => write!(f, "LoadNextLine"), + Self::Subprogram(arg0) => f.debug_tuple("Subprogram").field(arg0).finish(), + } + } +} + +#[derive(Debug, Clone)] +enum ActionResult { + Continue, + NextLine, + Subprogram(Rc>), + ShortCircuit, + Stop, + StopAndPrint, +} + 
+#[cfg(test)] +mod tests { + + use super::*; + + #[test] + fn test_regex_replace() { + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::RegexReplace { + regex: Regex::new("^foo$").unwrap(), + replacement: "bar".into(), + replace_all: false, + }, + ); + let input = "foo\nbar\nbaz"; + let output = program.apply(input); + assert_eq!(output, "bar\nbar\nbaz\n"); + } + + #[test] + fn test_regex_replace_no_anchors() { + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::RegexReplace { + regex: Regex::new("foo").unwrap(), + replacement: "bar".into(), + replace_all: false, + }, + ); + let input = "foo foo\nbar\nbaz"; + let output = program.apply(input); + assert_eq!(output, "bar foo\nbar\nbaz\n"); + + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::RegexReplace { + regex: Regex::new("foo").unwrap(), + replacement: "bar".into(), + replace_all: true, + }, + ); + let input = "foo foo\nbar\nbaz"; + let output = program.apply(input); + assert_eq!(output, "bar bar\nbar\nbaz\n"); + } + + #[test] + fn test_regex_replace_capture_groups() { + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::RegexReplace { + regex: Regex::new("f(a|o)o").unwrap(), + replacement: "b${1}r".into(), + replace_all: true, + }, + ); + let input = "foo\nfao foo fee\nbar\nbaz"; + let output = program.apply(input); + assert_eq!(output, "bor\nbar bor fee\nbar\nbaz\n"); + + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::RegexReplace { + regex: Regex::new("f(a|o)o").unwrap(), + replacement: "b${1}r".into(), + replace_all: false, + }, + ); + let input = "foo\nfoo\nfao foo fee\nbar\nbaz"; + let output = program.apply(input); + assert_eq!(output, "bor\nbor\nbar foo fee\nbar\nbaz\n"); + } + + #[test] + fn test_insert_before() { + let mut program = EditProgram::new(); + program.add(Selector::Line(2), false, Action::InsertBefore("foo".into())); + let input = "bar\nbaz\nquux"; + let output = program.apply(input); + assert_eq!(output, "bar\nfoo\nbaz\nquux\n"); + } + + #[test] + fn test_insert_after() { + let mut program = EditProgram::new(); + program.add( + Selector::Regex(Regex::new("^q").unwrap()), + false, + Action::InsertAfter("foo".into()), + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nbaz\nquux\nfoo\nquack\nfoo\n"); + } + + #[test] + fn test_replace() { + let mut program = EditProgram::new(); + program.add(Selector::Range(2, 3), false, Action::Replace("foo".into())); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nfoo\nfoo\nquack\n"); + + // Test inverted selector + let mut program = EditProgram::new(); + program.add(Selector::Range(2, 3), true, Action::Replace("foo".into())); + let output = program.apply(input); + assert_eq!(output, "foo\nbaz\nquux\nfoo\n"); + } + + #[test] + fn test_function() { + let mut program = EditProgram::new(); + program.add( + Selector::All, + false, + Action::Function(Rc::new(|line| { + if line == "bar" { + Cow::Borrowed("baz") + } else { + Cow::Borrowed(line) + } + })), + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "baz\nbaz\nquux\nquack\n"); + } + + #[test] + fn test_selector_function() { + let mut program = EditProgram::new(); + program.disable_default_printing(); + program.add( + Selector::Function(Rc::new(|line_no, _line| line_no % 2 == 0)), 
+ false, + Action::Print, + ); + let input = "bar\nbaz\nquux\nquack\nhuzza\nbar"; + let output = program.apply(input); + assert_eq!(output, "baz\nquack\nbar\n"); + + let mut program = EditProgram::new(); + program.add( + Selector::Function(Rc::new(|line_no, _line| line_no % 2 == 0)), + false, + Action::Delete, + ); + let input = "bar\nbaz\nquux\nquack\nhuzza\nbar"; + let output = program.apply(input); + assert_eq!(output, "bar\nquux\nhuzza\n"); + } + + #[test] + fn test_delete() { + let mut program = EditProgram::new(); + program.add(Selector::Line(2), false, Action::Delete); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nquux\nquack\n"); + + // Test inverted selector + let mut program = EditProgram::new(); + program.add( + Selector::Regex(Regex::new("x$").unwrap()), + true, + Action::Delete, + ); + let output = program.apply(input); + assert_eq!(output, "quux\n"); + } + + #[test] + fn test_stop() { + let mut program = EditProgram::new(); + program.add( + Selector::Regex(Regex::new("x").unwrap()), + false, + Action::Stop, + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nbaz\n"); + + let mut program = EditProgram::new(); + program.add(Selector::All, false, Action::Replace("foo".into())); + program.add( + Selector::Regex(Regex::new("x").unwrap()), + false, + Action::Stop, + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "foo\nfoo\n"); + } + + #[test] + fn test_stop_and_print() { + let mut program = EditProgram::new(); + program.add( + Selector::Regex(Regex::new("x").unwrap()), + false, + Action::StopAndPrint, + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nbaz\nquux\nquack\n"); + + let mut program = EditProgram::new(); + program.add(Selector::All, false, Action::Replace("foo".into())); + program.add( + Selector::Regex(Regex::new("x").unwrap()), + false, + Action::StopAndPrint, + ); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "foo\nfoo\nfoo\nquack\n"); + } + + #[test] + fn test_print() { + let mut program = EditProgram::new(); + program.disable_default_printing(); + program.add(Selector::Range(2, 3), false, Action::Print); + program.add(Selector::Range(3, 4), false, Action::Print); + let input = "bar\nbaz\nquux\nquack\nhuzza"; + let output = program.apply(input); + assert_eq!(output, "baz\nquux\nquux\nquack\n"); + } + + #[test] + fn test_eof() { + let mut program = EditProgram::new(); + program.add(Selector::Eof, false, Action::InsertBefore("foo".into())); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nbaz\nquux\nquack\nfoo\n"); + + let mut program = EditProgram::new(); + program.add(Selector::Eof, false, Action::InsertAfter("foo".into())); + program.add(Selector::Eof, false, Action::InsertAfter("bar".into())); + let input = "bar\nbaz\nquux\nquack"; + let output = program.apply(input); + assert_eq!(output, "bar\nbaz\nquux\nquack\nfoo\nbar\n"); + } + + #[test] + fn test_subprogram() { + let mut subprogram = EditProgram::new(); + subprogram.add(Selector::All, false, Action::Replace("foo".into())); + subprogram.add(Selector::All, false, Action::NextLine); + subprogram.add(Selector::All, false, Action::Replace("bar".into())); + let mut program = EditProgram::new(); + program.add( + Selector::Regex(Regex::new("quux").unwrap()), + false, + 
Action::Subprogram(Rc::new(RefCell::new(subprogram))), + ); + let input = "bar\nquux\nquack\nx\ny"; + let output = program.apply(input); + assert_eq!(output, "bar\nfoo\nbar\nx\ny\n"); + } +} diff --git a/crates/konfigkoll_core/src/save.rs b/crates/konfigkoll_core/src/save.rs new file mode 100644 index 00000000..b2fbc3b0 --- /dev/null +++ b/crates/konfigkoll_core/src/save.rs @@ -0,0 +1,231 @@ +//! Generate a stream of commands that would create the current system state + +use anyhow::Context; +use camino::Utf8Path; +use compact_str::{format_compact, CompactString}; +use itertools::Itertools; +use konfigkoll_types::{FileContents, FsInstruction, PkgIdent, PkgInstruction}; + +/// Save file system changes +/// +/// Takes a fn that is repsonsible for writing out the file data to a location in the config directory. +/// It should put the file in the standard location (`files/input_file_path`, e.g `files/etc/fstab`) +/// +/// Precondition: The instructions are sorted by default sort order (path, op) +pub fn save_fs_changes<'instruction>( + output: &mut dyn std::io::Write, + mut file_data_saver: impl FnMut(&Utf8Path, &FileContents) -> anyhow::Result<()>, + instructions: impl Iterator, +) -> anyhow::Result<()> { + for instruction in instructions { + let comment = match instruction.comment { + Some(ref comment) => format_compact!(" // {}", comment), + None => CompactString::default(), + }; + match instruction.op { + konfigkoll_types::FsOp::Remove => { + writeln!(output, " cmds.rm(\"{}\")?;{}", instruction.path, comment)?; + } + konfigkoll_types::FsOp::CreateFile(ref contents) => { + file_data_saver(&instruction.path, contents).with_context(|| { + format!("Failed to save {} to config directory", instruction.path) + })?; + writeln!( + output, + " cmds.copy(\"{}\")?;{}", + instruction.path, comment + )?; + } + konfigkoll_types::FsOp::CreateSymlink { ref target } => { + writeln!( + output, + " cmds.symlink(\"{}\", \"{}\")?;{}", + instruction.path, target, comment + )?; + } + konfigkoll_types::FsOp::CreateDirectory => { + writeln!( + output, + " cmds.mkdir(\"{}\")?;{}", + instruction.path, comment + )?; + } + konfigkoll_types::FsOp::CreateFifo => { + writeln!( + output, + " cmds.mkfifo(\"{}\")?;{}", + instruction.path, comment + )?; + } + konfigkoll_types::FsOp::CreateBlockDevice { major, minor } => { + writeln!( + output, + " cmds.mknod(\"{}\", \"b\", {}, {})?;{}", + instruction.path, major, minor, comment + )?; + } + konfigkoll_types::FsOp::CreateCharDevice { major, minor } => { + writeln!( + output, + " cmds.mknod(\"{}\", \"c\", {}, {})?;{}", + instruction.path, major, minor, comment + )?; + } + konfigkoll_types::FsOp::SetMode { mode } => { + writeln!( + output, + " cmds.chmod(\"{}\", 0o{:o})?;{}", + instruction.path, + mode.as_raw(), + comment + )?; + } + konfigkoll_types::FsOp::SetOwner { ref owner } => { + writeln!( + output, + " cmds.chown(\"{}\", \"{}\")?;{}", + instruction.path, owner, comment + )?; + } + konfigkoll_types::FsOp::SetGroup { ref group } => { + writeln!( + output, + " cmds.chgrp(\"{}\", \"{}\")?;{}", + instruction.path, group, comment + )?; + } + konfigkoll_types::FsOp::Comment => { + writeln!(output, " // {}: {}", instruction.path, comment)?; + } + konfigkoll_types::FsOp::Restore { .. 
} => { + writeln!( + output, + " restore({}) // Restore this file to original package manager state{}", + instruction.path, comment + )?; + } + } + } + Ok(()) +} + +/// Save package changes +pub fn save_packages<'instructions>( + output: &mut dyn std::io::Write, + instructions: impl Iterator, +) -> anyhow::Result<()> { + let instructions = instructions + .into_iter() + .sorted_unstable_by(|(ak, av), (bk, bv)| { + av.op + .cmp(&bv.op) + .then_with(|| ak.package_manager.cmp(&bk.package_manager)) + .then_with(|| ak.identifier.cmp(&bk.identifier)) + }); + + for (pkg_ident, pkg_instruction) in instructions.into_iter() { + let comment = match &pkg_instruction.comment { + Some(comment) => format_compact!(" // {}", comment), + None => CompactString::default(), + }; + match pkg_instruction.op { + konfigkoll_types::PkgOp::Uninstall => { + writeln!( + output, + " cmds.remove_pkg(\"{}\", \"{}\")?;{}", + pkg_ident.package_manager, pkg_ident.identifier, comment + )?; + } + konfigkoll_types::PkgOp::Install => { + writeln!( + output, + " cmds.add_pkg(\"{}\", \"{}\")?;{}", + pkg_ident.package_manager, pkg_ident.identifier, comment + )?; + } + } + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use paketkoll_types::backend::Backend; + use pretty_assertions::assert_eq; + use std::collections::HashMap; + + use camino::{Utf8Path, Utf8PathBuf}; + use konfigkoll_types::{ + FileContents, FsInstruction, FsOp, PkgIdent, PkgInstruction, PkgInstructions, PkgOp, + }; + + use super::*; + + #[test] + fn test_save_fs_changes() { + let mut output = Vec::new(); + let mut file_data = HashMap::new(); + let file_data_saver = |path: &Utf8Path, contents: &FileContents| { + file_data.insert(path.to_owned(), contents.clone()); + Ok(()) + }; + + let instructions = vec![ + FsInstruction { + op: FsOp::CreateFile(FileContents::from_literal("hello".as_bytes().into())), + path: Utf8PathBuf::from("/hello/world"), + comment: None, + }, + FsInstruction { + op: FsOp::Remove, + path: Utf8PathBuf::from("/remove_me"), + comment: Some("For reasons!".into()), + }, + ]; + + save_fs_changes(&mut output, file_data_saver, instructions.iter()).unwrap(); + + let expected = + " cmds.copy(\"/hello/world\")?;\n cmds.rm(\"/remove_me\")?; // For reasons!\n"; + assert_eq!(String::from_utf8(output).unwrap(), expected); + assert_eq!( + file_data.get(Utf8Path::new("/hello/world")).unwrap(), + &FileContents::from_literal("hello".as_bytes().into()) + ); + } + + #[test] + fn test_save_packages() { + let mut output = Vec::new(); + let mut instructions = PkgInstructions::default(); + instructions.insert( + PkgIdent { + package_manager: Backend::Pacman, + identifier: "bash".into(), + }, + PkgInstruction { + op: PkgOp::Install, + comment: None, + }, + ); + instructions.insert( + PkgIdent { + package_manager: Backend::Apt, + identifier: "zsh".into(), + }, + PkgInstruction { + op: PkgOp::Uninstall, + comment: Some("A comment".into()), + }, + ); + + save_packages( + &mut output, + instructions.iter().map(|(a, b)| (a, b.clone())).sorted(), + ) + .unwrap(); + + let expected = " cmds.remove_pkg(\"apt\", \"zsh\")?; // A comment\n cmds.add_pkg(\"pacman\", \"bash\")?;\n"; + assert_eq!(String::from_utf8(output).unwrap(), expected); + } +} diff --git a/crates/konfigkoll_core/src/state.rs b/crates/konfigkoll_core/src/state.rs new file mode 100644 index 00000000..847f9467 --- /dev/null +++ b/crates/konfigkoll_core/src/state.rs @@ -0,0 +1,693 @@ +//! 
State representation of file system + +use std::{collections::BTreeMap, sync::Arc}; + +use anyhow::anyhow; +use camino::{Utf8Path, Utf8PathBuf}; +use compact_str::CompactString; +use konfigkoll_types::{FileContents, FsInstruction, FsOp}; +use paketkoll_types::{ + backend::Files, + files::{Mode, PathMap, Properties}, +}; + +use crate::utils::{IdKey, NumericToNameResolveCache}; + +const DEFAULT_FILE_MODE: Mode = Mode::new(0o644); +const DEFAULT_DIR_MODE: Mode = Mode::new(0o755); +const ROOT: CompactString = CompactString::const_new("root"); + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +struct FsNode { + entry: FsEntry, + mode: Option, + owner: Option, + group: Option, + /// Keep track of if this node was removed before being added back. + /// Needed for handling type conflicts correctly. + removed_before_added: bool, + /// Optional comment for saving purposes + comment: Option, +} + +// This is a macro due to partial moving of self +macro_rules! fsnode_into_base_instruction { + ($this:ident, $path:tt) => { + match $this.entry { + FsEntry::Removed => Some(FsInstruction { + path: $path.into(), + op: FsOp::Remove, + comment: $this.comment, + }), + FsEntry::Unchanged => None, + FsEntry::Directory => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateDirectory, + comment: $this.comment, + }), + FsEntry::File(contents) => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateFile(contents), + comment: $this.comment, + }), + FsEntry::Symlink { target } => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateSymlink { target }, + comment: $this.comment, + }), + FsEntry::Fifo => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateFifo, + comment: $this.comment, + }), + FsEntry::BlockDevice { major, minor } => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateBlockDevice { major, minor }, + comment: $this.comment, + }), + FsEntry::CharDevice { major, minor } => Some(FsInstruction { + path: $path.into(), + op: FsOp::CreateCharDevice { major, minor }, + comment: $this.comment, + }), + } + }; +} + +impl FsNode { + fn into_instruction(self, path: &Utf8Path) -> impl Iterator { + let mut results = vec![]; + let mut do_metadata = true; + let mut was_symlink = false; + let default_mode = match &self.entry { + FsEntry::Removed => None, + FsEntry::Unchanged => None, + FsEntry::Directory => Some(DEFAULT_DIR_MODE), + FsEntry::File(_) => Some(DEFAULT_FILE_MODE), + FsEntry::Symlink { .. } => None, + FsEntry::Fifo | FsEntry::BlockDevice { .. } | FsEntry::CharDevice { .. } => { + Some(DEFAULT_FILE_MODE) + } + }; + + if self.removed_before_added && self.entry != FsEntry::Removed { + results.push(FsInstruction { + path: path.into(), + op: FsOp::Remove, + comment: Some("Removed (and later recreated) due to file type conflict".into()), + }); + } + match &self.entry { + FsEntry::Removed => { + do_metadata = false; + } + FsEntry::Symlink { .. 
} => { + was_symlink = true; + } + _ => (), + } + if let Some(instr) = fsnode_into_base_instruction!(self, path) { + results.push(instr); + } + + if do_metadata { + if !was_symlink && self.mode != default_mode { + if let Some(mode) = self.mode { + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetMode { mode }, + comment: None, + }); + } + } + if let Some(owner) = self.owner { + if owner != ROOT { + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetOwner { owner }, + comment: None, + }); + } + } + if let Some(group) = self.group { + if group != ROOT { + results.push(FsInstruction { + path: path.into(), + op: FsOp::SetGroup { group }, + comment: None, + }); + } + } + } + + results.into_iter() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, strum::EnumDiscriminants)] +enum FsEntry { + /// Negative entry: This has been removed + Removed, + /// Unchanged, we only got a mode/owner/group change + Unchanged, + /// A directory + Directory, + /// A file + File(FileContents), + /// A symlink + Symlink { target: camino::Utf8PathBuf }, + /// Create a FIFO + Fifo, + /// Create a block device + BlockDevice { major: u64, minor: u64 }, + /// Create a character device + CharDevice { major: u64, minor: u64 }, +} + +#[derive(Debug, Default, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct FsEntries { + fs: BTreeMap, +} + +impl FsEntries { + /// Apply a stream of instructions to this `FsEntries` + pub fn apply_instructions( + &mut self, + instructions: impl Iterator, + warn_redundant: bool, + ) { + for instr in instructions { + match instr.op { + FsOp::Remove => { + self.fs.insert( + instr.path, + FsNode { + entry: FsEntry::Removed, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: true, + comment: instr.comment, + }, + ); + } + FsOp::CreateDirectory => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::Directory, + mode: Some(DEFAULT_DIR_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::CreateFile(contents) => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::File(contents), + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::CreateSymlink { target } => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::Symlink { target }, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::CreateFifo => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::Fifo, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::CreateBlockDevice { major, minor } => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::BlockDevice { major, minor }, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::CreateCharDevice { major, minor } => { + self.replace_node( + instr.path, + FsNode { + entry: FsEntry::CharDevice { major, minor }, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: instr.comment, + }, + ); + } + FsOp::SetMode { mode } => { + self.fs + .entry(instr.path.clone()) + .and_modify(|entry| { + if warn_redundant && 
entry.mode == Some(mode) { + tracing::warn!("Redundant mode set for: {:?}", &instr.path); + } + entry.mode = Some(mode); + }) + .or_insert_with(|| FsNode { + entry: FsEntry::Unchanged, + mode: Some(mode), + owner: None, + group: None, + removed_before_added: false, + comment: instr.comment, + }); + } + FsOp::SetOwner { ref owner } => { + self.fs + .entry(instr.path.clone()) + .and_modify(|entry| { + if warn_redundant && entry.owner.as_ref() == Some(owner) { + tracing::warn!("Redundant owner set for: {:?}", &instr.path); + } + entry.owner = Some(owner.clone()); + }) + .or_insert_with(|| FsNode { + entry: FsEntry::Unchanged, + mode: None, + owner: Some(owner.clone()), + group: None, + removed_before_added: false, + comment: instr.comment, + }); + } + FsOp::SetGroup { ref group } => { + self.fs + .entry(instr.path.clone()) + .and_modify(|entry| { + if warn_redundant && entry.group.as_ref() == Some(group) { + tracing::warn!("Redundant group set for: {:?}", &instr.path); + } + entry.group = Some(group.clone()); + }) + .or_insert_with(|| FsNode { + entry: FsEntry::Unchanged, + mode: None, + owner: None, + group: Some(group.clone()), + removed_before_added: false, + comment: instr.comment, + }); + } + FsOp::Comment => (), + FsOp::Restore { .. } => { + tracing::error!( + "Restore operation not supported as *input* to state::apply_instructions" + ); + } + } + } + } + + /// Replace a node, taking into account if it was removed before being added back. + fn replace_node(&mut self, path: Utf8PathBuf, new_node: FsNode) { + self.add_missing_parents(&path); + let entry = self.fs.entry(path).or_insert(FsNode { + entry: FsEntry::Removed, + mode: Some(Mode::new(0)), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: None, + }); + entry.entry = new_node.entry; + entry.mode = new_node.mode; + entry.owner = new_node.owner; + entry.group = new_node.group; + entry.comment = new_node.comment; + } + + /// Add missing directory parents for a given node + fn add_missing_parents(&mut self, path: &Utf8Path) { + for parent in path.ancestors() { + self.fs.entry(parent.into()).or_insert_with(|| FsNode { + entry: FsEntry::Directory, + mode: Some(DEFAULT_DIR_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: None, + }); + } + } +} + +/// Describe the goal of the diff: is it for saving or for application/diff +/// +/// This will affect the exact instructions that gets generated +#[derive(Debug, Clone, strum::EnumDiscriminants)] +pub enum DiffGoal<'map, 'files> { + Apply(Arc, &'map PathMap<'files>), + Save, +} + +impl PartialEq for DiffGoal<'_, '_> { + fn eq(&self, other: &Self) -> bool { + #[allow(clippy::match_like_matches_macro)] + match (self, other) { + (DiffGoal::Apply(_, _), DiffGoal::Apply(_, _)) => true, + (DiffGoal::Save, DiffGoal::Save) => true, + _ => false, + } + } +} + +// Generate a stream of instructions to go from state before to state after +#[tracing::instrument(level = "debug", skip_all)] +pub fn diff( + goal: &DiffGoal<'_, '_>, + before: FsEntries, + after: FsEntries, +) -> anyhow::Result> { + let diff_iter = itertools::merge_join_by(before.fs, after.fs, |(k1, _), (k2, _)| k1.cmp(k2)); + + let mut results = vec![]; + + let mut id_resolver = NumericToNameResolveCache::new(); + + for entry in diff_iter { + match entry { + itertools::EitherOrBoth::Both(before, after) if before.1 == after.1 => {} + itertools::EitherOrBoth::Both(before, after) => { + // tracing::debug!("{:?} -> {:?}", before, after); + // Compare the structs and generate 
a stream of instructions + let path = before.0; + let before = before.1; + let after = after.1; + + if before.entry != after.entry { + let before_discr = FsEntryDiscriminants::from(&before.entry); + let after_discr = FsEntryDiscriminants::from(&after.entry); + + if before.removed_before_added || before_discr != after_discr { + // The entry was removed before being added back, generate a removal + results.push(FsInstruction { + path: path.clone(), + op: FsOp::Remove, + comment: Some( + "Removed (and later recreated) due to file type conflict".into(), + ), + }); + } + // Just the properties of it has changed + let path = path.as_path(); + if let Some(instr) = fsnode_into_base_instruction!(after, path) { + results.push(instr); + } + } + + match (before.mode, after.mode) { + (None, None) => (), + (Some(_), None) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::Comment, + comment: Some("Mode change unneeded".into()), + }); + } + (Some(v1), Some(v2)) if v1 == v2 => (), + (None, Some(v)) | (Some(_), Some(v)) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::SetMode { mode: v }, + comment: None, + }); + } + } + match (before.owner, after.owner) { + (None, None) => (), + (Some(_), None) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::Comment, + comment: Some("Owner change unneeded".into()), + }); + } + (Some(v1), Some(v2)) if v1 == v2 => (), + (None, Some(v)) | (Some(_), Some(v)) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::SetOwner { owner: v }, + comment: None, + }); + } + } + match (before.group, after.group) { + (None, None) => (), + (Some(_), None) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::Comment, + comment: Some("Group change unneeded".into()), + }); + } + (Some(v1), Some(v2)) if v1 == v2 => (), + (None, Some(v)) | (Some(_), Some(v)) => { + results.push(FsInstruction { + path: path.clone(), + op: FsOp::SetGroup { group: v }, + comment: None, + }); + } + } + } + itertools::EitherOrBoth::Left(before) => { + // tracing::debug!("{:?} -> ()", before); + match goal { + DiffGoal::Apply(ref _backend_impl, path_map) => { + // Figure out what the previous state of this file was: + match path_map.get(before.0.as_std_path()) { + Some(entry) => { + if before.1.entry != FsEntry::Unchanged { + match entry.properties { + Properties::RegularFileBasic(_) + | Properties::RegularFileSystemd(_) + | Properties::RegularFile(_) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::Restore, + comment: before.1.comment, + }); + } + Properties::Symlink(ref v) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::CreateSymlink { + target: Utf8Path::from_path(&v.target) + .ok_or_else(|| anyhow!("Invalid UTF-8"))? 
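+                                            // A symlink target that is not valid UTF-8 cannot be
+                                            // represented here; the error aborts diff generation.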
+ .into(), + }, + comment: before.1.comment, + }); + } + Properties::Directory(_) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::CreateDirectory, + comment: before.1.comment, + }); + } + Properties::Fifo(_) + | Properties::DeviceNode(_) + | Properties::Permissions(_) + | Properties::Special + | Properties::Removed => { + anyhow::bail!("{:?} needs to be restored to package manager state, but how do to that is not yet implemented", entry.path) + } + Properties::Unknown => { + anyhow::bail!("{:?} needs to be restored to package manager state, but how do to that is unknown", entry.path) + } + } + } + match (entry.properties.mode(), before.1.mode) { + (None, None) | (None, Some(_)) | (Some(_), None) => (), + (Some(v1), Some(v2)) if v1 == v2 => (), + (Some(v1), Some(_)) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::SetMode { mode: v1 }, + comment: None, + }); + } + } + let fs_owner = entry + .properties + .owner() + .map(|v| id_resolver.lookup(&IdKey::User(v))) + .transpose()?; + match (fs_owner, before.1.owner) { + (None, None) | (None, Some(_)) | (Some(_), None) => (), + (Some(v1), Some(v2)) if v1 == v2 => (), + (Some(v1), Some(_)) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::SetOwner { owner: v1 }, + comment: None, + }); + } + } + let fs_group = entry + .properties + .group() + .map(|v| id_resolver.lookup(&IdKey::Group(v))) + .transpose()?; + match (fs_group, before.1.group) { + (None, None) | (None, Some(_)) | (Some(_), None) => (), + (Some(v1), Some(v2)) if v1 == v2 => (), + (Some(v1), Some(_)) => { + results.push(FsInstruction { + path: before.0.clone(), + op: FsOp::SetGroup { group: v1 }, + comment: None, + }); + } + } + } + None => { + results.push(FsInstruction { + path: before.0, + op: FsOp::Remove, + comment: before.1.comment, + }); + } + } + } + DiffGoal::Save => { + // Generate instructions to remove the entry + results.push(FsInstruction { + path: before.0, + op: FsOp::Remove, + comment: before.1.comment, + }); + // TODO: Do something special when the before instruction is a removal one?I + } + } + } + itertools::EitherOrBoth::Right(after) => { + // tracing::debug!("() -> {:?}", after); + results.extend(after.1.into_instruction(&after.0)); + } + } + } + + Ok(results.into_iter()) +} + +#[cfg(test)] +mod tests { + use FsOp; + + use super::*; + + #[test] + fn test_apply_instructions() { + let mut entries = FsEntries::default(); + let instrs = vec![ + FsInstruction { + path: "/hello/symlink".into(), + op: FsOp::CreateSymlink { + target: "/hello/target".into(), + }, + comment: None, + }, + FsInstruction { + path: "/hello/file".into(), + op: FsOp::CreateFile(FileContents::from_literal( + b"hello".to_vec().into_boxed_slice(), + )), + comment: Some("A comment".into()), + }, + FsInstruction { + path: "/hello/file".into(), + op: FsOp::SetMode { + mode: Mode::new(0o600), + }, + comment: None, + }, + ]; + entries.apply_instructions(instrs.into_iter(), false); + + assert_eq!( + entries.fs.get(Utf8Path::new("/hello/symlink")), + Some(&FsNode { + entry: FsEntry::Symlink { + target: "/hello/target".into() + }, + mode: Some(DEFAULT_FILE_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: None, + }) + ); + assert_eq!( + entries.fs.get(Utf8Path::new("/hello/file")), + Some(&FsNode { + entry: FsEntry::File(FileContents::from_literal( + b"hello".to_vec().into_boxed_slice() + )), + mode: Some(Mode::new(0o600)), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, 
+ comment: Some("A comment".into()), + }) + ); + assert_eq!( + entries.fs.get(Utf8Path::new("/hello")), + Some(&FsNode { + entry: FsEntry::Directory, + mode: Some(DEFAULT_DIR_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: None, + }) + ); + assert_eq!( + entries.fs.get(Utf8Path::new("/")), + Some(&FsNode { + entry: FsEntry::Directory, + mode: Some(DEFAULT_DIR_MODE), + owner: Some(ROOT), + group: Some(ROOT), + removed_before_added: false, + comment: None, + }) + ); + } +} diff --git a/crates/konfigkoll_core/src/utils.rs b/crates/konfigkoll_core/src/utils.rs new file mode 100644 index 00000000..b2b87a98 --- /dev/null +++ b/crates/konfigkoll_core/src/utils.rs @@ -0,0 +1,137 @@ +//! Utilities + +use std::num::NonZeroUsize; + +use anyhow::anyhow; +use camino::{Utf8Path, Utf8PathBuf}; +use clru::CLruCache; +use compact_str::CompactString; +use paketkoll_types::files::{Gid, Uid}; + +/// UID/GID to name resolver / cache +#[derive(Debug)] +pub(crate) struct IdResolveCache { + cache: CLruCache, +} + +impl IdResolveCache +where + Key: PartialEq + Eq + std::hash::Hash, +{ + /// Create a new instance + pub(crate) fn new() -> Self { + Self { + cache: CLruCache::with_hasher( + NonZeroUsize::new(100).expect("Compile time constant"), + ahash::RandomState::new(), + ), + } + } +} + +impl Default for IdResolveCache +where + Key: PartialEq + Eq + std::hash::Hash, +{ + fn default() -> Self { + Self::new() + } +} + +pub(crate) type IdKeyId = IdKey; +pub(crate) type IdKeyName = IdKey; + +pub(crate) type NumericToNameResolveCache = IdResolveCache; +pub(crate) type NameToNumericResolveCache = IdResolveCache; + +impl IdResolveCache, CompactString> { + /// Lookup a UID/GID (resolving and caching if necessary) + pub(crate) fn lookup(&mut self, key: &IdKey) -> anyhow::Result { + match self.cache.get(key) { + Some(v) => Ok(v.clone()), + None => { + // Resolve + let name: CompactString = match key { + IdKey::User(uid) => { + nix::unistd::User::from_uid(uid.into())? + .ok_or_else(|| anyhow!("Failed to find user with ID {}", uid))? + .name + } + IdKey::Group(gid) => { + nix::unistd::Group::from_gid(gid.into())? + .ok_or_else(|| anyhow!("Failed to find group with ID {}", gid))? + .name + } + } + .into(); + self.cache.put(*key, name.clone()); + Ok(name) + } + } + } +} + +impl IdResolveCache, u32> { + /// Lookup a UID/GID (resolving and caching if necessary) + pub(crate) fn lookup( + &mut self, + key: &IdKey, + ) -> anyhow::Result { + match self.cache.get(key) { + Some(v) => Ok(*v), + None => { + // Resolve + let id = match key { + IdKey::User(user) => nix::unistd::User::from_name(user.as_str())? + .ok_or_else(|| anyhow!("Failed to find user with ID {}", user))? + .uid + .as_raw(), + IdKey::Group(group) => nix::unistd::Group::from_name(group.as_str())? + .ok_or_else(|| anyhow!("Failed to find group with ID {}", group))? 
+ .gid + .as_raw(), + }; + self.cache.put(key.clone(), id); + Ok(id) + } + } + } +} +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub(crate) enum IdKey +where + UserKey: Clone + std::fmt::Debug + PartialEq + Eq + std::hash::Hash, + GroupKey: Clone + std::fmt::Debug + PartialEq + Eq + std::hash::Hash, +{ + User(UserKey), + Group(GroupKey), +} + +/// Safe path join that does not replace when the second path is absolute +pub fn safe_path_join(left: &Utf8Path, right: &Utf8Path) -> Utf8PathBuf { + let right = if right.is_absolute() { + right + .strip_prefix("/") + .expect("We know the path is aboslute") + } else { + right + }; + left.join(right) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_safe_path_join() { + assert_eq!( + safe_path_join(Utf8Path::new("/a/b"), Utf8Path::new("c/d")), + Utf8PathBuf::from("/a/b/c/d") + ); + assert_eq!( + safe_path_join(Utf8Path::new("/a/b"), Utf8Path::new("/c/d")), + Utf8PathBuf::from("/a/b/c/d") + ); + } +} diff --git a/crates/konfigkoll_hwinfo/Cargo.toml b/crates/konfigkoll_hwinfo/Cargo.toml new file mode 100644 index 00000000..a43cd909 --- /dev/null +++ b/crates/konfigkoll_hwinfo/Cargo.toml @@ -0,0 +1,25 @@ +[package] +description = "Hardware info provider for Konfigkoll" +edition = "2021" +license = "MPL-2.0" +name = "konfigkoll_hwinfo" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[dependencies] +ahash.workspace = true +anyhow.workspace = true +itertools.workspace = true +rune = { workspace = true, optional = true } +winnow = { workspace = true, features = ["simd"] } + +[lints] +workspace = true + +[dev-dependencies] +indoc.workspace = true +pretty_assertions = { workspace = true } + +[features] +rune = ["dep:rune"] diff --git a/crates/konfigkoll_hwinfo/README.md b/crates/konfigkoll_hwinfo/README.md new file mode 100644 index 00000000..13267a25 --- /dev/null +++ b/crates/konfigkoll_hwinfo/README.md @@ -0,0 +1,15 @@ +# konfigkoll_hwinfo + +Hardware information module for `KonfigKoll` + +This is a collection of functions that no other library seemed to provide. +You are free to use this, and this follows semver, but it isn't primarily +intended for third party consumption. + +Everything here is Linux only and should work without root access. + +## MSRV (Minimum Supported Rust Version) policy + +The MSRV may be bumped as needed. It is guaranteed that this library will at +least build on the current stable Rust release. An MSRV change is not considered +a breaking change and as such may change even in a patch version. diff --git a/crates/konfigkoll_hwinfo/src/lib.rs b/crates/konfigkoll_hwinfo/src/lib.rs new file mode 100644 index 00000000..cd521b50 --- /dev/null +++ b/crates/konfigkoll_hwinfo/src/lib.rs @@ -0,0 +1,9 @@ +//! Hardware information module for `KonfigKoll` +//! +//! This is a collection of functions that no other library seemed to provide. +//! You are free to use this, and this follows semver, but it isn't primarily +//! intended for third party consumption. +//! +//! Everything here is Linux only and should work without root access. + +pub mod pci; diff --git a/crates/konfigkoll_hwinfo/src/pci.rs b/crates/konfigkoll_hwinfo/src/pci.rs new file mode 100644 index 00000000..4636440f --- /dev/null +++ b/crates/konfigkoll_hwinfo/src/pci.rs @@ -0,0 +1,198 @@ +//! 
Utilities similar to pciutils to read PCI devices on Linux + +use ahash::AHashMap; + +mod parser; + +/// A database of PCI devices IDs +#[derive(Debug, PartialEq, Eq)] +#[cfg_attr(feature = "rune", derive(rune::Any))] +#[cfg_attr(feature = "rune", rune(item = ::sysinfo))] +pub struct PciIdDb { + pub classes: AHashMap, + pub vendors: AHashMap, +} + +impl PciIdDb { + /// Create from a string containing `pci.ids` + pub fn parse(s: &str) -> anyhow::Result { + parser::parse_pcidatabase(s) + } + + /// Create from a file containing `pci.ids` + pub fn parse_file(path: &std::path::Path) -> anyhow::Result { + let s = std::fs::read_to_string(path)?; + Self::parse(&s) + } +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Class { + pub name: String, + pub subclasses: AHashMap, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Subclass { + pub name: String, + pub program_interfaces: AHashMap, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct ProgrammingInterface { + pub name: String, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Vendor { + pub name: String, + pub devices: AHashMap, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Device { + pub name: String, + pub subsystems: AHashMap<(u16, u16), Subsystem>, +} + +#[derive(Debug, PartialEq, Eq)] +pub struct Subsystem { + pub name: String, +} + +/// Data about a PCI device +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "rune", derive(rune::Any))] +#[cfg_attr(feature = "rune", rune(item = ::sysinfo))] +pub struct PciDevice { + #[cfg_attr(feature = "rune", rune(get))] + pub class: u32, + #[cfg_attr(feature = "rune", rune(get))] + pub vendor: u16, + #[cfg_attr(feature = "rune", rune(get))] + pub device: u16, + #[cfg_attr(feature = "rune", rune(get))] + pub revision: u8, + #[cfg_attr(feature = "rune", rune(get))] + pub subsystem_vendor: u16, + #[cfg_attr(feature = "rune", rune(get))] + pub subsystem_device: u16, +} + +impl PciDevice { + /// Load data from /sys + fn from_directory(path: &std::path::Path) -> anyhow::Result { + let class = std::fs::read_to_string(path.join("class"))?; + let vendor = std::fs::read_to_string(path.join("vendor"))?; + let device = std::fs::read_to_string(path.join("device"))?; + let revision = std::fs::read_to_string(path.join("revision"))?; + let subsystem_vendor = std::fs::read_to_string(path.join("subsystem_vendor"))?; + let subsystem_device = std::fs::read_to_string(path.join("subsystem_device"))?; + Ok(Self { + class: u32::from_str_radix(&class, 16)?, + vendor: u16::from_str_radix(&vendor, 16)?, + device: u16::from_str_radix(&device, 16)?, + revision: u8::from_str_radix(&revision, 16)?, + subsystem_vendor: u16::from_str_radix(&subsystem_vendor, 16)?, + subsystem_device: u16::from_str_radix(&subsystem_device, 16)?, + }) + } + + /// Get the vendor, device and possibly subsystem names + pub fn vendor_names<'db>(&self, db: &'db PciIdDb) -> PciVendorLookup<&'db str> { + // Resolve vendor + let vendor = db.vendors.get(&self.vendor); + let device = vendor.and_then(|v| v.devices.get(&self.device)); + let subsystem = device.and_then(|d| { + d.subsystems + .get(&(self.subsystem_vendor, self.subsystem_device)) + }); + // The subvendor can be different than the main vendor + // See https://admin.pci-ids.ucw.cz/mods/PC/?action=help?help=pci + let subvendor = db.vendors.get(&self.subsystem_vendor); + + // Extract strings + PciVendorLookup { + vendor: vendor.map(|v| v.name.as_str()), + device: device.map(|d| d.name.as_str()), + subvendor: subvendor.map(|v| v.name.as_str()), + subdevice: subsystem.map(|s| s.name.as_str()), 
+ } + } + + /// Get the class, subclass and program interface names + pub fn class_strings<'db>(&self, db: &'db PciIdDb) -> PciClassLookup<&'db str> { + // Split up class 0xccsspp + let class = (self.class >> 16) as u8; + let subclass = (self.class >> 8) as u8; + let program_interface = self.class as u8; + + // Resolve hierarchy + let class = db.classes.get(&class); + let subclass = class.and_then(|c| c.subclasses.get(&subclass)); + let program_interface = subclass.and_then(|s| s.program_interfaces.get(&program_interface)); + + // Extract strings + PciClassLookup { + class: class.map(|c| c.name.as_str()), + subclass: subclass.map(|s| s.name.as_str()), + program_interface: program_interface.map(|p| p.name.as_str()), + } + } +} + +/// Result from [`PciDevice::vendor_names`] +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct PciVendorLookup { + pub vendor: Option, + pub device: Option, + pub subvendor: Option, + pub subdevice: Option, +} + +impl PciVendorLookup +where + S: ToOwned, +{ + pub fn to_owned(&self) -> PciVendorLookup { + PciVendorLookup { + vendor: self.vendor.as_ref().map(ToOwned::to_owned), + device: self.device.as_ref().map(ToOwned::to_owned), + subvendor: self.subvendor.as_ref().map(ToOwned::to_owned), + subdevice: self.subdevice.as_ref().map(ToOwned::to_owned), + } + } +} + +/// Result from [`PciDevice::class_strings`] +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct PciClassLookup { + pub class: Option, + pub subclass: Option, + pub program_interface: Option, +} + +impl PciClassLookup +where + S: ToOwned, +{ + pub fn to_owned(&self) -> PciClassLookup { + PciClassLookup { + class: self.class.as_ref().map(ToOwned::to_owned), + subclass: self.subclass.as_ref().map(ToOwned::to_owned), + program_interface: self.program_interface.as_ref().map(ToOwned::to_owned), + } + } +} + +/// Read PCI device info from `/sys` +pub fn load_pci_devices() -> anyhow::Result> { + let path = std::path::Path::new("/sys/bus/pci/devices"); + let mut devices = vec![]; + for entry in std::fs::read_dir(path)? { + let entry = entry?; + let path = entry.path(); + devices.push(PciDevice::from_directory(&path)?); + } + Ok(devices.into_iter()) +} diff --git a/crates/konfigkoll_hwinfo/src/pci/parser.rs b/crates/konfigkoll_hwinfo/src/pci/parser.rs new file mode 100644 index 00000000..dcd8ca9f --- /dev/null +++ b/crates/konfigkoll_hwinfo/src/pci/parser.rs @@ -0,0 +1,658 @@ +//! 
Parser for pci.ids + +use ahash::AHashMap; +use winnow::{ + ascii::{hex_uint, newline, space1}, + combinator::{alt, opt, separated, trace}, + error::{ContextError, StrContext}, + stream::AsChar, + token::{take, take_until}, + PResult, Parser, +}; + +use super::{Class, ProgrammingInterface, Subclass}; + +#[derive(Debug, PartialEq, Eq)] +enum Line<'input> { + Class(ClassLine<'input>), + Subclass(SubclassLine<'input>), + ProgrammingInterface(ProgrammingInterfaceLine<'input>), + + Vendor(VendorLine<'input>), + Device(DeviceLine<'input>), + Subsystem(SubsystemLine<'input>), +} + +#[derive(Debug, PartialEq, Eq)] +struct VendorLine<'input> { + id: u16, + name: &'input str, +} + +#[derive(Debug, PartialEq, Eq)] +struct DeviceLine<'input> { + id: u16, + name: &'input str, +} + +#[derive(Debug, PartialEq, Eq)] +struct SubsystemLine<'input> { + subvendor: u16, + subdevice: u16, + name: &'input str, +} + +#[derive(Debug, PartialEq, Eq)] +struct ClassLine<'input> { + id: u8, + name: &'input str, +} + +#[derive(Debug, PartialEq, Eq)] +struct SubclassLine<'input> { + id: u8, + name: &'input str, +} + +#[derive(Debug, PartialEq, Eq)] +struct ProgrammingInterfaceLine<'input> { + id: u8, + name: &'input str, +} + +/// Sub-error type for the first splitting layer +#[derive(Debug, PartialEq)] +pub struct ParsePciError { + message: String, + pos: usize, + input: String, +} + +impl ParsePciError { + fn from_parse<'input>( + error: &winnow::error::ParseError<&'input str, ContextError>, + input: &'input str, + ) -> Self { + let message = error.inner().to_string(); + let input = input.to_owned(); + Self { + message, + pos: error.offset(), + input, + } + } +} + +impl std::fmt::Display for ParsePciError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let pos = self.pos; + let input = &self.input; + let message = &self.message; + write!( + f, + "Error at position {}: {}\n{}\n{}^", + pos, + message, + &input[..pos], + " ".repeat(pos) + ) + } +} + +impl std::error::Error for ParsePciError {} + +pub(super) fn parse_pcidatabase(input: &str) -> anyhow::Result { + let lines = parse_file + .parse(input) + .map_err(|error| ParsePciError::from_parse(&error, input))?; + build_hierarchy(&lines) +} + +/// This function takes the line-by-line parsed data and builds a hierarchical +/// structure from it. +/// +/// We either need to keep a cursor into the structure we are building (ouch in +/// Rust), or we need a lookahead of 1 line to determine when to go up a level. +/// We do the latter, using [`itertools::put_back`]. 
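+///
+/// For example (illustrative, mirroring `test_build_hierarchy` below): the flat
+/// line sequence `Vendor(0x0010, "Some other ID")` followed by
+/// `Device(0x8139, "A device")` becomes one `Vendor` entry whose `devices` map
+/// holds device `0x8139`.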
+fn build_hierarchy(lines: &[Line<'_>]) -> anyhow::Result { + let mut db = super::PciIdDb { + classes: Default::default(), + vendors: Default::default(), + }; + + let mut lines = itertools::put_back(lines.iter()); + + while let Some(line) = lines.next() { + match line { + Line::Class(class) => { + let mut subclasses = AHashMap::new(); + while let Some(line) = lines.next() { + match line { + Line::Subclass(subclass) => { + let mut prog_ifs = AHashMap::new(); + while let Some(line) = lines.next() { + match line { + Line::ProgrammingInterface(prog_if) => { + prog_ifs.insert( + prog_if.id, + ProgrammingInterface { + name: prog_if.name.to_string(), + }, + ); + } + _ => { + lines.put_back(line); + break; + } + } + } + subclasses.insert( + subclass.id, + Subclass { + name: subclass.name.to_string(), + program_interfaces: prog_ifs, + }, + ); + } + _ => { + lines.put_back(line); + break; + } + } + } + db.classes.insert( + class.id, + Class { + name: class.name.to_string(), + subclasses, + }, + ); + } + Line::Vendor(vendor) => { + let mut devices = AHashMap::new(); + while let Some(line) = lines.next() { + match line { + Line::Device(device) => { + let mut subsystems = AHashMap::new(); + while let Some(line) = lines.next() { + match line { + Line::Subsystem(subsystem) => { + subsystems.insert( + (subsystem.subvendor, subsystem.subdevice), + super::Subsystem { + name: subsystem.name.to_string(), + }, + ); + } + _ => { + lines.put_back(line); + break; + } + } + } + devices.insert( + device.id, + super::Device { + name: device.name.to_string(), + subsystems, + }, + ); + } + _ => { + lines.put_back(line); + break; + } + } + } + db.vendors.insert( + vendor.id, + super::Vendor { + name: vendor.name.to_string(), + devices, + }, + ); + } + Line::Subclass(_) + | Line::ProgrammingInterface(_) + | Line::Device(_) + | Line::Subsystem(_) => anyhow::bail!("Unexpected line at top level: {line:?}"), + } + } + + Ok(db) +} + +fn parse_file<'input>(i: &mut &'input str) -> PResult>> { + let alternatives = ( + comment.map(|_| None).context(StrContext::Label("comment")), + // Vendor hierarchy + vendor + .map(|v| Some(Line::Vendor(v))) + .context(StrContext::Label("vendor")), + device + .map(|d| Some(Line::Device(d))) + .context(StrContext::Label("device")), + subsystem + .map(|s| Some(Line::Subsystem(s))) + .context(StrContext::Label("subsystem")), + // Class hierarchy + class + .map(|c| Some(Line::Class(c))) + .context(StrContext::Label("class")), + sub_class + .map(|c| Some(Line::Subclass(c))) + .context(StrContext::Label("subclass")), + prog_if + .map(|c| Some(Line::ProgrammingInterface(c))) + .context(StrContext::Label("prog_if")), + "".map(|_| None).context(StrContext::Label("whitespace")), // Blank lines, must be last + ); + (separated(0.., alt(alternatives), newline), opt(newline)) + .map(|(val, _): (Vec<_>, _)| { + // Filter + val.into_iter().flatten().collect() + }) + .parse_next(i) +} + +/// A comment +fn comment(i: &mut &str) -> PResult<()> { + ('#', take_until(0.., '\n')).void().parse_next(i) +} + +fn device<'input>(i: &mut &'input str) -> PResult> { + let parser = ('\t', hex4, space1, string).map(|(_, id, _, name)| DeviceLine { id, name }); + trace("device", parser).parse_next(i) +} + +fn vendor<'input>(i: &mut &'input str) -> PResult> { + let parser = (hex4, space1, string).map(|(id, _, name)| VendorLine { id, name }); + trace("vendor", parser).parse_next(i) +} + +fn subsystem<'input>(i: &mut &'input str) -> PResult> { + let parser = ("\t\t", hex4, space1, hex4, space1, string).map( + |(_, subvendor, _, 
subdevice, _, name)| SubsystemLine { + subvendor, + subdevice, + name, + }, + ); + trace("subsystem", parser).parse_next(i) +} + +fn prog_if<'input>(i: &mut &'input str) -> PResult> { + let parser = ("\t\t", hex2, space1, string) + .map(|(_, id, _, name)| ProgrammingInterfaceLine { id, name }); + trace("prog_if", parser).parse_next(i) +} + +fn sub_class<'input>(i: &mut &'input str) -> PResult> { + let parser = ('\t', hex2, space1, string).map(|(_, id, _, name)| SubclassLine { id, name }); + trace("sub_class", parser).parse_next(i) +} + +fn class<'input>(i: &mut &'input str) -> PResult> { + let parser = + ('C', space1, hex2, space1, string).map(|(_, _, id, _, name)| ClassLine { id, name }); + trace("class", parser).parse_next(i) +} + +/// A string until the end of the line +fn string<'input>(i: &mut &'input str) -> PResult<&'input str> { + let parser = take_until(0.., '\n'); + + trace("string", parser).parse_next(i) +} + +pub fn hex2(i: &mut &str) -> PResult { + trace("hex2", take(2usize).verify(is_hex)) + .and_then(hex_uint::<_, u8, _>) + .parse_next(i) +} + +pub fn hex4(i: &mut &str) -> PResult { + trace("hex4", take(4usize).verify(is_hex)) + .and_then(hex_uint::<_, u16, _>) + .parse_next(i) +} + +fn is_hex(s: &str) -> bool { + for c in s.bytes() { + if !AsChar::is_hex_digit(c) { + return false; + } + } + true +} + +#[cfg(test)] +mod tests { + + use crate::pci::{Device, PciIdDb, Subsystem, Vendor}; + + use super::*; + use indoc::indoc; + use pretty_assertions::assert_eq; + use winnow::combinator::terminated; + + #[test] + fn test_build_hierarchy() { + let test_data = vec![ + Line::Vendor(VendorLine { + id: 0x0001, + name: "Some ID", + }), + Line::Vendor(VendorLine { + id: 0x0010, + name: "Some other ID", + }), + Line::Device(DeviceLine { + id: 0x8139, + name: "A device", + }), + Line::Vendor(VendorLine { + id: 0x0014, + name: "Another ID", + }), + Line::Device(DeviceLine { + id: 0x0001, + name: "ID ID ID", + }), + Line::Subsystem(SubsystemLine { + subvendor: 0x001c, + subdevice: 0x0004, + name: "Sub device", + }), + // Classes + Line::Class(ClassLine { + id: 0x00, + name: "CA", + }), + Line::Subclass(SubclassLine { + id: 0x00, + name: "CA 0", + }), + Line::Subclass(SubclassLine { + id: 0x01, + name: "CA 1", + }), + Line::Subclass(SubclassLine { + id: 0x05, + name: "CA 5", + }), + Line::Class(ClassLine { + id: 0x06, + name: "CB", + }), + Line::Subclass(SubclassLine { + id: 0x00, + name: "CB 0", + }), + Line::Subclass(SubclassLine { + id: 0x01, + name: "CB 1", + }), + Line::ProgrammingInterface(ProgrammingInterfaceLine { + id: 0x00, + name: "CB 1 0", + }), + Line::ProgrammingInterface(ProgrammingInterfaceLine { + id: 0x05, + name: "CB 1 5", + }), + Line::Subclass(SubclassLine { + id: 0x02, + name: "CC", + }), + ]; + + let db = build_hierarchy(&test_data).unwrap(); + + assert_eq!( + db, + PciIdDb { + classes: AHashMap::from([ + ( + 0, + Class { + name: "CA".into(), + subclasses: AHashMap::from([ + ( + 0, + Subclass { + name: "CA 0".into(), + program_interfaces: AHashMap::from([]) + } + ), + ( + 1, + Subclass { + name: "CA 1".into(), + program_interfaces: AHashMap::from([]) + } + ), + ( + 5, + Subclass { + name: "CA 5".into(), + program_interfaces: AHashMap::from([]) + } + ), + ]) + } + ), + ( + 6, + Class { + name: "CB".into(), + subclasses: AHashMap::from([ + ( + 0, + Subclass { + name: "CB 0".into(), + program_interfaces: AHashMap::from([]) + } + ), + ( + 1, + Subclass { + name: "CB 1".into(), + program_interfaces: AHashMap::from([ + ( + 0, + ProgrammingInterface { + name: "CB 1 0".into() 
+ } + ), + ( + 5, + ProgrammingInterface { + name: "CB 1 5".into() + } + ), + ]) + } + ), + ( + 2, + Subclass { + name: "CC".into(), + program_interfaces: AHashMap::from([]) + } + ), + ]) + } + ), + ]), + vendors: AHashMap::from([ + ( + 0x0001, + Vendor { + name: "Some ID".into(), + devices: AHashMap::from([]) + } + ), + ( + 0x0010, + Vendor { + name: "Some other ID".into(), + devices: AHashMap::from([( + 0x8139, + Device { + name: "A device".into(), + subsystems: AHashMap::from([]) + } + )]) + } + ), + ( + 0x0014, + Vendor { + name: "Another ID".into(), + devices: AHashMap::from([( + 0x0001, + Device { + name: "ID ID ID".into(), + subsystems: AHashMap::from([( + (0x001c, 0x0004), + Subsystem { + name: "Sub device".into() + } + )]) + } + )]) + } + ), + ]) + } + ); + } + + const TEST_DATA: &str = indoc! { +"0001 Some ID +0010 Some other ID +# A Comment +\t8139 A device +0014 Another ID +\t0001 ID ID ID +\t\t001c 0004 Sub device + +# A comment + +C 00 CA +\t00 CA 0 +\t01 CA 1 +\t05 CA 5 +C 01 CB +\t00 CB 0 +\t01 CB 1 +\t\t00 CB 1 0 +\t\t05 CB 1 5 +\t02 CC\n"}; + + #[test] + fn test_parse_file() { + let parsed = parse_file.parse(TEST_DATA).unwrap(); + + assert_eq!( + parsed, + vec![ + Line::Vendor(VendorLine { + id: 0x0001, + name: "Some ID" + }), + Line::Vendor(VendorLine { + id: 0x0010, + name: "Some other ID" + }), + Line::Device(DeviceLine { + id: 0x8139, + name: "A device" + }), + Line::Vendor(VendorLine { + id: 0x0014, + name: "Another ID" + }), + Line::Device(DeviceLine { + id: 0x0001, + name: "ID ID ID" + }), + Line::Subsystem(SubsystemLine { + subvendor: 0x001c, + subdevice: 0x0004, + name: "Sub device" + }), + Line::Class(ClassLine { + id: 0x00, + name: "CA" + }), + Line::Subclass(SubclassLine { + id: 0x00, + name: "CA 0" + }), + Line::Subclass(SubclassLine { + id: 0x01, + name: "CA 1" + }), + Line::Subclass(SubclassLine { + id: 0x05, + name: "CA 5" + }), + Line::Class(ClassLine { + id: 0x01, + name: "CB" + }), + Line::Subclass(SubclassLine { + id: 0x00, + name: "CB 0" + }), + Line::Subclass(SubclassLine { + id: 0x01, + name: "CB 1" + }), + Line::ProgrammingInterface(ProgrammingInterfaceLine { + id: 0x00, + name: "CB 1 0" + }), + Line::ProgrammingInterface(ProgrammingInterfaceLine { + id: 0x05, + name: "CB 1 5" + }), + Line::Subclass(SubclassLine { + id: 0x02, + name: "CC" + }), + ] + ); + } + + #[test] + fn test_class() { + let parsed = terminated(class, newline) + .parse("C 00 Something\n") + .unwrap(); + + assert_eq!( + parsed, + ClassLine { + id: 0, + name: "Something" + } + ); + } + + #[test] + fn test_sub_class() { + let parsed = terminated(sub_class, newline) + .parse("\t0f Some string\n") + .unwrap(); + assert_eq!( + parsed, + SubclassLine { + id: 0x0f, + name: "Some string" + } + ); + } +} diff --git a/crates/konfigkoll_script/Cargo.toml b/crates/konfigkoll_script/Cargo.toml new file mode 100644 index 00000000..94b84316 --- /dev/null +++ b/crates/konfigkoll_script/Cargo.toml @@ -0,0 +1,56 @@ +[package] +description = "Scripting language for Konfigkoll (not for direct public use)" +edition = "2021" +license = "MPL-2.0" +name = "konfigkoll_script" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[features] +# Default features +default = ["arch_linux", "debian"] + +# Include the Arch Linux backend +arch_linux = ["paketkoll_core/arch_linux"] + +# Include support for the Debian backend +debian = ["paketkoll_core/debian"] + +[dependencies] +ahash.workspace = true +anyhow.workspace = true +camino.workspace = true 
+compact_str.workspace = true +glob.workspace = true +itertools.workspace = true +konfigkoll_core = { version = "0.1.0", path = "../konfigkoll_core" } +konfigkoll_hwinfo = { version = "0.1.0", path = "../konfigkoll_hwinfo", features = [ + "rune", +] } +konfigkoll_types = { version = "0.1.0", path = "../konfigkoll_types" } +nix = { workspace = true, features = ["user"] } +paketkoll_core = { version = "0.4.1", path = "../paketkoll_core" } +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types", features = [ + "serde", +] } +paketkoll_utils = { version = "0.1.0", path = "../paketkoll_utils" } +parking_lot.workspace = true +regex.workspace = true +rune.workspace = true +rune-modules = { workspace = true, features = ["process", "json", "toml", "tokio"] } +rust-ini.workspace = true +smallvec.workspace = true +sysinfo.workspace = true +tempfile = { workspace = true } +thiserror.workspace = true +tokio = { workspace = true, features = ["process"] } +tracing.workspace = true +winnow = { workspace = true, features = ["simd"] } + +[lints] +workspace = true + +[dev-dependencies] +indoc.workspace = true +pretty_assertions.workspace = true diff --git a/crates/konfigkoll_script/README.md b/crates/konfigkoll_script/README.md new file mode 100644 index 00000000..b9c00507 --- /dev/null +++ b/crates/konfigkoll_script/README.md @@ -0,0 +1,13 @@ +# konfigkoll_script + +Scripting language interface for konfigkoll. + +This provides the glue between Rust and Rune, in particular the custom Rune +modules that konfigkoll provides. + +This is an internal crate with no stability guarantees whatsoever on the +Rust side. The Rune API is also currently heavily unstable but is expected +to be stabilized in the future. + +You should use [`konfigkoll`](https://crates.io/crates/konfigkoll) the command +line tool instead. diff --git a/crates/konfigkoll_script/src/engine.rs b/crates/konfigkoll_script/src/engine.rs new file mode 100644 index 00000000..1419799b --- /dev/null +++ b/crates/konfigkoll_script/src/engine.rs @@ -0,0 +1,305 @@ +use std::{ + collections::BTreeMap, + fmt::Display, + io::Write, + panic::{catch_unwind, AssertUnwindSafe}, + sync::{Arc, OnceLock}, +}; + +use crate::plugins::{ + command::Commands, package_managers::PackageManagers, properties::Properties, + settings::Settings, +}; +use anyhow::Context; +use camino::{Utf8Path, Utf8PathBuf}; +use paketkoll_types::{ + backend::{Backend, Files, PackageBackendMap, PackageMap}, + intern::Interner, +}; +use rune::{ + termcolor::{ColorChoice, StandardStream}, + Diagnostics, Source, Vm, +}; + +/// Describe the phases of script evaluation. +/// +/// Each phase is a separate function defined by the top level script. +#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +pub enum Phase { + /// During this phase, the script can discover information about the system + /// and hardware, and set properties for later use. + #[default] + SystemDiscovery, + /// During this phase file system ignores should be set up. These are + /// needed by the file system scan code that will be started concurrently + /// after this. + Ignores, + /// Early package dependencies that are needed by the main phase should be + /// declared here. These packages will be installed before the main config + /// runs if they are missing. + ScriptDependencies, + /// During the main phase the config proper is generated. 
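+    /// The script's `phase_main` function is called with the properties, the
+    /// command builder and the enabled package managers (see
+    /// [`ScriptEngine::run_phase`]).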
+ Main, +} + +impl Phase { + /// Convert to string + pub fn as_str(&self) -> &'static str { + match self { + Self::SystemDiscovery => "phase_system_discovery", + Self::Ignores => "phase_ignores", + Self::ScriptDependencies => "phase_script_dependencies", + Self::Main => "phase_main", + } + } +} + +impl Display for Phase { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// State being built up by the scripts as it runs +#[derive(Debug)] +pub struct EngineState { + /// Properties set by the user + pub(crate) properties: Properties, + /// Commands to be applied to the system + pub(crate) commands: Commands, + /// Settings of how konfigkoll should behave. + pub(crate) settings: Arc, + /// All the enabled package managers + pub(crate) package_managers: Option, +} + +/// Path to the configuration directory +pub(crate) static CFG_PATH: OnceLock = OnceLock::new(); + +impl EngineState { + pub fn new(files_path: Utf8PathBuf) -> Self { + let settings = Arc::new(Settings::default()); + Self { + properties: Default::default(), + commands: Commands::new(files_path, settings.clone()), + settings, + package_managers: None, + } + } + + pub fn setup_package_managers( + &mut self, + package_backends: &PackageBackendMap, + file_backend_id: Backend, + files_backend: &Arc, + package_maps: &BTreeMap>, + interner: &Arc, + ) { + self.package_managers = Some(PackageManagers::create_from( + package_backends, + file_backend_id, + files_backend, + package_maps, + interner, + )); + } + + pub fn settings(&self) -> Arc { + Arc::clone(&self.settings) + } + + pub fn commands(&self) -> &Commands { + &self.commands + } + + pub fn commands_mut(&mut self) -> &mut Commands { + &mut self.commands + } +} + +/// The script engine that is the main entry point for this crate. 
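+///
+/// Typical usage (an illustrative sketch based on the API below): create an
+/// engine with [`ScriptEngine::new_with_files`] pointing at the configuration
+/// directory, register the package managers on the state
+/// ([`EngineState::setup_package_managers`]) before the main phase, and run the
+/// phases in order:
+///
+/// ```ignore
+/// let mut engine = ScriptEngine::new_with_files(Utf8Path::new("/path/to/config"))?;
+/// engine.run_phase(Phase::SystemDiscovery).await?;
+/// engine.run_phase(Phase::Ignores).await?;
+/// engine.run_phase(Phase::ScriptDependencies).await?;
+/// engine.run_phase(Phase::Main).await?;
+/// ```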
+#[derive(Debug)] +pub struct ScriptEngine { + runtime: Arc, + sources: rune::Sources, + /// User scripts + unit: Arc, + /// Properties exposed by us or set by the user + pub(crate) state: EngineState, +} + +impl ScriptEngine { + pub fn create_context() -> Result { + let mut context = rune::Context::with_default_modules()?; + + // Register modules + crate::plugins::register_modules(&mut context)?; + context.install(rune_modules::json::module(true)?)?; + context.install(rune_modules::toml::module(true)?)?; + context.install(rune_modules::toml::de::module(true)?)?; + context.install(rune_modules::toml::ser::module(true)?)?; + + Ok(context) + } + + pub fn new_with_files(config_path: &Utf8Path) -> anyhow::Result { + CFG_PATH.set(config_path.to_owned()).map_err(|v| { + anyhow::anyhow!( + "Failed to set CFG_PATH to {v}, this should not be called more than once" + ) + })?; + let context = Self::create_context()?; + + // Create state + let state = EngineState::new(config_path.join("files")); + + // Load scripts + let mut diagnostics = Diagnostics::new(); + + let mut sources = rune::Sources::new(); + sources + .insert( + Source::from_path(config_path.join("main.rn")).context("Failed to load main.rn")?, + ) + .context("Failed to insert source file")?; + + let result = rune::prepare(&mut sources) + .with_context(&context) + .with_diagnostics(&mut diagnostics) + .build(); + + if !diagnostics.is_empty() { + let mut writer = StandardStream::stderr(ColorChoice::Always); + diagnostics.emit(&mut writer, &sources)?; + } + + // Create ScriptEngine + Ok(Self { + runtime: Arc::new(context.runtime()?), + sources, + state, + unit: Arc::new(result?), + }) + } + + /// Call a function in the script + #[tracing::instrument(level = "info", name = "script", skip(self))] + pub async fn run_phase(&mut self, phase: Phase) -> anyhow::Result<()> { + // Update phase in relevant state + self.state.commands.phase = phase; + // Create VM and do call + let mut vm = Vm::new(self.runtime.clone(), self.unit.clone()); + tracing::info!("Calling script"); + let output = match phase { + Phase::SystemDiscovery => { + vm.async_call( + [phase.as_str()], + (&mut self.state.properties, self.state.settings.as_ref()), + ) + .await + } + Phase::Ignores | Phase::ScriptDependencies => { + vm.async_call( + [phase.as_str()], + (&mut self.state.properties, &mut self.state.commands), + ) + .await + } + Phase::Main => { + vm.async_call( + [phase.as_str()], + ( + &mut self.state.properties, + &mut self.state.commands, + self.state + .package_managers + .as_ref() + .expect("Package managers must be set"), + ), + ) + .await + } + }; + // Handle rune runtime errors + let output = match output { + Ok(output) => output, + Err(e) => { + let err_str = format!("Rune error while executing {phase}: {}", &e); + tracing::error!("{}", err_str); + let mut writer = StandardStream::stderr(ColorChoice::Always); + writer.write_all(b"\n------\n\n")?; + e.emit(&mut writer, &self.sources)?; + writer.write_all(b"\n------\n\n")?; + + return Err(e).context(err_str); + } + }; + tracing::info!("Returned from script"); + // Do error handling on the returned result + match output { + rune::Value::Result(result) => match result.borrow_ref()?.as_ref() { + Ok(_) => (), + Err(e) => vm.with(|| try_format_error(phase, e))?, + }, + _ => anyhow::bail!("Got non-result from {phase}: {output:?}"), + } + Ok(()) + } + + #[inline] + pub fn state(&self) -> &EngineState { + &self.state + } + + #[inline] + pub fn state_mut(&mut self) -> &mut EngineState { + &mut self.state + } +} + +/// Attempt 
to format the error in the best way possible. +/// +/// Unfortunately this is awkward with dynamic Rune values. +fn try_format_error(phase: Phase, value: &rune::Value) -> anyhow::Result<()> { + match value.clone().into_any() { + rune::runtime::VmResult::Ok(any) => { + if let Ok(err) = any.downcast_borrow_ref::() { + anyhow::bail!("Got error result from {phase}: {:?}", *err); + } + if let Ok(err) = any.downcast_borrow_ref::() { + anyhow::bail!("Got IO error result from {phase}: {:?}", *err); + } + let ty = try_get_type_info(value, "error"); + let formatted = catch_unwind(AssertUnwindSafe(|| format!("{value:?}"))); + anyhow::bail!( + "Got error result from {phase}, but it is a unknown error type: {ty}: {any:?}, formats as: {formatted:?}", + ); + } + rune::runtime::VmResult::Err(not_any) => { + tracing::error!("Got error result from {phase}, it was not an Any: {not_any:?}. Trying other approches at printing the error."); + } + } + // Attempt to format the error + let formatted = catch_unwind(AssertUnwindSafe(|| { + format!("Got error result from {phase}: {value:?}") + })); + match formatted { + Ok(str) => anyhow::bail!(str), + Err(_) => { + let ty = try_get_type_info(value, "error"); + anyhow::bail!( + "Got error result from {phase}, but got a panic while attempting to format said error for printing, {ty}", + ); + } + } +} + +/// Best effort attempt at gettint the type info and printing it +fn try_get_type_info(e: &rune::Value, what: &str) -> String { + match e.type_info() { + rune::runtime::VmResult::Ok(ty) => format!("type info for {what}: {ty:?}"), + rune::runtime::VmResult::Err(err) => { + format!("failed getting type info for {what}: {err:?}") + } + } +} diff --git a/crates/konfigkoll_script/src/lib.rs b/crates/konfigkoll_script/src/lib.rs new file mode 100644 index 00000000..ebf8aca5 --- /dev/null +++ b/crates/konfigkoll_script/src/lib.rs @@ -0,0 +1,20 @@ +//! Scripting language interface for konfigkoll. +//! +//! This provides the glue between Rust and Rune, in particular the custom +//! Rune modules that konfigkoll provides. +//! +//! This is an internal crate with no stability guarantees whatsoever on the +//! Rust side. The Rune API is also currently heavily unstable but is expected +//! to be stabilized in the future. +//! +//! You should use [`konfigkoll`](https://crates.io/crates/konfigkoll) the +//! command line tool instead. + +mod engine; +mod plugins; + +pub use engine::EngineState; +pub use engine::Phase; +pub use engine::ScriptEngine; +pub use plugins::command::Commands; +pub use plugins::settings::Settings; diff --git a/crates/konfigkoll_script/src/plugins.rs b/crates/konfigkoll_script/src/plugins.rs new file mode 100644 index 00000000..ef415ed6 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins.rs @@ -0,0 +1,29 @@ +//! 
RHAI plugins for Konfigkoll + +pub(crate) mod command; +mod filesystem; +pub mod package_managers; +mod passwd; +mod patch; +mod process; +pub(crate) mod properties; +pub(crate) mod regex; +pub(crate) mod settings; +mod sysinfo; +mod systemd; + +pub(crate) fn register_modules(context: &mut rune::Context) -> Result<(), rune::ContextError> { + context.install(command::module()?)?; + context.install(filesystem::module()?)?; + context.install(package_managers::module()?)?; + context.install(passwd::module()?)?; + context.install(patch::module()?)?; + context.install(process::module(true)?)?; + context.install(properties::module()?)?; + context.install(regex::module()?)?; + context.install(settings::module()?)?; + context.install(sysinfo::module()?)?; + context.install(systemd::module()?)?; + + Ok(()) +} diff --git a/crates/konfigkoll_script/src/plugins/command.rs b/crates/konfigkoll_script/src/plugins/command.rs new file mode 100644 index 00000000..f6d1ae8a --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/command.rs @@ -0,0 +1,366 @@ +//! Commands to change the configuration +//! +//! These are the important ones, the ones that describe how the system should be changed. + +use std::{str::FromStr, sync::Arc}; + +use ahash::AHashSet; +use anyhow::Context; +use camino::Utf8PathBuf; +use compact_str::CompactString; +use konfigkoll_core::utils::safe_path_join; +use konfigkoll_types::{ + FileContents, FsInstruction, FsOp, FsOpDiscriminants, PkgIdent, PkgInstruction, + PkgInstructions, PkgOp, +}; +use paketkoll_types::{backend::Backend, files::Mode}; +use rune::{ContextError, Module, Value}; + +use crate::Phase; + +use super::settings::Settings; + +#[derive(Debug, Clone, rune::Any)] +#[rune(item = ::command)] +/// The changes to apply to the system. +/// +/// This is what will be compared to the installed system +pub struct Commands { + /// The current phase + pub(crate) phase: Phase, + /// Base path to files directory + pub(crate) base_files_path: Utf8PathBuf, + /// Set of file system ignores + pub fs_ignores: AHashSet, + /// Queue of file system instructions + pub fs_actions: Vec, + /// Queue of package instructions + pub package_actions: PkgInstructions, + /// Settings + settings: Arc, +} + +/// Rust API +impl Commands { + pub(crate) fn new(base_files_path: Utf8PathBuf, settings: Arc) -> Self { + Self { + phase: Phase::SystemDiscovery, + base_files_path, + fs_ignores: AHashSet::new(), + fs_actions: Vec::new(), + package_actions: PkgInstructions::new(), + settings, + } + } + + /// Get the contents of an set file + pub(crate) fn file_contents(&self, path: &str) -> Option<&FileContents> { + self.fs_actions + .iter() + .rfind(|i| { + i.path == path && FsOpDiscriminants::from(&i.op) == FsOpDiscriminants::CreateFile + }) + .map(|i| match &i.op { + FsOp::CreateFile(contents) => contents, + _ => unreachable!(), + }) + } +} + +/// Rune API +impl Commands { + /// Ignore a path, preventing it from being scanned for differences + #[rune::function(keep)] + pub fn ignore_path(&mut self, ignore: &str) -> anyhow::Result<()> { + if self.phase != Phase::Ignores { + return Err(anyhow::anyhow!( + "Can only ignore paths during the 'ignores' phase" + )); + } + if !self.fs_ignores.insert(ignore.into()) { + tracing::warn!("Ignoring path '{}' multiple times", ignore); + } + Ok(()) + } + + /// Install a package with the given package manager. + /// + /// If the package manager isn't enabled, this will be a no-op. 
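+    ///
+    /// A rough example (assuming `cmds` is the `Commands` instance passed to
+    /// the phase function and that the `pacman` backend is enabled):
+    ///
+    /// ```rune
+    /// cmds.add_pkg("pacman", "ripgrep")?;
+    /// ```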
+ #[rune::function(keep)] + pub fn add_pkg(&mut self, package_manager: &str, identifier: &str) -> anyhow::Result<()> { + if self.phase < Phase::ScriptDependencies { + return Err(anyhow::anyhow!( + "Can only add packages during the 'script_dependencies' or 'main' phases" + )); + } + let backend = Backend::from_str(package_manager).context("Invalid backend")?; + if !self.settings.is_pkg_backend_enabled(backend) { + tracing::info!("Skipping disabled package manager {}", package_manager); + return Ok(()); + } + if self + .package_actions + .insert( + PkgIdent { + package_manager: backend, + identifier: identifier.into(), + }, + PkgInstruction { + op: PkgOp::Install, + comment: None, + }, + ) + .is_some() + { + tracing::warn!("Multiple actions for package '{package_manager}:{identifier}'",); + } + Ok(()) + } + + /// Remove a package with the given package manager. + /// + /// If the package manager isn't enabled, this will be a no-op. + #[rune::function(keep)] + pub fn remove_pkg(&mut self, package_manager: &str, identifier: &str) -> anyhow::Result<()> { + if self.phase < Phase::ScriptDependencies { + return Err(anyhow::anyhow!( + "Can only add packages during the 'script_dependencies' or 'main' phases" + )); + } + let backend = Backend::from_str(package_manager).context("Invalid backend")?; + if !self.settings.is_file_backend_enabled(backend) { + tracing::debug!("Skipping disabled package manager {}", package_manager); + return Ok(()); + } + if self + .package_actions + .insert( + PkgIdent { + package_manager: backend, + identifier: identifier.into(), + }, + PkgInstruction { + op: PkgOp::Uninstall, + comment: None, + }, + ) + .is_some() + { + tracing::warn!("Multiple actions for package '{package_manager}:{identifier}'",); + } + Ok(()) + } + + /// Remove a path + #[rune::function(keep)] + pub fn rm(&mut self, path: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::Remove, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Check if a file exists in the `files/` sub-directory to the configuration + #[rune::function(keep)] + pub fn has_source_file(&self, path: &str) -> bool { + let path = safe_path_join(&self.base_files_path, path.into()); + path.exists() + } + + /// Create a file with the given contents + #[rune::function(keep)] + pub fn copy(&mut self, path: &str) -> anyhow::Result<()> { + self.copy_from(path, path) + } + + /// Create a file with the given contents (renaming the file in the process) + /// + /// The rename is useful to copy a file to a different location (e.g. 
`etc/fstab.hostname` to `etc/fstab`) + #[rune::function(keep)] + pub fn copy_from(&mut self, path: &str, src: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + let contents = FileContents::from_file(&safe_path_join(&self.base_files_path, src.into())); + let contents = match contents { + Ok(v) => v, + Err(e) => { + tracing::error!("Failed to read file contents for '{}': {}", path, e); + return Err(anyhow::anyhow!( + "Failed to read file contents for '{}': {}", + path, + e + )); + } + }; + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::CreateFile(contents), + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Create a symlink + #[rune::function(keep)] + pub fn ln(&mut self, path: &str, target: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::CreateSymlink { + target: target.into(), + }, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Create a file with the given contents + #[rune::function(keep)] + pub fn write(&mut self, path: &str, contents: &[u8]) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::CreateFile(FileContents::from_literal(contents.into())), + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Create a directory + #[rune::function(keep)] + pub fn mkdir(&mut self, path: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::CreateDirectory, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Change file owner + #[rune::function(keep)] + pub fn chown(&mut self, path: &str, owner: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::SetOwner { + owner: owner.into(), + }, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Change file group + #[rune::function(keep)] + pub fn chgrp(&mut self, path: &str, group: &str) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::SetGroup { + group: group.into(), + }, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Change file mode + #[rune::function(keep)] + pub fn chmod(&mut self, path: &str, mode: Value) -> anyhow::Result<()> { + if self.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + + let numeric_mode = match mode { + Value::Integer(m) => Mode::new(m as u32), + Value::String(str) => { + let guard = str.borrow_ref()?; + // Convert text mode (u+rx,g+rw,o+r, etc) to numeric mode + Mode::parse(&guard)? 
+ } + _ => return Err(anyhow::anyhow!("Invalid mode value")), + }; + + self.fs_actions.push(FsInstruction { + op: konfigkoll_types::FsOp::SetMode { mode: numeric_mode }, + path: path.into(), + comment: None, + }); + Ok(()) + } + + /// Set all permissions at once + #[rune::function(keep)] + pub fn perms( + &mut self, + path: &str, + owner: &str, + group: &str, + mode: Value, + ) -> anyhow::Result<()> { + self.chown(path, owner)?; + self.chgrp(path, group)?; + self.chmod(path, mode)?; + Ok(()) + } +} + +#[rune::module(::command)] +/// Commands describe the changes to apply to the system +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(Commands::ignore_path__meta)?; + m.function_meta(Commands::add_pkg__meta)?; + m.function_meta(Commands::remove_pkg__meta)?; + m.function_meta(Commands::rm__meta)?; + m.function_meta(Commands::has_source_file__meta)?; + m.function_meta(Commands::copy__meta)?; + m.function_meta(Commands::copy_from__meta)?; + m.function_meta(Commands::ln__meta)?; + m.function_meta(Commands::write__meta)?; + m.function_meta(Commands::mkdir__meta)?; + + m.function_meta(Commands::chown__meta)?; + m.function_meta(Commands::chgrp__meta)?; + m.function_meta(Commands::chmod__meta)?; + m.function_meta(Commands::perms__meta)?; + + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/filesystem.rs b/crates/konfigkoll_script/src/plugins/filesystem.rs new file mode 100644 index 00000000..df2316e6 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/filesystem.rs @@ -0,0 +1,258 @@ +//! Host file system access + +use std::io::{ErrorKind, Read}; + +use anyhow::Context; +use camino::Utf8PathBuf; +use konfigkoll_core::utils::safe_path_join; +use rune::alloc::fmt::TryWrite; +use rune::{ + runtime::{Bytes, Formatter}, + vm_write, Any, ContextError, Module, +}; + +use crate::engine::CFG_PATH; + +/// A file error +#[derive(Debug, Any, thiserror::Error)] +#[rune(item = ::filesystem)] +enum FileError { + #[error("IO Error: {0}")] + IoError(#[from] std::io::Error), + #[error("Allocation error: {0}")] + AllocError(#[from] rune::alloc::Error), +} + +impl FileError { + #[rune::function(vm_result, protocol = STRING_DISPLAY)] + pub(crate) fn display(&self, f: &mut Formatter) { + vm_write!(f, "{}", self); + } + + #[rune::function(vm_result, protocol = STRING_DEBUG)] + pub(crate) fn debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } +} + +/// Represents a temporary directory +/// +/// The directory will be removed when this object is dropped +#[derive(Debug, Any)] +#[rune(item = ::filesystem)] +struct TempDir { + path: Utf8PathBuf, +} + +impl Drop for TempDir { + fn drop(&mut self) { + std::fs::remove_dir_all(&self.path).expect("Failed to remove temporary directory"); + } +} + +impl TempDir { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// Create a new temporary directory + #[rune::function(path = Self::new)] + fn new() -> anyhow::Result { + let dir = tempfile::TempDir::with_prefix("konfigkoll_")?.into_path(); + match Utf8PathBuf::from_path_buf(dir) { + Ok(path) => Ok(Self { path }), + Err(path) => { + std::fs::remove_dir_all(&path).expect("Failed to remove temporary directory"); + Err(anyhow::anyhow!("Failed to convert path to utf8: {path:?}")) + } + } + } + + /// Get the path to the temporary directory + #[rune::function] + fn path(&self) -> String { + self.path.to_string() + } + + /// Write a temporary file under this 
directory, getting it's path path + #[rune::function] + fn write(&self, path: &str, contents: &[u8]) -> anyhow::Result { + let p = safe_path_join(&self.path, path.into()); + std::fs::write(&p, contents).with_context(|| format!("Failed to write to {p}"))?; + Ok(p.into_string()) + } + + /// Read a file from the temporary directory + /// + /// Returns a `Result` + #[rune::function] + fn read(&self, path: &str) -> anyhow::Result { + let p = safe_path_join(&self.path, path.into()); + let data = std::fs::read(&p).with_context(|| format!("Failed to read {p}"))?; + Ok(Bytes::from_vec(data.try_into()?)) + } +} + +/// Represents an open file +#[derive(Debug, Any)] +#[rune(item = ::filesystem)] +struct File { + file: std::fs::File, + // TODO: Needed for future privilege separation + #[allow(dead_code)] + need_root: bool, +} + +/// Rune API +impl File { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + pub(crate) fn debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// Open a file (with normal user permissions) + #[rune::function(path = Self::open)] + pub fn open(path: &str) -> anyhow::Result { + let file = std::fs::File::open(path).with_context(|| format!("Failed to open {path}"))?; + Ok(Self { + file, + need_root: false, + }) + } + + /// Open a file as root + #[rune::function(path = Self::open_as_root)] + pub fn open_as_root(path: &str) -> anyhow::Result { + let file = + std::fs::File::open(path).with_context(|| format!("Failed to open {path} as root"))?; + Ok(Self { + file, + need_root: true, + }) + } + + /// Open a file relative to the config directory. + /// + /// This is generally safe (as long as the file exists in the config directory) + #[rune::function(path = Self::open_from_config)] + pub fn open_from_config(path: &str) -> anyhow::Result { + let p = safe_path_join(CFG_PATH.get().expect("CFG_PATH not set"), path.into()); + let file = std::fs::File::open(&p) + .with_context(|| format!("Failed to open {path} from config directory, tried {p}"))?; + Ok(Self { + file, + need_root: false, + }) + } + + /// Read the entire file as a string + #[rune::function] + pub fn read_all_string(&mut self) -> Result { + let mut buf = String::new(); + self.file.read_to_string(&mut buf)?; + Ok(buf) + } + + /// Read the entire file as bytes + #[rune::function] + pub fn read_all_bytes(&mut self) -> Result { + let mut buf = Vec::new(); + self.file.read_to_end(&mut buf)?; + let buf = rune::alloc::Vec::try_from(buf)?; + Ok(buf.into()) + } +} + +/// Check if a path exists +/// +/// Returns a `Result` +#[rune::function] +fn exists(path: &str) -> Result { + let metadata = std::fs::symlink_metadata(path); + + match metadata { + Ok(_) => Ok(true), + Err(err) if err.kind() == ErrorKind::NotFound => Ok(false), + Err(err) => Err(err), + } +} + +/// Run a glob pattern against the host file system +/// +/// Returns a `Result>` +#[rune::function] +fn glob(pattern: &str) -> anyhow::Result> { + let paths = glob::glob(pattern).context("Failed to construct glob")?; + + let mut result = Vec::new(); + for path in paths { + result.push(path?.to_string_lossy().to_string()); + } + + Ok(result) +} + +/// Get the path to the configuration directory +/// +/// **Prefer `File::open_from_config` instead if you just want to load data from the config directory** +/// +/// This is primarily useful together with the `process` module to pass a +/// path to a file from the configuration directory to an external command. 
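+///
+/// The returned path is the directory that contains `main.rn`.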
+#[rune::function] +fn config_path() -> String { + CFG_PATH.get().expect("CFG_PATH not set").to_string() +} + +#[rune::module(::filesystem)] +/// Read only access to the host file system and the configuration directory +/// +/// # Host file system access +/// +/// Be careful with host file system access, since it can make your configuration non-deterministic. +/// +/// The main purpose of this is for things that *shouldn't* be stored in your git +/// managed configuration, in particular for passwords and other secrets: +/// +/// * Hashed passwords from `/etc/shadow` +/// * Passwords for wireless networks +/// * Passwords for any services needed (such as databases) +/// +/// Another use case is to read some system information from `/sys` that isn't +/// already exposed by other APIs +/// +/// # Configuration directory access +/// +/// This is generally safe, in order to read files that are part of the configuration +/// (if you want to use them as templates for example and fill in some values) +/// +/// Use `File::open_from_config` for this. In special circumstances (together with the `process` module) +/// you may also need [`config_path`]. +/// +/// # Temporary directories +/// +/// This is generally not needed when working with konfigkoll, but can be useful +/// for interacting with external commands via the `process` module. +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(File::debug)?; + m.function_meta(File::open)?; + m.function_meta(File::open_as_root)?; + m.function_meta(File::open_from_config)?; + m.function_meta(File::read_all_string)?; + m.function_meta(File::read_all_bytes)?; + m.ty::()?; + m.function_meta(FileError::display)?; + m.function_meta(FileError::debug)?; + m.ty::()?; + m.function_meta(TempDir::debug)?; + m.function_meta(TempDir::new)?; + m.function_meta(TempDir::path)?; + m.function_meta(TempDir::read)?; + m.function_meta(TempDir::write)?; + m.function_meta(exists)?; + m.function_meta(glob)?; + m.function_meta(config_path)?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/package_managers.rs b/crates/konfigkoll_script/src/plugins/package_managers.rs new file mode 100644 index 00000000..7c4ef5f3 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/package_managers.rs @@ -0,0 +1,189 @@ +//! Access to system package manager + +use std::{collections::BTreeMap, str::FromStr, sync::Arc}; + +use anyhow::Context; +use paketkoll_types::{ + backend::{Backend, Files, OriginalFileQuery, PackageBackendMap, PackageMap, Packages}, + intern::Interner, +}; +use rune::{ + runtime::{Bytes, Shared}, + Any, ContextError, Module, +}; + +/// Type of map for package managers +pub type PackageManagerMap = BTreeMap; + +#[derive(Debug, Any)] +#[rune(item = ::package_managers)] +/// The collection of enabled package managers +pub struct PackageManagers { + package_managers: PackageManagerMap, + backend_with_files: Backend, +} + +impl PackageManagers { + /// Create a new package managers + pub fn create_from( + package_backends: &PackageBackendMap, + file_backend_id: Backend, + files_backend: &Arc, + package_maps: &BTreeMap>, + interner: &Arc, + ) -> Self { + let files_backends = [(file_backend_id, files_backend)]; + // Join all three maps on key. This is equivalent to a SQL outer join. + // Use itertools::merge_join_by for this. 
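+        // The merge yields EitherOrBoth: Both for a backend that handles both
+        // packages and files, Left for package-only backends and Right for the
+        // files-only backend.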
+ let merged = + itertools::merge_join_by(package_backends, files_backends, |l, r| l.0.cmp(&r.0)); + // We now know that all keys are present (everything is a package, file or both backend) + let mut package_managers = PackageManagerMap::new(); + for entry in merged { + let (backend, packages, files) = match entry { + itertools::EitherOrBoth::Both(a, b) => (*a.0, Some(a.1), Some(b.1)), + itertools::EitherOrBoth::Left(a) => (*a.0, Some(a.1), None), + itertools::EitherOrBoth::Right(b) => (b.0, None, Some(b.1)), + }; + + let package_map = package_maps.get(&backend).cloned(); + let pkg_mgr = PackageManager::new( + backend, + files.cloned(), + packages.cloned(), + package_map, + interner.clone(), + ); + package_managers.insert(backend, pkg_mgr); + } + Self { + package_managers, + backend_with_files: file_backend_id, + } + } +} + +impl PackageManagers { + /// Get an instance of a [`PackageManager`] by backend name + #[rune::function] + fn get(&self, name: &str) -> Option { + let backend = Backend::from_str(name).ok()?; + self.package_managers.get(&backend).cloned() + } + + /// Get the package manager that handles files + #[rune::function] + fn files(&self) -> PackageManager { + self.package_managers + .get(&self.backend_with_files) + .expect("There should always be a files backend") + .clone() + } +} + +/// Inner struct because rune function attributes don't want to play along. +#[derive(Debug, Clone)] +struct PackageManagerInner { + backend: Backend, + files: Option>, + packages: Option>, + package_map: Option>, + interner: Arc, +} + +#[derive(Debug, Clone, Any)] +#[rune(item = ::package_managers)] +#[repr(transparent)] +/// A package manager +pub struct PackageManager { + inner: Shared, +} + +// Rust API +impl PackageManager { + /// Create a new package manager + pub fn new( + backend: Backend, + files: Option>, + packages: Option>, + package_map: Option>, + interner: Arc, + ) -> Self { + Self { + inner: Shared::new(PackageManagerInner { + backend, + files, + packages, + package_map, + interner, + }) + .expect("Failed to create shared package manager"), + } + } + + pub fn files(&self) -> Option> { + self.inner.borrow_ref().ok()?.files.clone() + } + + pub fn packages(&self) -> Option> { + self.inner.borrow_ref().ok()?.packages.clone() + } + + /// Get the original file contents of a package from Rust code + pub fn file_contents(&self, package: &str, path: &str) -> anyhow::Result> { + let queries: [_; 1] = [OriginalFileQuery { + package: package.into(), + path: path.into(), + }]; + let guard = self.inner.borrow_ref()?; + let files = guard + .files + .as_ref() + .ok_or_else(|| anyhow::anyhow!("No files backend for {}", guard.backend))?; + let package_map = guard + .package_map + .as_ref() + .ok_or_else(|| anyhow::anyhow!("No package map for {}", guard.backend))?; + let results = files + .original_files(&queries, package_map, &guard.interner) + .with_context(|| format!("Failed original_file_contents({package}, {path})"))?; + if results.len() != 1 { + anyhow::bail!( + "Failed original_file_contents({package}, {path}): Got wrong number of results: {}", + results.len() + ); + } + let result = results + .into_iter() + .next() + .ok_or_else(|| { + anyhow::anyhow!( + "Failed original_file_contents({package}, {path}): Failed to extract result" + ) + })? 
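+            // Keep only the file contents half of the returned entry.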
+ .1; + Ok(result) + } +} + +// Rune API +impl PackageManager { + /// Get the original file contents of a package as a `Result` + #[rune::function] + fn original_file_contents(&self, package: &str, path: &str) -> anyhow::Result { + let result = self.file_contents(package, path)?; + Ok(Bytes::from_vec(result.try_into()?)) + } +} + +#[rune::module(::package_managers)] +/// Interface to the package manager(s) in the system +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(PackageManager::original_file_contents)?; + m.ty::()?; + m.function_meta(PackageManagers::get)?; + m.function_meta(PackageManagers::files)?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/passwd.rs b/crates/konfigkoll_script/src/plugins/passwd.rs new file mode 100644 index 00000000..a4f83b0b --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/passwd.rs @@ -0,0 +1,642 @@ +//! Helpers for working with /etc/passwd and /etc/groups (as well as shadow files) + +mod sysusers; + +use std::{ + collections::{BTreeMap, BTreeSet}, + fmt::Write, +}; + +use ahash::{AHashMap, AHashSet}; +use itertools::Itertools; +use rune::{runtime::Function, Any, ContextError, Module, Value}; +use sysusers::{GroupId, UserId}; +use winnow::Parser; + +use crate::Commands; + +use super::package_managers::PackageManager; + +type Users = BTreeMap; +type Groups = BTreeMap; + +/// A representation of the user and group databases +/// +/// This can be used to handle `/etc/passwd` and related files. +/// Typically you would: +/// * Create an instance early in the main phase +/// * Add things to it as needed (next to the associated packages) +/// * Apply it at the end of the main phase +/// +/// +/// A rough example: +/// +/// ```rune +/// // Mappings for the IDs that systemd auto-assigns inconsistently from computer to computer +/// const USER_MAPPING = [("systemd-journald", 900), /* ... */] +/// const GROUP_MAPPING = [("systemd-journald", 900), /* ... */] +/// +/// pub async fn phase_main(props, cmds, package_managers) { +/// let passwd = passwd::Passwd::new(USER_MAPPING, GROUP_MAPPING)?; +/// +/// let files = package_managers.files(); +/// // These two files MUST come first as other files later on refer to them, +/// // and we are not order independent (unlike the real sysusers.d). +/// passwd.add_from_sysusers(files, "systemd", "/usr/lib/sysusers.d/basic.conf")?; +/// passwd.add_from_sysusers(files, "filesystem", "/usr/lib/sysusers.d/arch.conf")?; +/// +/// // Various other packages and other changes ... +/// passwd.add_from_sysusers(files, "dbus", "/usr/lib/sysusers.d/dbus.conf")?; +/// // ... +/// +/// // Add human user +/// let me = passwd::User::new(1000, "me", "me", ""); +/// me.shell = "/bin/zsh"; +/// me.home = "/home/me"; +/// passwd.add_user_with_group(me); +/// passwd.add_user_to_groups("me", ["wheel", "optical", "uucp", "users"]); +/// +/// // Don't store passwords in your git repo, load them from the system instead +/// passwd.passwd_from_system(["me", "root"]); +/// +/// // Give root a login shell, we don't want /usr/bin/nologin! 
+/// passwd.update_user("root", |user| { +/// user.shell = "/bin/zsh"; +/// user +/// }); +/// +/// // Deal with the IDs not matching (because the mappings were created +/// // before konfigkoll was in use for example) +/// passwd.align_ids_with_system()?; +/// +/// // Apply changes +/// passwd.apply(cmds)?; +/// } +/// ``` +#[derive(Debug, Any)] +#[rune(item = ::passwd)] +struct Passwd { + users: Users, + groups: Groups, + user_ids: AHashMap, + group_ids: AHashMap, +} + +/// Internal helper functions +impl Passwd { + fn sanity_check(&self) -> anyhow::Result<()> { + // Check for duplicate IDs + { + let mut ids = BTreeSet::new(); + for user in self.users.values() { + if !ids.insert(user.uid) { + return Err(anyhow::anyhow!( + "More than one user maps to UID: {}", + user.uid + )); + } + } + } + { + let mut ids = BTreeSet::new(); + for group in self.groups.values() { + if !ids.insert(group.gid) { + return Err(anyhow::anyhow!( + "More than one group maps to GID: {}", + group.gid + )); + } + } + } + Ok(()) + } +} + +macro_rules! log_and_error { + ($($arg:tt)*) => { + tracing::error!($($arg)*); + return Err(anyhow::anyhow!($($arg)*)); + }; +} + +/// Rune API +impl Passwd { + /// Create a new Passwd instance + /// + /// # Arguments + /// * `user_ids` - A list of tuples of (username, uid) to use if sysusers files does not specify a UID + /// * `group_ids` - A list of tuples of (groupname, gid) to use if sysusers files does not specify a GID + #[rune::function(path = Self::new)] + fn new(user_ids: Vec<(String, u32)>, group_ids: Vec<(String, u32)>) -> anyhow::Result { + let num_uids = user_ids.len(); + let num_gids = group_ids.len(); + let uids: AHashMap = user_ids.into_iter().collect(); + let gids: AHashMap = group_ids.into_iter().collect(); + // Sanity check that there are no duplicates + if uids.len() != num_uids { + log_and_error!("Duplicate user names in user ID mapping"); + } + if gids.len() != num_gids { + log_and_error!("Duplicate group names in group ID mapping"); + } + // Sanity check that the mapped to values are unique + if uids.values().collect::>().len() != num_uids { + log_and_error!("Duplicate user IDs in user ID mapping"); + } + if gids.values().collect::>().len() != num_gids { + log_and_error!("Duplicate group IDs in group ID mapping"); + } + Ok(Self { + users: BTreeMap::new(), + groups: BTreeMap::new(), + user_ids: uids, + group_ids: gids, + }) + } + + /// Add a user to the passwd database + #[rune::function] + fn add_user(&mut self, user: User) { + self.users.insert(user.name.clone(), user); + } + + /// Add a user to the passwd database (and add a matching group with the same ID) + #[rune::function] + fn add_user_with_group(&mut self, user: User) { + let group = Group { + name: user.group.clone(), + gid: user.uid, + members: Default::default(), + passwd: "!*".into(), + admins: Default::default(), + }; + self.users.insert(user.name.clone(), user); + self.groups.insert(group.name.clone(), group); + } + + /// Add a group to the passwd database + #[rune::function] + fn add_group(&mut self, group: Group) { + self.groups.insert(group.name.clone(), group); + } + + /// Add an already added user to one or more already added groups + #[rune::function] + fn add_user_to_groups(&mut self, user: &str, groups: Vec) { + for group in groups { + if let Some(group) = self.groups.get_mut(&group) { + group.members.insert(user.into()); + } else { + tracing::error!("Group {} not found", group); + } + } + } + + /// Add an already added user to one or more already added groups + #[rune::function] + fn 
add_user_to_groups_as_admin(&mut self, user: &str, groups: Vec) { + for group in groups { + if let Some(group) = self.groups.get_mut(&group) { + group.admins.insert(user.into()); + } else { + tracing::error!("Group {} not found", group); + } + } + } + + #[rune::function] + fn update_user(&mut self, user: &str, func: &Function) { + // TODO: Get rid of expect + let user = self.users.get_mut(user).expect("User not found"); + *user = func + .call::<_, User>((user.clone(),)) + .expect("User update call failed"); + } + + #[rune::function] + fn update_group(&mut self, group: &str, func: &Function) { + let group = self.groups.get_mut(group).expect("Group not found"); + *group = func + .call::<_, Group>((group.clone(),)) + .expect("Group update call failed"); + } + + /// Read the passwd and group files from the system and update IDs to match the system (based on name) + #[rune::function] + fn align_ids_with_system(&mut self) -> anyhow::Result<()> { + self.sanity_check().inspect_err(|e| { + tracing::error!("Sanity check *before* aligning passwd IDs failed: {e}"); + })?; + let passwd = std::fs::read_to_string("/etc/passwd")?; + for line in passwd.lines() { + let parts: Vec<_> = line.split(':').collect(); + if parts.len() != 7 { + tracing::error!("Invalid line in /etc/passwd: {}", line); + continue; + } + let name = parts[0]; + let uid: u32 = parts[2].parse()?; + if let Some(user) = self.users.get_mut(name) { + if user.uid != uid { + tracing::info!("Updating UID for {} from {} to {}", name, user.uid, uid); + user.uid = uid; + } + } + } + + let group = std::fs::read_to_string("/etc/group")?; + for line in group.lines() { + let parts: Vec<_> = line.split(':').collect(); + if parts.len() != 4 { + tracing::error!("Invalid line in /etc/group: {}", line); + continue; + } + let name = parts[0]; + let gid: u32 = parts[2].parse()?; + if let Some(group) = self.groups.get_mut(name) { + if group.gid != gid { + tracing::info!("Updating GID for {} from {} to {}", name, group.gid, gid); + group.gid = gid; + } + } + } + Ok(()) + } + + /// Set user passwords to what they are set to on the system for the given users + #[rune::function] + // Allow because rune doesn't work without the owned vec + #[allow(clippy::needless_pass_by_value)] + fn passwd_from_system(&mut self, users: Vec) -> anyhow::Result<()> { + let shadow = std::fs::read_to_string("/etc/shadow")?; + for line in shadow.lines() { + let parts: Vec<_> = line.split(':').collect(); + if parts.len() != 9 { + tracing::error!("Invalid line in /etc/shadow: {}", line); + continue; + } + let name = parts[0]; + let passwd = parts[1]; + if users.contains(&name.to_string()) { + if let Some(user) = self.users.get_mut(name) { + user.passwd = passwd.into(); + } + } + } + Ok(()) + } + + /// Add users and groups declared in a systemd sysusers file + /// + /// You need to provide a map of preferred IDs for any IDs not explicitly set in the sysusers file. 
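+    /// These are the `user_ids`/`group_ids` mappings given to [`Passwd::new`].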
+ /// + /// # Arguments + /// * `package_manager` - The package manager to use for reading the sysusers file + /// * `config_file` - The path to the sysusers file + #[rune::function(keep)] + fn add_from_sysusers( + &mut self, + package_manager: &PackageManager, + package: &str, + config_file: &str, + ) -> anyhow::Result<()> { + let file_contents = + String::from_utf8(package_manager.file_contents(package, config_file)?)?; + let parsed = sysusers::parse_file + .parse(&file_contents) + .map_err(|error| sysusers::SysusersParseError::from_parse(&error, &file_contents))?; + for directive in parsed { + match directive { + sysusers::Directive::Comment => (), + sysusers::Directive::User(user) => { + let (uid, gid, group) = match user.id { + Some(UserId::Uid(uid)) => (uid, None, user.name.clone()), + Some(UserId::UidGroup(uid, group)) => (uid, None, group), + Some(UserId::UidGid(uid, gid)) => { + // Resolve gid to group name + let group = self.groups.values().find(|v| v.gid == gid); + let group_name = group.map(|g| g.name.as_str()).ok_or_else(|| { + anyhow::anyhow!("No group with GID {} for user {}", gid, user.name) + })?; + (uid, Some(gid), group_name.into()) + } + Some(UserId::FromPath(_)) => { + return Err(anyhow::anyhow!("Cannot yet handle user IDs from path")) + } + None => { + let uid = self + .user_ids + .get(user.name.as_str()) + .ok_or_else(|| anyhow::anyhow!("No ID for user {}", user.name))?; + (*uid, None, user.name.clone()) + } + }; + self.groups + .entry(group.clone().into()) + .or_insert_with(|| Group { + name: group.clone().into(), + gid: gid.unwrap_or(uid), + members: Default::default(), + passwd: "!*".into(), + admins: Default::default(), + }); + self.users + .entry(user.name.clone().into_string()) + .or_insert_with(|| User { + uid, + name: user.name.into_string(), + group: group.into(), + gecos: user.gecos.map(Into::into).unwrap_or_default(), + home: user.home.map(Into::into).unwrap_or_else(|| "/".into()), + shell: user + .shell + .map(Into::into) + .unwrap_or_else(|| "/usr/bin/nologin".into()), + passwd: "!*".into(), + change: None, + min: None, + max: None, + warn: None, + inact: None, + expire: None, + }); + } + sysusers::Directive::Group(group) => { + let gid = match group.id { + Some(GroupId::Gid(gid)) => gid, + Some(GroupId::FromPath(_)) => { + return Err(anyhow::anyhow!("Cannot yet handle group IDs from path")) + } + None => self + .group_ids + .get(group.name.as_str()) + .copied() + .ok_or_else(|| anyhow::anyhow!("No ID for group {}", group.name))?, + }; + self.groups + .entry(group.name.clone().into_string()) + .or_insert_with(|| Group { + name: group.name.into_string(), + gid, + members: Default::default(), + passwd: "!*".into(), + admins: Default::default(), + }); + } + sysusers::Directive::AddUserToGroup { user, group } => { + if let Some(group) = self.groups.get_mut(group.as_str()) { + group.members.insert(user.into_string()); + } else { + tracing::error!("Group {} not found", group); + } + } + sysusers::Directive::SetRange(_, _) => (), + } + } + Ok(()) + } + + /// Apply to commands + #[rune::function] + fn apply(self, cmds: &mut Commands) -> anyhow::Result<()> { + self.sanity_check() + .inspect_err(|e| tracing::error!("Sanity check when applying passwd failed: {e}"))?; + let mut passwd = String::new(); + let mut shadow = String::new(); + let users = self.users.values().sorted().collect_vec(); + let groups = self.groups.values().sorted().collect_vec(); + for user in users { + writeln!(passwd, "{}", user.format_passwd(&self.groups))?; + writeln!(shadow, "{}", 
user.format_shadow())?; + } + let mut groups_contents = String::new(); + let mut gshadow = String::new(); + for group in groups { + writeln!(groups_contents, "{}", group.format_group())?; + writeln!(gshadow, "{}", group.format_gshadow())?; + } + for suffix in ["", "-"] { + cmds.write(&format!("/etc/passwd{suffix}"), passwd.as_bytes())?; + cmds.write(&format!("/etc/group{suffix}"), groups_contents.as_bytes())?; + let shadow_file = format!("/etc/shadow{suffix}"); + cmds.write(&shadow_file, shadow.as_bytes())?; + let gshadow_file = format!("/etc/gshadow{suffix}"); + cmds.write(&gshadow_file, gshadow.as_bytes())?; + if suffix == "-" { + // This is already set by package management for the main files + cmds.chmod(&shadow_file, Value::Integer(0o600))?; + cmds.chmod(&gshadow_file, Value::Integer(0o600))?; + } + } + Ok(()) + } +} + +/// Represents a user +#[derive(Any, Debug, Clone, Eq, PartialEq, PartialOrd, Ord)] +#[rune(item = ::passwd)] +struct User { + // passwd info + /// User ID + #[rune(get, set)] + uid: u32, + /// Username + #[rune(get, set)] + name: String, + /// Group name + #[rune(get, set)] + group: String, + /// User information + #[rune(get, set)] + gecos: String, + /// Home directory + #[rune(get, set)] + home: String, + /// Path to shell + #[rune(get, set)] + shell: String, + + // Shadow info + /// User password (probably hashed) + #[rune(get, set)] + passwd: String, + + /// Last password change (days since epoch) + #[rune(get, set)] + change: Option, + /// Min password age (days) + #[rune(get, set)] + min: Option, + /// Max password age (days) + #[rune(get, set)] + max: Option, + /// Password warning period (days) + #[rune(get, set)] + warn: Option, + /// Password inactivity period (days) + #[rune(get, set)] + inact: Option, + /// Account expiration date (days since epoch) + #[rune(get, set)] + expire: Option, +} + +/// Rust API +impl User { + fn format_passwd(&self, groups: &Groups) -> String { + format!( + "{name}:x:{uid}:{gid}:{gecos}:{dir}:{shell}", + name = self.name, + uid = self.uid, + gid = groups.get(&self.group).map(|g| g.gid).unwrap_or(0), + gecos = self.gecos, + dir = self.home, + shell = self.shell, + ) + } + + fn format_shadow(&self) -> String { + let f64 = |v: Option| v.map(|v| format!("{v}")).unwrap_or("".into()); + let f32 = |v: Option| v.map(|v| format!("{v}")).unwrap_or("".into()); + format!( + "{name}:{passwd}:{change}:{min}:{max}:{warn}:{inact}:{expire}:", + name = self.name, + passwd = self.passwd, + change = f64(self.change), + min = f32(self.min), + max = f32(self.max), + warn = f32(self.warn), + inact = f32(self.inact), + expire = f64(self.expire), + ) + } +} + +/// Rune API +impl User { + /// Create a new User + /// + /// This is optimised for a system user with sensible defaults. 
+ /// + /// These defaults are: + /// * Home directory: `/` + /// * Shell: `/usr/bin/nologin` + /// * Password: `!*` (no login) + /// * No password expiration/age/warning/etc + /// * No account expiration + #[rune::function(path = Self::new)] + fn new(uid: u32, name: String, group: String, gecos: String) -> Self { + Self { + uid, + name, + group, + gecos, + home: "/".into(), + shell: "/usr/bin/nologin".into(), + passwd: "!*".into(), + change: None, + min: None, + max: None, + warn: None, + inact: None, + expire: None, + } + } +} + +/// Represents a group +#[derive(Any, Debug, Clone, Eq, PartialEq, PartialOrd, Ord)] +#[rune(item = ::passwd)] +struct Group { + /// Group ID + #[rune(get, set)] + gid: u32, + /// Group name + #[rune(get, set)] + name: String, + /// Group members + members: BTreeSet, + + // Shadow info + /// Password for group (probably hashed) + #[rune(get, set)] + passwd: String, + // Administrators + admins: BTreeSet, +} + +/// Rust API +impl Group { + fn format_group(&self) -> String { + let members = self + .members + .iter() + .map(String::as_str) + .collect::>() + .join(","); + format!("{name}:x:{gid}:{members}", name = self.name, gid = self.gid,) + } + + fn format_gshadow(&self) -> String { + let members = self + .members + .iter() + .map(String::as_str) + .collect::>() + .join(","); + let admins = self + .admins + .iter() + .map(String::as_str) + .collect::>() + .join(","); + format!( + "{name}:{passwd}:{admins}:{members}", + name = self.name, + passwd = self.passwd, + members = members, + admins = admins, + ) + } +} + +/// Rune API +impl Group { + /// Create a new group + #[rune::function(path = Self::new)] + fn new(name: String, gid: u32) -> Self { + Self { + name, + gid, + members: BTreeSet::new(), + passwd: "!*".into(), + admins: BTreeSet::new(), + } + } +} + +#[rune::module(::passwd)] +/// Utilities for patching file contents conveniently. +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.ty::()?; + m.ty::()?; + + m.function_meta(Passwd::new)?; + m.function_meta(Passwd::add_user)?; + m.function_meta(Passwd::add_group)?; + m.function_meta(Passwd::add_user_with_group)?; + m.function_meta(Passwd::add_user_to_groups)?; + m.function_meta(Passwd::add_user_to_groups_as_admin)?; + m.function_meta(Passwd::add_from_sysusers__meta)?; + m.function_meta(Passwd::passwd_from_system)?; + m.function_meta(Passwd::align_ids_with_system)?; + m.function_meta(Passwd::update_group)?; + m.function_meta(Passwd::update_user)?; + m.function_meta(Passwd::apply)?; + m.function_meta(User::new)?; + m.function_meta(Group::new)?; + + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/passwd/sysusers.rs b/crates/konfigkoll_script/src/plugins/passwd/sysusers.rs new file mode 100644 index 00000000..029d88d1 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/passwd/sysusers.rs @@ -0,0 +1,416 @@ +//! Parser for systemd sysusers.d files. 
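+//!
+//! Each non-blank, non-comment line is a single directive: a user (`u`), a
+//! group (`g`), a group membership (`m`) or an ID range (`r`). For example:
+//!
+//! ```text
+//! u user 1000:2000 "GECOS" /home/user /bin/bash
+//! g group 1000
+//! m user group
+//! r - 500-999
+//! ```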
+ +use compact_str::CompactString; +use winnow::ascii::{dec_uint, escaped_transform, newline, space1}; +use winnow::combinator::{alt, delimited, opt, separated, trace}; +use winnow::error::{ContextError, StrContext}; +use winnow::stream::Accumulate; +use winnow::token::take_till; +use winnow::PResult; +use winnow::Parser; + +/// Sub-error type for the first splitting layer +#[derive(Debug, PartialEq)] +pub(super) struct SysusersParseError { + message: String, + pos: usize, + input: String, +} + +impl SysusersParseError { + pub(super) fn from_parse<'input>( + error: &winnow::error::ParseError<&'input str, ContextError>, + input: &'input str, + ) -> Self { + let message = error.inner().to_string(); + let input = input.to_owned(); + Self { + message, + pos: error.offset(), + input, + } + } +} + +impl std::fmt::Display for SysusersParseError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let pos = self.pos; + let input = &self.input; + let message = &self.message; + write!( + f, + "Error at position {}: {}\n{}\n{}^", + pos, + message, + &input[..pos], + " ".repeat(pos) + ) + } +} + +impl std::error::Error for SysusersParseError {} + +#[derive(Debug, PartialEq, Eq)] +pub(super) enum Directive { + Comment, + User(User), + Group(Group), + AddUserToGroup { + user: CompactString, + group: CompactString, + }, + SetRange(u32, u32), +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) struct User { + pub name: CompactString, + pub id: Option, + pub gecos: Option, + pub home: Option, + pub shell: Option, +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) enum UserId { + Uid(u32), + UidGid(u32, u32), + UidGroup(u32, CompactString), + FromPath(CompactString), +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) enum GroupId { + Gid(u32), + FromPath(CompactString), +} + +#[derive(Debug, PartialEq, Eq)] +pub(super) struct Group { + pub name: CompactString, + pub id: Option, +} + +/// Top level parser +pub(super) fn parse_file(i: &mut &str) -> PResult> { + let alternatives = ( + comment + .map(|_| Directive::Comment) + .context(StrContext::Label("comment")), + user.context(StrContext::Label("user")), + group.context(StrContext::Label("group")), + add_to_group.context(StrContext::Label("add_to_group")), + set_range.context(StrContext::Label("set_range")), + "".map(|_| Directive::Comment) + .context(StrContext::Label("whitespace")), // Blank lines + ); + (separated(0.., alt(alternatives), newline), opt(newline)) + .map(|(val, _)| val) + .parse_next(i) +} + +/// Helper to `directive` to flatten the optional tuple +fn flattener(e: Option<(&str, Option)>) -> Option { + e.and_then(|(_, arg)| arg) +} + +fn user(i: &mut &str) -> PResult { + let entry_type = 'u'; + let user_name = any_string.context(StrContext::Label("user_name")); + let id = user_id_parser.context(StrContext::Label("id")); + let gecos = optional_string.context(StrContext::Label("gecos")); + let home_dir = optional_string.context(StrContext::Label("home")); + let shell = optional_string.context(StrContext::Label("shell")); + + let mut parser = ( + entry_type, + space1, + user_name, + opt((space1, id)).map(flattener), + opt((space1, gecos)).map(flattener), + opt((space1, home_dir)).map(flattener), + opt((space1, shell)).map(flattener), + ) + .map(|(_, _, name, id, gecos, home, shell)| { + Directive::User(User { + name, + id, + gecos, + home, + shell, + }) + }); + parser.parse_next(i) +} + +fn group(i: &mut &str) -> PResult { + let entry_type = 'g'; + let path = any_string.context(StrContext::Label("group_name")); + let id = 
group_id_parser.context(StrContext::Label("id")); + + let mut parser = ( + entry_type, + space1, + path, + opt((space1, id)).map(flattener), + opt((space1, '-')), + opt((space1, '-')), + ) + .map(|(_, _, name, id, _, _)| Directive::Group(Group { name, id })); + parser.parse_next(i) +} + +fn add_to_group(i: &mut &str) -> PResult { + let entry_type = 'm'; + let user = any_string.context(StrContext::Label("user_name")); + let group = any_string.context(StrContext::Label("group_name")); + + let mut parser = (entry_type, space1, user, space1, group) + .map(|(_, _, user, _, group)| Directive::AddUserToGroup { user, group }); + parser.parse_next(i) +} + +fn set_range(i: &mut &str) -> PResult { + let entry_type = 'r'; + let name = '-'; + let range = range_parser.context(StrContext::Label("range")); + + let mut parser = (entry_type, space1, name, space1, range) + .map(|(_, _, _, _, range)| Directive::SetRange(range.0, range.1)); + parser.parse_next(i) +} + +fn user_id_parser(i: &mut &str) -> PResult> { + let mut parser = alt(( + ('-').map(|_| None), + (dec_uint, ':', dec_uint).map(|(uid, _, gid)| Some(UserId::UidGid(uid, gid))), + (dec_uint, ':', name).map(|(uid, _, group)| Some(UserId::UidGroup(uid, group))), + (dec_uint).map(|uid| Some(UserId::Uid(uid))), + name.map(|path| Some(UserId::FromPath(path))), + )); + parser.parse_next(i) +} +fn group_id_parser(i: &mut &str) -> PResult> { + let mut parser = alt(( + ('-').map(|_| None), + (dec_uint).map(|id| Some(GroupId::Gid(id))), + name.map(|path| Some(GroupId::FromPath(path))), + )); + parser.parse_next(i) +} + +fn range_parser(i: &mut &str) -> PResult<(u32, u32)> { + let mut parser = (dec_uint, '-', dec_uint).map(|(start, _, end)| (start, end)); + parser.parse_next(i) +} + +/// A comment +fn comment(i: &mut &str) -> PResult<()> { + ('#', take_till(0.., ['\n', '\r'])).void().parse_next(i) +} + +fn optional_string(i: &mut &str) -> PResult> { + // - is None, otherwise string + alt(('-'.value(None), any_string.map(Some))).parse_next(i) +} + +fn any_string(i: &mut &str) -> PResult { + trace( + "any_string", + alt(( + quoted_string, + single_quoted_string, + unquoted_string_with_escapes, + )), + ) + .parse_next(i) +} + +/// Quoted string value +fn single_quoted_string(i: &mut &str) -> PResult { + delimited( + '\'', + escaped_transform(take_till(1.., ['\'', '\\']), '\\', escapes), + '\'', + ) + .map(|s: CompactStringWrapper| s.0) + .parse_next(i) +} + +/// Quoted string value +fn quoted_string(i: &mut &str) -> PResult { + delimited( + '"', + escaped_transform(take_till(1.., ['"', '\\']), '\\', escapes), + '"', + ) + .map(|s: CompactStringWrapper| s.0) + .parse_next(i) +} + +/// Unquoted string value +fn unquoted_string_with_escapes(i: &mut &str) -> PResult { + escaped_transform(take_till(1.., [' ', '\t', '\n', '\r', '\\']), '\\', escapes) + .map(|s: CompactStringWrapper| s.0) + .parse_next(i) +} + +/// A valid name +fn name(i: &mut &str) -> PResult { + take_till(1.., [' ', '\t', '\n', '\r']) + .map(CompactString::from) + .parse_next(i) +} + +fn escapes<'input>(i: &mut &'input str) -> PResult<&'input str> { + alt(( + "n".value("\n"), + "r".value("\r"), + "t".value("\t"), + " ".value(" "), + "\"".value("\""), + "\\".value("\\"), + )) + .parse_next(i) +} + +/// Wrapper to get around coherence issues +#[repr(transparent)] +struct CompactStringWrapper(CompactString); + +impl<'i> Accumulate<&'i str> for CompactStringWrapper { + fn initial(capacity: Option) -> Self { + match capacity { + Some(capacity) => 
CompactStringWrapper(CompactString::with_capacity(capacity)), + None => CompactStringWrapper(CompactString::new("")), + } + } + + fn accumulate(&mut self, acc: &'i str) { + self.0.push_str(acc); + } +} + +#[cfg(test)] +mod tests { + + use super::*; + + use pretty_assertions::assert_eq; + + #[test] + fn test_comment() { + let input = "# This is a comment\nblah"; + let (rest, _) = comment.parse_peek(input).unwrap(); + assert_eq!(rest, "\nblah"); + } + + #[test] + fn test_user() { + let input = "u user 1000:2000 \"GECOS quux\" /home/user /bin/bash\n"; + let expected = Directive::User(User { + name: "user".into(), + id: Some(UserId::UidGid(1000, 2000)), + gecos: Some("GECOS quux".into()), + home: Some("/home/user".into()), + shell: Some("/bin/bash".into()), + }); + let (rest, result) = user.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + } + + #[test] + fn test_group() { + let input = "g group 1000 - -\n"; + let expected = Directive::Group(Group { + name: "group".into(), + id: Some(GroupId::Gid(1000)), + }); + let (rest, result) = group.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + + let input = "g group -\n"; + let expected = Directive::Group(Group { + name: "group".into(), + id: None, + }); + let (rest, result) = group.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + + let input = "g group /path/to/group\n"; + let expected = Directive::Group(Group { + name: "group".into(), + id: Some(GroupId::FromPath("/path/to/group".into())), + }); + let (rest, result) = group.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + } + + #[test] + fn test_add_to_group() { + let input = "m user group\n"; + let expected = Directive::AddUserToGroup { + user: "user".into(), + group: "group".into(), + }; + let (rest, result) = add_to_group.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + } + + #[test] + fn test_set_range() { + let input = "r - 500-999\n"; + let expected = Directive::SetRange(500, 999); + let (rest, result) = set_range.parse_peek(input).unwrap(); + assert_eq!(rest, "\n"); + assert_eq!(result, expected); + } + + #[test] + fn test_parse_file() { + let input = indoc::indoc!( + r#"# This is a comment + u user 1000:2000 "GECOS quux" /home/user /bin/bash + u user 1001 "GECOS bar" + g group 1000 + m user group + r - 500-999 + "# + ); + let expected = vec![ + Directive::Comment, + Directive::User(User { + name: "user".into(), + id: Some(UserId::UidGid(1000, 2000)), + gecos: Some("GECOS quux".into()), + home: Some("/home/user".into()), + shell: Some("/bin/bash".into()), + }), + Directive::User(User { + name: "user".into(), + id: Some(UserId::Uid(1001)), + gecos: Some("GECOS bar".into()), + home: None, + shell: None, + }), + Directive::Group(Group { + name: "group".into(), + id: Some(GroupId::Gid(1000)), + }), + Directive::AddUserToGroup { + user: "user".into(), + group: "group".into(), + }, + Directive::SetRange(500, 999), + Directive::Comment, + ]; + let (rest, result) = parse_file.parse_peek(input).unwrap(); + assert_eq!(rest, ""); + assert_eq!(result, expected); + } +} diff --git a/crates/konfigkoll_script/src/plugins/patch.rs b/crates/konfigkoll_script/src/plugins/patch.rs new file mode 100644 index 00000000..2f792de2 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/patch.rs @@ -0,0 +1,231 @@ +//! Facilities to patch a file compared to the default package provided one. 
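+//!
+//! A rough sketch of the Rune side (the regex, replacement and the `original`
+//! variable are purely illustrative):
+//!
+//! ```rune
+//! let editor = patch::LineEditor::new();
+//! editor.add(patch::Selector::Regex("^#Color"), patch::Action::Replace("Color"))?;
+//! let patched = editor.apply(original);
+//! ```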
+ +use std::{borrow::Cow, cell::RefCell, rc::Rc}; + +use anyhow::Context; +use regex::Regex; +use rune::{ + runtime::{Shared, VmResult}, + Any, ContextError, Module, +}; + +/// A simple line editor, like sed +#[derive(Debug, Default, Any)] +#[rune(item = ::patch)] +struct LineEditor { + inner: Rc>, +} + +impl LineEditor { + /// Create a new empty line editor + #[rune::function(path = Self::new)] + fn new() -> Self { + Default::default() + } + + /// Add a new rule to the line editor. + /// + /// Returns a Result<()>, where the error variant can happen on invalid regexes. + #[rune::function] + pub fn add(&mut self, selector: &Selector, action: &Action) -> anyhow::Result<()> { + self.inner + .borrow_mut() + .add(selector.try_into()?, false, action.try_into()?); + Ok(()) + } + + /// Add a new rule where the selector condition has been inverted to the line editor + /// + /// Returns a Result<()>, where the error variant can happen on invalid regexes. + #[rune::function] + pub fn add_inverted(&mut self, selector: &Selector, action: &Action) -> anyhow::Result<()> { + self.inner + .borrow_mut() + .add(selector.try_into()?, true, action.try_into()?); + Ok(()) + } + + /// Apply the line editor to a string + #[rune::function] + fn apply(&self, text: &str) -> String { + self.inner.borrow().apply(text) + } + + /// Clone the line editor, allowing "forking it" into two different related variants + #[rune::function] + fn clone(&self) -> Self { + Self { + inner: Rc::new(RefCell::new(self.inner.borrow().clone())), + } + } +} + +/// Selects if a line should be edited by [`LineEditor`] or not +#[derive(Debug, Any)] +#[rune(item = ::patch)] +enum Selector { + /// Match all lines + #[rune(constructor)] + All, + /// End of file + #[rune(constructor)] + Eof, + /// Match a specific line number (1-indexed) + #[rune(constructor)] + Line(#[rune(get)] usize), + /// A range of line numbers (1-indexed, inclusive) + #[rune(constructor)] + Range(#[rune(get)] usize, #[rune(get)] usize), + /// A regex to match the line + #[rune(constructor)] + Regex(#[rune(get)] String), + /// A custom function, passed the line number and current line, returning a bool + #[rune(constructor)] + Function(#[rune(get)] Shared), +} + +impl TryFrom<&Selector> for konfigkoll_core::line_edit::Selector { + type Error = anyhow::Error; + + fn try_from(value: &Selector) -> Result { + match value { + Selector::All => Ok(Self::All), + Selector::Eof => Ok(Self::Eof), + Selector::Line(n) => Ok(Self::Line(*n)), + Selector::Range(a, b) => Ok(Self::Range(*a, *b)), + Selector::Regex(r) => Ok(Self::Regex(Regex::new(r).context("invalid regex")?)), + Selector::Function(ref f) => { + let f = f.clone(); + Ok(Self::Function(Rc::new(move |lineno, s| { + let guard = f.borrow_mut().expect("Failed to borrow function object"); + match guard.call::<_, bool>((lineno, s)) { + VmResult::Ok(v) => v, + VmResult::Err(e) => { + tracing::error!( + "Error in custom selector function {:?}: {:?}", + *guard, + e + ); + false + } + } + }))) + } + } + } +} + +/// Action to perform on a line when matched by a [`Selector`] +#[derive(Debug, Any)] +#[rune(item = ::patch)] +enum Action { + /// Copy the current line to the output. Only needed when auto-print is disabled. 
+ #[rune(constructor)] + Print, + /// Delete the current line and short circuit the rest of the program (immediately go to the next line) + #[rune(constructor)] + Delete, + /// Replace pattern space with next line (will print unless auto-print is disabled) + #[rune(constructor)] + NextLine, + /// Stop processing the input and program and terminate early (do not print rest of file) + #[rune(constructor)] + Stop, + /// Stop processing the input and program and terminate early (auto-print rest of file) + #[rune(constructor)] + StopAndPrint, + /// Insert a new line *before* the current line + #[rune(constructor)] + InsertBefore(#[rune(get)] String), + /// Insert a new line *after* the current line + #[rune(constructor)] + InsertAfter(#[rune(get)] String), + /// Replace the entire current string with the given string + #[rune(constructor)] + Replace(#[rune(get)] String), + /// Do a regex search and replace in the current line. + /// + /// Only the first match is replaced in any given line. + /// + /// Capture groups in the replacement string works as with `::regex::Regex`. + #[rune(constructor)] + RegexReplace(#[rune(get)] String, #[rune(get)] String), + /// Like `RegexReplace` but replaces all matches on the line. + #[rune(constructor)] + RegexReplaceAll(#[rune(get)] String, #[rune(get)] String), + /// A sub-program that is executed. Will share pattern space with parent program + Subprogram(LineEditor), + /// A custom function passed the current pattern buffer, returning a new pattern buffer + #[rune(constructor)] + Function(#[rune(get)] Shared), +} + +impl Action { + /// Create an action for a nested sub-program + #[rune::function(path = Self::sub_program)] + fn sub_program(sub: LineEditor) -> Self { + Self::Subprogram(sub) + } +} + +impl TryFrom<&Action> for konfigkoll_core::line_edit::Action { + type Error = anyhow::Error; + + fn try_from(value: &Action) -> Result { + match value { + Action::Print => Ok(Self::Print), + Action::Delete => Ok(Self::Delete), + Action::Stop => Ok(Self::Stop), + Action::StopAndPrint => Ok(Self::StopAndPrint), + Action::InsertBefore(s) => Ok(Self::InsertBefore(s.into())), + Action::InsertAfter(s) => Ok(Self::InsertAfter(s.into())), + Action::Replace(s) => Ok(Self::Replace(s.into())), + Action::RegexReplace(a, b) => Ok(Self::RegexReplace { + regex: Regex::new(a)?, + replacement: b.into(), + replace_all: false, + }), + Action::RegexReplaceAll(a, b) => Ok(Self::RegexReplace { + regex: Regex::new(a)?, + replacement: b.into(), + replace_all: true, + }), + Action::Function(ref f) => { + let f = f.clone(); + Ok(Self::Function(Rc::new(move |s| { + let guard = f.borrow_mut().expect("Failed to borrow function object"); + match guard.call::<_, String>((s,)) { + VmResult::Ok(v) => Cow::Owned(v), + VmResult::Err(e) => { + tracing::error!( + "Error in custom action function {:?}: {:?}", + *guard, + e + ); + Cow::Borrowed(s) + } + } + }))) + } + Action::NextLine => Ok(Self::NextLine), + Action::Subprogram(sub) => Ok(Self::Subprogram(sub.inner.clone())), + } + } +} + +#[rune::module(::patch)] +/// Utilities for patching file contents conveniently. 
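A second hedged sketch, combining `add_inverted` with the `Eof` selector and `InsertAfter` action declared above; the marker text and function name are purely illustrative.

```rune
// Sketch only: keep lines containing "# keep" and append a trailing marker line.
fn keep_marked(contents) {
    let editor = patch::LineEditor::new();
    // Inverted rule: delete every line that does NOT match the selector
    editor.add_inverted(patch::Selector::Regex("# keep"), patch::Action::Delete)?;
    // At end of input, append one extra line
    editor.add(patch::Selector::Eof,
               patch::Action::InsertAfter("# managed by konfigkoll"))?;
    Ok(editor.apply(contents))
}
```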
+pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(LineEditor::new)?; + m.function_meta(LineEditor::apply)?; + m.function_meta(LineEditor::add)?; + m.function_meta(LineEditor::add_inverted)?; + m.function_meta(LineEditor::clone)?; + m.ty::()?; + m.ty::()?; + m.function_meta(Action::sub_program)?; + + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/process.rs b/crates/konfigkoll_script/src/plugins/process.rs new file mode 100644 index 00000000..80cf6e8b --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/process.rs @@ -0,0 +1,439 @@ +// Copyright: +// +// This is based on the process module from rune-rs which is dual licensed under +// the MIT and Apache 2.0 licenses. See +// https://github.com/rune-rs/rune/blob/0.13.x/crates/rune-modules/src/process.rs +// for the original source code. +// +// Parts of the documentation is also copied from tokio and the Rust standard library +// These are *also* dual licensed under the MIT and Apache 2.0 licenses. +// See: +// * https://docs.rs/tokio/1.39.1/tokio/process/index.html +// * https://doc.rust-lang.org/stable/std/process/index.html +// (The docs were not copied from the source but from the generated documentation) + +//! The native `process` module for the [Rune Language]. +//! +//! [Rune Language]: https://rune-rs.github.io +//! +//! ## Usage +//! +//! Add the following to your `Cargo.toml`: +//! +//! ```toml +//! rune-modules = { version = "0.13.3", features = ["process"] } +//! ``` +//! +//! Install it into your context: +//! +//! ```rust +//! let mut context = rune::Context::with_default_modules()?; +//! context.install(rune_modules::process::module(true)?)?; +//! # Ok::<_, rune::support::Error>(()) +//! ``` +//! +//! Use it in Rune: +//! +//! ```rust,ignore +//! use process::Command; +//! +//! fn main() { +//! let command = Command::new("ls"); +//! command.run().await; +//! } +//! ``` + +use rune::alloc::fmt::TryWrite; +use rune::alloc::Vec; +use rune::runtime::{Bytes, Formatter, Mut, Shared, Value, VmResult}; +use rune::{vm_try, vm_write, Any, ContextError, Module}; + +use std::io; +use tokio::process; + +/// A module for working with processes. +/// +/// This allows spawning child processes, capturing their output, and creating pipelines. 
+#[rune::module(::process)] +pub fn module(_stdio: bool) -> Result { + let mut module = Module::from_meta(self::module_meta)?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + module.ty::()?; + + module.function_meta(Command::string_debug)?; + module.function_meta(Command::new)?; + module.function_meta(Command::spawn)?; + module.function_meta(Command::arg)?; + module.function_meta(Command::args)?; + #[cfg(unix)] + module.function_meta(Command::arg0)?; + module.function_meta(Command::stdin)?; + module.function_meta(Command::stdout)?; + module.function_meta(Command::stderr)?; + + module.function_meta(Child::string_debug)?; + module.function_meta(Child::stdin)?; + module.function_meta(Child::stdout)?; + module.function_meta(Child::stderr)?; + module.function_meta(Child::id)?; + module.function_meta(Child::start_kill)?; + module.function_meta(Child::kill)?; + module.function_meta(Child::wait)?; + module.function_meta(Child::wait_with_output)?; + + module.function_meta(ExitStatus::string_debug)?; + module.function_meta(ExitStatus::string_display)?; + module.function_meta(ExitStatus::code)?; + module.function_meta(ExitStatus::success)?; + + module.function_meta(Output::string_debug)?; + module.function_meta(Stdio::null)?; + module.function_meta(Stdio::inherit)?; + module.function_meta(Stdio::piped)?; + + module.function_meta(ChildStdin::string_debug)?; + module.function_meta(ChildStdin::try_into_stdio)?; + + module.function_meta(ChildStdout::string_debug)?; + module.function_meta(ChildStdout::try_into_stdio)?; + + module.function_meta(ChildStderr::string_debug)?; + module.function_meta(ChildStderr::try_into_stdio)?; + + Ok(module) +} + +/// A builder for a child command to execute +#[derive(Debug, Any)] +#[rune(item = ::process)] +struct Command { + inner: process::Command, +} + +impl Command { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// Construct a new command. + #[rune::function(path = Self::new)] + fn new(command: &str) -> Self { + Self { + inner: process::Command::new(command), + } + } + + /// Add arguments. + #[rune::function(instance)] + fn args(&mut self, args: &[Value]) -> VmResult<()> { + for arg in args { + match arg { + Value::String(s) => { + self.inner.arg(&*vm_try!(s.borrow_ref())); + } + actual => { + return VmResult::expected::(vm_try!(actual.type_info())); + } + } + } + + VmResult::Ok(()) + } + + /// Add an argument. + #[rune::function(instance)] + fn arg(&mut self, arg: &str) { + self.inner.arg(arg); + } + + #[cfg(unix)] + #[rune::function(instance)] + /// Set the first process argument, argv[0], to something other than the default executable path. (Unix only) + fn arg0(&mut self, arg: &str) { + self.inner.arg0(arg); + } + + /// Sets configuration for the child process’s standard input (stdin) handle. + #[rune::function(instance)] + fn stdin(&mut self, stdio: Stdio) { + self.inner.stdin(stdio.inner); + } + + /// Sets configuration for the child process’s standard output (stdout) handle. + #[rune::function(instance)] + fn stdout(&mut self, stdio: Stdio) { + self.inner.stdout(stdio.inner); + } + + /// Sets configuration for the child process’s standard error (stderr) handle. + #[rune::function(instance)] + fn stderr(&mut self, stdio: Stdio) { + self.inner.stderr(stdio.inner); + } + + /// Spawn the command. 
+ #[rune::function(instance)] + fn spawn(mut self) -> io::Result { + Ok(Child { + inner: Some(self.inner.spawn()?), + }) + } +} + +/// A running child process +#[derive(Debug, Any)] +#[rune(item = ::process)] +struct Child { + // we use an option to avoid a panic if we try to complete the child process + // multiple times. + // + // TODO: enapculate this pattern in some better way. + inner: Option, +} + +impl Child { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// Attempt to take the stdin of the child process. + /// + /// Once taken this can not be taken again. + #[rune::function(instance)] + fn stdin(&mut self) -> Option { + let inner = match &mut self.inner { + Some(inner) => inner, + None => return None, + }; + let stdin = inner.stdin.take()?; + Some(ChildStdin { inner: stdin }) + } + + /// Attempt to take the stdout of the child process. + /// + /// Once taken this can not be taken again. + #[rune::function(instance)] + fn stdout(&mut self) -> Option { + let inner = match &mut self.inner { + Some(inner) => inner, + None => return None, + }; + let stdout = inner.stdout.take()?; + Some(ChildStdout { inner: stdout }) + } + + /// Attempt to take the stderr of the child process. + /// + /// Once taken this can not be taken again. + #[rune::function(instance)] + fn stderr(&mut self) -> Option { + let inner = match &mut self.inner { + Some(inner) => inner, + None => return None, + }; + let stderr = inner.stderr.take()?; + Some(ChildStderr { inner: stderr }) + } + + /// Attempt to get the OS process id of the child process. + /// + /// This will return None after the child process has completed. + #[rune::function(instance)] + fn id(&self) -> Option { + match &self.inner { + Some(inner) => inner.id(), + None => None, + } + } + + #[rune::function(vm_result, instance)] + fn start_kill(&mut self) -> io::Result<()> { + let inner = match &mut self.inner { + Some(inner) => inner, + None => { + rune::vm_panic!("already completed"); + } + }; + + inner.start_kill() + } + + /// Sends a signal to the child process. + #[rune::function(vm_result, instance, path = Self::kill)] + async fn kill(mut this: Mut) -> io::Result<()> { + let inner = match &mut this.inner { + Some(inner) => inner, + None => { + rune::vm_panic!("already completed"); + } + }; + + inner.kill().await + } + + /// Attempt to wait for the child process to exit. + /// + /// This will not capture output, use [`wait_with_output`] for that. + #[rune::function(vm_result, instance)] + async fn wait(self) -> io::Result { + let mut inner = match self.inner { + Some(inner) => inner, + None => { + rune::vm_panic!("already completed"); + } + }; + + let status = inner.wait().await?; + + Ok(ExitStatus { status }) + } + + // Returns a future that will resolve to an Output, containing the exit + // status, stdout, and stderr of the child process. + #[rune::function(vm_result, instance)] + async fn wait_with_output(self) -> io::Result { + let inner = match self.inner { + Some(inner) => inner, + None => { + rune::vm_panic!("already completed"); + } + }; + + let output = inner.wait_with_output().await?; + + Ok(Output { + status: ExitStatus { + status: output.status, + }, + stdout: Shared::new(Bytes::from_vec(Vec::try_from(output.stdout).vm?)).vm?, + stderr: Shared::new(Bytes::from_vec(Vec::try_from(output.stderr).vm?)).vm?, + }) + } +} + +/// The output and exit status, returned by [`Child::wait_with_output`]. 
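As a rough illustration of how the pieces registered by this module fit together from a Rune script, a hedged sketch follows; the command, argument, and error string are example values, and only the methods registered above are used.

```rune
// Sketch only: spawn a command, capture stdout, and inspect the exit status.
async fn capture_ls() {
    let cmd = process::Command::new("ls");
    cmd.arg("/tmp");
    cmd.stdout(process::Stdio::piped());
    let child = cmd.spawn()?;
    let output = child.wait_with_output().await?;
    if output.status.success() {
        Ok(output.stdout)   // Bytes with the captured stdout
    } else {
        Err("ls failed")
    }
}
```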
+#[derive(Debug, Any)] +#[rune(item = ::process)] +struct Output { + #[rune(get)] + status: ExitStatus, + #[rune(get)] + stdout: Shared, + #[rune(get)] + stderr: Shared, +} + +impl Output { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } +} + +/// The exit status from a completed child process +#[derive(Debug, Clone, Copy, Any)] +#[rune(item = ::process)] +struct ExitStatus { + status: std::process::ExitStatus, +} + +impl ExitStatus { + #[rune::function(vm_result, protocol = STRING_DISPLAY)] + fn string_display(&self, f: &mut Formatter) { + vm_write!(f, "{}", self.status); + } + + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + #[rune::function] + fn success(&self) -> bool { + self.status.success() + } + + #[rune::function] + fn code(&self) -> Option { + self.status.code() + } +} + +/// Describes what to do with a standard I/O stream for a child process when passed to the stdin, stdout, and stderr methods of Command. +#[derive(Debug, Any)] +#[rune(item = ::process)] +struct Stdio { + inner: std::process::Stdio, +} + +impl Stdio { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// This stream will be ignored. This is the equivalent of attaching the stream to /dev/null. + #[rune::function(path = Self::null)] + fn null() -> Self { + Self { + inner: std::process::Stdio::null(), + } + } + + /// The child inherits from the corresponding parent descriptor. This is the default. + #[rune::function(path = Self::inherit)] + fn inherit() -> Self { + Self { + inner: std::process::Stdio::inherit(), + } + } + + /// A new pipe should be arranged to connect the parent and child processes. + #[rune::function(path = Self::piped)] + fn piped() -> Self { + Self { + inner: std::process::Stdio::piped(), + } + } +} + +macro_rules! stdio_stream { + ($name:ident, $stream:tt) => { + #[derive(Debug, Any)] + #[rune(item = ::process)] + #[doc = concat!("The ", $stream, " stream for spawned children.")] + struct $name { + inner: process::$name, + } + + impl $name { + #[rune::function(vm_result, protocol = STRING_DEBUG)] + fn string_debug(&self, f: &mut Formatter) { + vm_write!(f, "{:?}", self); + } + + /// Try to convert into a `Stdio`, which allows creating a pipeline between processes. + /// + /// This consumes the stream, as it can only be used once. + /// + /// Returns a Result + #[rune::function(instance)] + fn try_into_stdio(self) -> Result { + Ok(Stdio { + inner: self.inner.try_into()?, + }) + } + } + }; +} +stdio_stream!(ChildStdin, "stdin"); +stdio_stream!(ChildStdout, "stdout"); +stdio_stream!(ChildStderr, "stderr"); diff --git a/crates/konfigkoll_script/src/plugins/properties.rs b/crates/konfigkoll_script/src/plugins/properties.rs new file mode 100644 index 00000000..e17a8a9b --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/properties.rs @@ -0,0 +1,61 @@ +//! Functions for properties +use ahash::AHashMap; +use compact_str::CompactString; +use itertools::Itertools; +use rune::ContextError; +use rune::Module; + +pub type PropertyKey = CompactString; +pub type PropertyValue = rune::runtime::Value; + +/// Configuration and persistent (across phases) properties. +/// +/// It is recommended to store properties using structrured system, such as +/// creating a hierarchy separated by `.`. But it is up to you. 
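A hedged usage sketch for these properties; `props` is assumed to be the `Properties` instance konfigkoll hands to the script, and the key names simply follow the dotted-hierarchy suggestion above.

```rune
// Sketch only: set, query and dump persistent properties between phases.
fn setup_props(props) {
    props.set("host.role", "server");
    if !props.has("net.ipv6") {
        props.set("net.ipv6", true);
    }
    let role = props.get("host.role");
    props.dump(); // debug: print all properties
}
```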
+#[derive(Debug, Default, rune::Any)] +#[rune(item = ::properties)] +pub(crate) struct Properties { + properties: AHashMap, +} + +impl Properties { + /// Get a user defined property + /// + /// Will return `()` if the property does not exist. + #[rune::function] + pub fn get(&self, name: &str) -> Option { + self.properties.get(name).cloned() + } + + /// Set a user defined property + #[rune::function] + pub fn set(&mut self, name: &str, value: PropertyValue) { + self.properties.insert(name.into(), value); + } + + /// Check if a property exists + #[rune::function] + pub fn has(&self, name: &str) -> bool { + self.properties.contains_key(name) + } + + /// Dump all properties to the terminal. For debugging + #[rune::function] + pub fn dump(&self) { + for (key, value) in self.properties.iter().sorted_by(|a, b| a.0.cmp(b.0)) { + println!("{} = {:?}", key, value); + } + } +} + +#[rune::module(::properties)] +/// User defined persistent (between phases) properties +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(Properties::get)?; + m.function_meta(Properties::set)?; + m.function_meta(Properties::has)?; + m.function_meta(Properties::dump)?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/regex.rs b/crates/konfigkoll_script/src/plugins/regex.rs new file mode 100644 index 00000000..d43955ac --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/regex.rs @@ -0,0 +1,93 @@ +//! Exposing regex to rhai + +use regex::Regex as InnerRegex; +use rune::{Any, ContextError, Module}; + +#[derive(Debug, Clone, Any)] +#[rune(item = ::regex)] +/// A UTF-8 string regular expression +pub struct Regex { + inner: InnerRegex, +} + +/// Rust API +impl Regex { + pub fn inner(&self) -> &InnerRegex { + &self.inner + } +} + +/// Rune API +impl Regex { + /// Create a new regex from a string + #[rune::function(path = Self::new)] + fn new(pattern: &str) -> anyhow::Result { + Ok(Self { + inner: InnerRegex::new(pattern)?, + }) + } + + /// Check if the regex matches the string + #[rune::function] + fn is_match(&self, text: &str) -> bool { + self.inner.is_match(text) + } + + /// Find the first match in the string + #[rune::function] + fn find(&self, text: &str) -> Option<(usize, usize)> { + self.inner.find(text).map(|m| (m.start(), m.end())) + } + + /// Replace the leftmost match in the string. + /// + /// Capture groups can be referred to via `$1`, `$2`, etc. (`$0` is the full match). + /// Named capture groups are supported via `$name`. + /// You can also use `${name}` or `${1}` etc, which is often needed to disambiguate + /// when a capture group number is followed by literal text. + #[rune::function] + fn replace(&self, text: &str, replace: &str) -> String { + self.inner.replace(text, replace).to_string() + } + + /// Replace all matches in the string + /// + /// Capture groups can be referred to via `$1`, `$2`, etc. (`$0` is the full match). + /// Named capture groups are supported via `$name`. + #[rune::function] + fn replace_all(&self, text: &str, replace: &str) -> String { + self.inner.replace_all(text, replace).to_string() + } + + /// Capture groups + /// + /// * If no match is found returns `None`. + /// * Otherwise Some(vector of optional strings) where: + /// * The first group (index 0) is the full match as `Some(value)`. + /// * The rest are the capture groups. If they didn't match the are `None`. + /// Otherwise they are `Some(value)`. 
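A hedged sketch of the wrapper in use from a script; the pattern, input, and replacement are example values, and only `new`, `is_match`, `captures`, and `replace_all` come from this module.

```rune
// Sketch only: match KEY=VALUE lines and redact the value.
fn redact_kv(line) {
    let re = regex::Regex::new("^(\\w+)=(\\w+)$")?;
    if !re.is_match(line) {
        return Ok(None);
    }
    // Index 0 is the full match, 1 and 2 are the capture groups
    let caps = re.captures(line);
    // $1, $2 (or ${name}) refer to capture groups in the replacement
    let masked = re.replace_all(line, "$1=REDACTED");
    Ok(Some((caps, masked)))
}
```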
+ #[rune::function] + fn captures(&self, text: &str) -> Option>> { + let captures = self.inner.captures(text)?; + Some( + captures + .iter() + .map(|m| m.map(|v| v.as_str().to_string())) + .collect(), + ) + } +} + +#[rune::module(::regex)] +/// A wrapper for the rust regex crate +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(Regex::new)?; + m.function_meta(Regex::is_match)?; + m.function_meta(Regex::find)?; + m.function_meta(Regex::replace)?; + m.function_meta(Regex::replace_all)?; + m.function_meta(Regex::captures)?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/settings.rs b/crates/konfigkoll_script/src/plugins/settings.rs new file mode 100644 index 00000000..3375e716 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/settings.rs @@ -0,0 +1,210 @@ +//! Konfigkoll settings + +use ahash::AHashSet; +use anyhow::Context; +use camino::Utf8PathBuf; +use parking_lot::Mutex; +use rune::ContextError; +use rune::Module; +use std::str::FromStr; + +const DEFAULT_EARLY: &[&str] = &["/etc/passwd", "/etc/group", "/etc/shadow", "/etc/gshadow"]; +const DEFAUT_SENSITIVE: &[&str] = &["/etc/shadow", "/etc/gshadow"]; + +/// Configuration of how konfigkoll should behave. +#[derive(Debug, rune::Any)] +#[rune(item = ::settings)] +pub struct Settings { + file_backend: Mutex>, + enabled_pkg_backends: Mutex>, + /// Configuration files (such as `/etc/passwd`) that should be applied early, + /// before installing packages. + /// This is useful to assign the same IDs instead of auto assignment + early_configs: Mutex>, + /// Configuration files that are sensitive and should not be written with + /// `save` + sensitive_configs: Mutex>, + /// Diff tool to use for comparing files. Default is `diff`. 
+ diff: Mutex>, + /// Pager to use, default is to use $PAGER and fall back to `less` + pager: Mutex>, +} + +impl Default for Settings { + fn default() -> Self { + Self { + file_backend: Mutex::new(None), + enabled_pkg_backends: Mutex::new(AHashSet::new()), + early_configs: Mutex::new(AHashSet::from_iter(DEFAULT_EARLY.iter().map(Into::into))), + sensitive_configs: Mutex::new(AHashSet::from_iter( + DEFAUT_SENSITIVE.iter().map(Into::into), + )), + diff: Mutex::new(vec!["diff".into(), "-Naur".into()]), + pager: Mutex::new(vec![]), + } + } +} + +/// Rust API +impl Settings { + pub fn is_file_backend_enabled(&self, backend: paketkoll_types::backend::Backend) -> bool { + let guard = self.file_backend.lock(); + *guard == Some(backend) + } + + pub fn is_pkg_backend_enabled(&self, backend: paketkoll_types::backend::Backend) -> bool { + let guard = self.enabled_pkg_backends.lock(); + guard.contains(&backend) + } + + pub fn file_backend(&self) -> Option { + let guard = self.file_backend.lock(); + *guard + } + + pub fn enabled_pkg_backends(&self) -> impl Iterator { + let guard = self.enabled_pkg_backends.lock(); + let v: Vec<_> = guard.iter().cloned().collect(); + v.into_iter() + } + + pub fn early_configs(&self) -> impl Iterator { + let guard = self.early_configs.lock(); + let v: Vec<_> = guard.iter().cloned().collect(); + v.into_iter() + } + + pub fn sensitive_configs(&self) -> impl Iterator { + let guard = self.sensitive_configs.lock(); + let v: Vec<_> = guard.iter().cloned().collect(); + v.into_iter() + } + + /// Get diff tool to use + pub fn diff(&self) -> Vec { + let guard = self.diff.lock(); + guard.clone() + } + + /// Get preferred pager to use + pub fn pager(&self) -> Vec { + let guard = self.pager.lock(); + if guard.len() >= 1 { + guard.clone() + } else { + vec![std::env::var("PAGER").ok().unwrap_or_else(|| "less".into())] + } + } +} + +/// Rune API +impl Settings { + /// Set a package manager as the data source and target for file system checks. + /// + /// Unlike package manager backends, there can only be one of these (otherwise + /// the semantics would get confusing regarding which files are managed by which + /// tool). + /// + /// Valid values are: + /// * "pacman" (Arch Linux and derivatives) + /// * "apt" (Debian and derivatives) + /// + /// This will return an error on other values. + #[rune::function] + pub fn set_file_backend(&self, name: &str) -> anyhow::Result<()> { + let backend = paketkoll_types::backend::Backend::from_str(name) + .with_context(|| format!("Unknown backend {name}"))?; + let mut guard = self.file_backend.lock(); + if guard.is_some() { + tracing::warn!("File backend was set more than once"); + } + *guard = Some(backend); + + Ok(()) + } + + /// Enable a package manager or other backend as a data source and target for package operations. + /// + /// Multiple ones can be enabled at the same time (typically flatpak + native package manager). + /// + /// Valid values are: + /// * "pacman" (Arch Linux and derivatives) + /// * "apt" (Debian and derivatives) + /// * "flatpak" (Flatpak) + /// + /// This will return an error on other values. 
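To make the intended call pattern concrete, a hedged sketch of typical settings calls; `settings` is assumed to be the `Settings` instance konfigkoll passes to an early phase, and the backend names and diff command are example values taken from the lists above.

```rune
// Sketch only: one file backend, several package backends, custom diff tool.
fn configure(settings) {
    settings.set_file_backend("pacman")?;
    settings.enable_pkg_backend("pacman")?;
    settings.enable_pkg_backend("flatpak")?;
    settings.set_diff(["git", "diff", "--no-index"]);
    Ok(())
}
```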
+ #[rune::function] + pub fn enable_pkg_backend(&self, name: &str) -> anyhow::Result<()> { + let backend = paketkoll_types::backend::Backend::from_str(name) + .with_context(|| format!("Unknown backend {name}"))?; + + let before = self.enabled_pkg_backends.lock().insert(backend); + + if !before { + tracing::warn!("Package backend {name} was enabled more than once"); + } + + Ok(()) + } + + /// Add a configuration file that should be applied early (before package installation). + /// This is useful for files like `/etc/passwd` to assign the same IDs instead + /// of auto assignment at package installation. + /// + /// By default, `/etc/passwd`, `/etc/group`, `/etc/shadow`, and `/etc/gshadow` are already added. + #[rune::function] + pub fn early_config(&self, path: &str) { + let before = self.early_configs.lock().insert(path.into()); + + if !before { + tracing::warn!("Early config {path} was added more than once"); + } + } + + /// Set a configuration as sensitive, this will not be saved with `save`. + /// + /// This is intended for things like `/etc/shadow` and `/etc/gshadow` + /// (those are sensitive by default) to prevent accidental leaks. + /// + /// You can add more such files with this function. + #[rune::function] + pub fn sensitive_config(&self, path: &str) { + let before = self.sensitive_configs.lock().insert(path.into()); + + if !before { + tracing::warn!("Sensitive config {path} was added more than once"); + } + } + + /// Set the diff tool to use for comparing files. + /// + /// Default is `diff`. + #[rune::function] + pub fn set_diff(&self, cmd: Vec) { + let mut guard = self.diff.lock(); + *guard = cmd; + } + + /// Set the pager to use for viewing files. + /// + /// Default is to use `$PAGER` and fall back to `less`. + #[rune::function] + pub fn set_pager(&self, cmd: Vec) { + let mut guard = self.pager.lock(); + *guard = cmd; + } +} + +#[rune::module(::settings)] +/// Settings of how konfigkoll should behave. +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(Settings::set_file_backend)?; + m.function_meta(Settings::enable_pkg_backend)?; + m.function_meta(Settings::early_config)?; + m.function_meta(Settings::sensitive_config)?; + m.function_meta(Settings::set_diff)?; + m.function_meta(Settings::set_pager)?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/sysinfo.rs b/crates/konfigkoll_script/src/plugins/sysinfo.rs new file mode 100644 index 00000000..94489a29 --- /dev/null +++ b/crates/konfigkoll_script/src/plugins/sysinfo.rs @@ -0,0 +1,135 @@ +//! 
System information gathering +use konfigkoll_hwinfo::pci::{PciDevice, PciIdDb}; +use rune::{Any, ContextError, Module}; +use sysinfo::{CpuRefreshKind, MemoryRefreshKind}; + +/// System info access +#[derive(Debug, Any)] +#[rune(item = ::sysinfo)] +struct SysInfo { + inner: sysinfo::System, + cpu_loaded: bool, + pci_devices: Option>, + // TODO: Needed for future functionality + #[allow(dead_code)] + pci_db: Option, +} + +impl SysInfo { + /// Create a new system info object + #[rune::function(path = Self::new)] + fn new() -> Self { + Self { + inner: sysinfo::System::new(), + cpu_loaded: false, + pci_devices: None, + pci_db: None, + } + } + + /// Total amount of memory in kB + #[rune::function] + fn total_memory(&mut self) -> u64 { + self.inner + .refresh_memory_specifics(MemoryRefreshKind::new().with_ram()); + self.inner.total_memory() / 1024 + } + + /// The system architecture + #[rune::function] + fn architecture(&self) -> Option { + sysinfo::System::cpu_arch() + } + + /// The kernel version + #[rune::function] + fn kernel(&self) -> Option { + sysinfo::System::kernel_version() + } + + /// The DNS hostname + #[rune::function] + fn host_name(&self) -> Option { + sysinfo::System::host_name() + } + + /// The OS ID + /// + /// On Linux this corresponds to the `ID` field in `/etc/os-release`. + #[rune::function] + fn os_id(&self) -> String { + sysinfo::System::distribution_id() + } + + /// The OS version + /// + /// On Linux this corresponds to the `VERSION_ID` field in `/etc/os-release` or + /// `DISTRIB_RELEASE` in `/etc/lsb-release`. + #[rune::function] + fn os_version(&self) -> Option { + sysinfo::System::os_version() + } + + /// Number of physical CPU cores + #[rune::function] + fn cpu_cores(&self) -> Option { + self.inner.physical_core_count() + } + + /// Get the CPU vendor + #[rune::function] + fn cpu_vendor_id(&mut self) -> Option { + if !self.cpu_loaded { + self.inner.refresh_cpu_specifics(CpuRefreshKind::default()); + self.cpu_loaded = true; + } + self.inner + .cpus() + .first() + .map(|cpu| cpu.vendor_id().to_string()) + } + + /// Get the CPU vendor + #[rune::function] + fn cpu_brand(&mut self) -> Option { + if !self.cpu_loaded { + self.inner.refresh_cpu_specifics(CpuRefreshKind::default()); + self.cpu_loaded = true; + } + self.inner.cpus().first().map(|cpu| cpu.brand().to_string()) + } + + #[rune::function] + /// Get the PCI devices + fn pci_devices(&mut self) -> anyhow::Result> { + if self.pci_devices.is_none() { + let devices = konfigkoll_hwinfo::pci::load_pci_devices()?; + self.pci_devices = Some(devices.collect()); + } + + self.pci_devices + .clone() + .ok_or_else(|| anyhow::anyhow!("Failed to load PCI devices")) + } +} + +#[rune::module(::sysinfo)] +/// Various functions to get system information +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(SysInfo::new)?; + m.function_meta(SysInfo::architecture)?; + m.function_meta(SysInfo::kernel)?; + m.function_meta(SysInfo::host_name)?; + m.function_meta(SysInfo::os_id)?; + m.function_meta(SysInfo::os_version)?; + m.function_meta(SysInfo::cpu_cores)?; + m.function_meta(SysInfo::total_memory)?; + m.function_meta(SysInfo::cpu_vendor_id)?; + m.function_meta(SysInfo::cpu_brand)?; + m.function_meta(SysInfo::pci_devices)?; + m.ty::()?; + m.ty::()?; + Ok(m) +} diff --git a/crates/konfigkoll_script/src/plugins/systemd.rs b/crates/konfigkoll_script/src/plugins/systemd.rs new file mode 100644 index 00000000..751013be --- /dev/null +++ 
b/crates/konfigkoll_script/src/plugins/systemd.rs @@ -0,0 +1,239 @@ +//! Helpers for working with systemd units + +use camino::Utf8PathBuf; +use compact_str::CompactString; +use rune::{Any, ContextError, Module}; + +use crate::{Commands, Phase}; + +use super::package_managers::PackageManager; + +/// A systemd Unit file +/// +/// This can be used to enable or mask systemd units. +/// +/// For example, to enable a unit file from a package: +/// +/// ```rune +/// systemd::Unit::from_pkg("util-linux", +/// "fstrim.timer", +/// package_managers.files()) +/// .enable(ctx.cmds)?; +/// ``` +/// +/// The additional functions can be used to customise the behaviour. +#[derive(Debug, Any)] +#[rune(item = ::systemd)] +struct Unit { + unit: CompactString, + source: Source, + type_: Type, + name: Option, + process_aliases: bool, + process_wanted_by: bool, +} + +#[derive(Debug)] +enum Type { + System, + User, +} + +impl Type { + fn as_str(&self) -> &'static str { + match self { + Self::System => "system", + Self::User => "user", + } + } +} + +#[derive(Debug, Clone)] +enum Source { + File { + path: Utf8PathBuf, + contents: Vec, + }, + Package { + package_manager: PackageManager, + package: CompactString, + }, +} + +impl Unit { + fn symlink_name(&self) -> &str { + match &self.name { + Some(name) => name.as_str(), + None => &self.unit, + } + } + + /// Where we expect the symlink to be created + fn symlink_path(&self) -> String { + format!( + "/etc/systemd/{}/{}", + self.type_.as_str(), + self.symlink_name() + ) + } + + /// Where we expect the file to be (for the purpose of symlink target and finding the file contents) + fn unit_file_path(&self) -> String { + match &self.source { + Source::File { path, .. } => path.to_string(), + Source::Package { .. } => { + format!("/usr/lib/systemd/{}/{}", self.type_.as_str(), self.unit) + } + } + } + + /// Get contents of file + fn contents(&self) -> anyhow::Result> { + match &self.source { + Source::File { contents, .. } => Ok(contents.clone()), + Source::Package { + package_manager, + package, + } => package_manager.file_contents(package, &self.unit_file_path()), + } + } + + /// Parse the contents of the unit file, it is a simple INI file, use rust-ini + fn parse_unit_file(&self) -> anyhow::Result { + let contents = self.contents()?; + let contents = std::str::from_utf8(&contents)?; + Ok(ini::Ini::load_from_str(contents)?) 
+ } +} + +/// Rune API +impl Unit { + /// Create a new instance from a file path + #[rune::function(path = Self::from_file, keep)] + pub fn from_file(file: &str, cmds: &Commands) -> anyhow::Result { + Ok(Self { + unit: file.rsplit_once('/').map(|(_, f)| f).ok_or_else(|| anyhow::anyhow!("No file name found"))?.into(), + source: Source::File { + path: file.into(), + contents: cmds.file_contents(file).ok_or_else(|| { + anyhow::anyhow!("Failed to find file contents of {} (did you add a command that created the file before?)", file) + })?.contents()?.into_owned(), + }, + type_: Type::System, + name: None, + process_aliases: true, + process_wanted_by: true, + }) + } + + /// Create a new instace from a unit file in a package + #[rune::function(path = Self::from_pkg, keep)] + pub fn from_pkg(package: &str, unit: &str, package_manager: &PackageManager) -> Self { + Self { + unit: unit.into(), + source: Source::Package { + package_manager: package_manager.clone(), + package: package.into(), + }, + type_: Type::System, + name: None, + process_aliases: true, + process_wanted_by: true, + } + } + + /// Mark this as a user unit instead of (the default) system unit type + #[rune::function(keep)] + pub fn user(mut self) -> Self { + self.type_ = Type::User; + self + } + + /// Override the name of the unit. Useful for parameterised units (e.g. `foo@.service`) + #[rune::function(keep)] + pub fn name(mut self, name: &str) -> Self { + self.name = Some(name.into()); + self + } + + /// Skip installing aliases + #[rune::function(keep)] + pub fn skip_aliases(mut self) -> Self { + self.process_aliases = false; + self + } + + /// Skip installing wanted-by + #[rune::function(keep)] + pub fn skip_wanted_by(mut self) -> Self { + self.process_wanted_by = false; + self + } + + /// Enable the unit + #[rune::function(keep)] + pub fn enable(self, commands: &mut Commands) -> anyhow::Result<()> { + if commands.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + + let parsed = self.parse_unit_file()?; + let install_section = parsed.section(Some("Install")); + + let type_ = self.type_.as_str(); + let name = self.symlink_name(); + let unit_path = self.unit_file_path(); + + if let Some(install_section) = install_section { + if self.process_aliases { + for alias in install_section.get_all("Alias") { + for alias in alias.split_ascii_whitespace() { + let p = format!("/etc/systemd/{}/{}", type_, alias); + commands.ln(&p, &unit_path)?; + } + } + } + + if self.process_wanted_by { + for wanted_by in install_section.get_all("WantedBy") { + for wanted_by in wanted_by.split_ascii_whitespace() { + let p = format!("/etc/systemd/{}/{}.wants/{}", type_, wanted_by, name); + commands.ln(&p, &unit_path)?; + } + } + } + } + Ok(()) + } + + /// Mask the unit + #[rune::function(keep)] + pub fn mask(self, commands: &mut Commands) -> anyhow::Result<()> { + if commands.phase != Phase::Main { + return Err(anyhow::anyhow!( + "File system actions are only possible in the 'main' phase" + )); + } + + commands.ln(&self.symlink_path(), "/dev/null")?; + Ok(()) + } +} + +#[rune::module(::systemd)] +/// Functionality to simplify working with systemd +pub(crate) fn module() -> Result { + let mut m = Module::from_meta(self::module_meta)?; + m.ty::()?; + m.function_meta(Unit::from_file__meta)?; + m.function_meta(Unit::from_pkg__meta)?; + m.function_meta(Unit::user__meta)?; + m.function_meta(Unit::name__meta)?; + m.function_meta(Unit::skip_aliases__meta)?; + 
m.function_meta(Unit::skip_wanted_by__meta)?; + m.function_meta(Unit::enable__meta)?; + m.function_meta(Unit::mask__meta)?; + Ok(m) +} diff --git a/crates/konfigkoll_types/Cargo.toml b/crates/konfigkoll_types/Cargo.toml new file mode 100644 index 00000000..24e3fc7e --- /dev/null +++ b/crates/konfigkoll_types/Cargo.toml @@ -0,0 +1,21 @@ +[package] +edition = "2021" +license = "MPL-2.0" +name = "konfigkoll_types" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[dependencies] +ahash.workspace = true +anyhow.workspace = true +bitflags.workspace = true +camino.workspace = true +compact_str.workspace = true +either.workspace = true +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types" } +paketkoll_utils = { version = "0.1.0", path = "../paketkoll_utils" } +strum.workspace = true + +[lints] +workspace = true diff --git a/crates/konfigkoll_types/README.md b/crates/konfigkoll_types/README.md new file mode 100644 index 00000000..cb26925d --- /dev/null +++ b/crates/konfigkoll_types/README.md @@ -0,0 +1,8 @@ +# konfigkoll_types + +Type definitions for konfigkoll backend. + +These are the core operations that the script desugars into and are compared +against the system state. + +This is an internal API crate with no stability guarantees whatsoever. diff --git a/crates/konfigkoll_types/src/lib.rs b/crates/konfigkoll_types/src/lib.rs new file mode 100644 index 00000000..39752a1a --- /dev/null +++ b/crates/konfigkoll_types/src/lib.rs @@ -0,0 +1,18 @@ +//! Type definitions for konfigkoll backend +//! +//! These are the core operations that the script desugars into and are compared +//! against the system state. +//! +//! This is an internal API crate with no stability guarantees whatsoever. + +mod misc; +mod operations; + +pub use misc::FileContents; +pub use operations::FsInstruction; +pub use operations::FsOp; +pub use operations::FsOpDiscriminants; +pub use operations::PkgIdent; +pub use operations::PkgInstruction; +pub use operations::PkgInstructions; +pub use operations::PkgOp; diff --git a/crates/konfigkoll_types/src/misc.rs b/crates/konfigkoll_types/src/misc.rs new file mode 100644 index 00000000..978b23eb --- /dev/null +++ b/crates/konfigkoll_types/src/misc.rs @@ -0,0 +1,98 @@ +use std::{ + borrow::Cow, + fs::File, + hash::{Hash, Hasher}, + io::{BufReader, Read}, +}; + +use anyhow::Context; +use camino::Utf8Path; +use either::Either; +use paketkoll_types::files::Checksum; + +/// Describes the contents of a file for the purpose of a [`FsOp`](crate::FsOp). +#[derive(Debug, Clone)] +pub enum FileContents { + /// Literal data + Literal { checksum: Checksum, data: Box<[u8]> }, + /// From a file, for use when the data is too big to fit comfortably in memory + FromFile { + checksum: Checksum, + path: camino::Utf8PathBuf, + }, +} + +impl FileContents { + pub fn from_literal(data: Box<[u8]>) -> Self { + let checksum = paketkoll_utils::checksum::sha256_buffer(&data); + Self::Literal { checksum, data } + } + + pub fn from_file(path: &Utf8Path) -> anyhow::Result { + let mut reader = + BufReader::new(File::open(path).with_context(|| format!("Failed to open {path}"))?); + let checksum = + paketkoll_utils::checksum::sha256_readable(&mut reader).context("Checksum failed")?; + Ok(Self::FromFile { + checksum, + path: path.to_owned(), + }) + } + + pub fn checksum(&self) -> &Checksum { + match self { + FileContents::Literal { checksum, .. } => checksum, + FileContents::FromFile { checksum, .. 
} => checksum, + } + } + + /// Get a readable for the data in this operation + pub fn readable(&self) -> anyhow::Result { + match self { + FileContents::Literal { checksum: _, data } => Ok(Either::Left(data.as_ref())), + FileContents::FromFile { checksum: _, path } => Ok(Either::Right( + File::open(path).with_context(|| format!("Failed to open {path}"))?, + )), + } + } + + pub fn contents(&self) -> anyhow::Result> { + match self { + FileContents::Literal { data, .. } => Ok(Cow::Borrowed(data.as_ref())), + FileContents::FromFile { path, .. } => { + let mut reader = BufReader::new( + File::open(path).with_context(|| format!("Failed to open {path}"))?, + ); + let mut data = Vec::new(); + reader.read_to_end(&mut data)?; + Ok(Cow::Owned(data)) + } + } + } +} + +impl PartialEq for FileContents { + fn eq(&self, other: &Self) -> bool { + self.checksum() == other.checksum() + } +} + +impl Eq for FileContents {} + +impl PartialOrd for FileContents { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for FileContents { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.checksum().cmp(other.checksum()) + } +} + +impl Hash for FileContents { + fn hash(&self, state: &mut H) { + self.checksum().hash(state); + } +} diff --git a/crates/konfigkoll_types/src/operations.rs b/crates/konfigkoll_types/src/operations.rs new file mode 100644 index 00000000..76e21d26 --- /dev/null +++ b/crates/konfigkoll_types/src/operations.rs @@ -0,0 +1,158 @@ +use std::collections::BTreeMap; + +use compact_str::CompactString; +use paketkoll_types::files::Mode; + +use crate::FileContents; + +/// An operation to be performed on a file system entry +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, strum::EnumDiscriminants)] +pub enum FsOp { + /// Remove a file + Remove, + /// Restore a file to its original state + Restore, + + // Creation + /// Create a directory + CreateDirectory, + /// Create a regular file with the given contents + CreateFile(FileContents), + /// Create a symlink pointing to the given location + CreateSymlink { target: camino::Utf8PathBuf }, + /// Create a FIFO + CreateFifo, + /// Create a block device + CreateBlockDevice { major: u64, minor: u64 }, + /// Create a character device + CreateCharDevice { major: u64, minor: u64 }, + + // Metadata + /// Set the mode of a file + SetMode { mode: Mode }, + /// Set the owner of a file + SetOwner { owner: CompactString }, + /// Set the group of a file + SetGroup { group: CompactString }, + + /// Special value for when we want to inform the user about extraneous entries in their config + Comment, +} + +impl std::fmt::Display for FsOp { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + FsOp::Remove => write!(f, "remove"), + FsOp::Restore => { + write!(f, "restore (from package manager)") + } + FsOp::CreateDirectory => write!(f, "mkdir"), + FsOp::CreateFile(contents) => write!(f, "create file (with {})", contents.checksum()), + FsOp::CreateSymlink { target } => write!(f, "symlink to {target}"), + FsOp::CreateFifo => write!(f, "mkfifo"), + FsOp::CreateBlockDevice { .. } => write!(f, "mknod (block device)"), + FsOp::CreateCharDevice { .. 
} => write!(f, "mknod (char device)"), + FsOp::SetMode { mode } => write!(f, "chmod {mode}"), + FsOp::SetOwner { owner } => write!(f, "chown {owner}"), + FsOp::SetGroup { group } => write!(f, "chgrp {group}"), + FsOp::Comment => write!(f, "COMMENT"), + } + } +} + +/// An instruction for a file system change +#[derive(Debug, Clone)] +pub struct FsInstruction { + /// Path to operate on + pub path: camino::Utf8PathBuf, + /// Operation to perform + pub op: FsOp, + /// Optional comment for saving purposes + pub comment: Option, +} + +impl PartialOrd for FsInstruction { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for FsInstruction { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + match self.path.cmp(&other.path) { + std::cmp::Ordering::Equal => {} + ord => return ord, + } + self.op.cmp(&other.op) + } +} + +impl PartialEq for FsInstruction { + fn eq(&self, other: &Self) -> bool { + self.path == other.path && self.op == other.op + } +} + +impl Eq for FsInstruction {} + +/// Describes an operation to perform on a package +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum PkgOp { + Uninstall, + Install, +} + +/// Identifying a package +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct PkgIdent { + /// Which package manager to use + pub package_manager: paketkoll_types::backend::Backend, + /// Specifier describing which package to install. + /// Typically package name, but may be some other sort of identifier (e.g. for flatpak) + pub identifier: CompactString, +} + +/// An instruction for a package manager change +#[derive(Debug, Clone)] +pub struct PkgInstruction { + /// Operation to perform on package + pub op: PkgOp, + /// Optional comment for saving purposes + pub comment: Option, +} + +impl PkgInstruction { + // Toggle between install and uninstall + pub fn inverted(&self) -> Self { + Self { + op: match self.op { + PkgOp::Install => PkgOp::Uninstall, + PkgOp::Uninstall => PkgOp::Install, + }, + comment: self.comment.clone(), + } + } +} + +/// Type of collection of package instructions +pub type PkgInstructions = BTreeMap; + +impl PartialEq for PkgInstruction { + fn eq(&self, other: &Self) -> bool { + self.op == other.op + } +} + +impl Eq for PkgInstruction {} + +impl PartialOrd for PkgInstruction { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for PkgInstruction { + fn cmp(&self, other: &Self) -> std::cmp::Ordering { + self.op.cmp(&other.op) + } +} diff --git a/crates/mtree2/Cargo.toml b/crates/mtree2/Cargo.toml index e2e5c5e6..5bcf2518 100644 --- a/crates/mtree2/Cargo.toml +++ b/crates/mtree2/Cargo.toml @@ -13,6 +13,7 @@ version = "0.6.1" bitflags.workspace = true faster-hex.workspace = true memchr.workspace = true +smallvec.workspace = true [lints] workspace = true diff --git a/crates/mtree2/src/parser.rs b/crates/mtree2/src/parser.rs index 5ff64093..e8611d8e 100644 --- a/crates/mtree2/src/parser.rs +++ b/crates/mtree2/src/parser.rs @@ -1,4 +1,6 @@ //! Stuff for parsing mtree files. +use smallvec::SmallVec; + use crate::{ util::{parse_time, FromDec, FromHex}, Device, @@ -13,15 +15,15 @@ pub enum MTreeLine<'a> { /// Lines starting with a '#' are ignored. Comment, /// Special commands (starting with '/') alter the behavior of later entries. - Special(SpecialKind, Vec>), + Special(SpecialKind, SmallVec<[Keyword<'a>; 5]>), /// If the first word does not contain a '/', it is a file in the current /// directory. 
- Relative(&'a [u8], Vec>), + Relative(&'a [u8], SmallVec<[Keyword<'a>; 5]>), /// Change the current directory to the parent of the current directory. DotDot, /// If the first word does contain a '/', it is a file relative to the starting /// (not current) directory. - Full(&'a [u8], Vec>), + Full(&'a [u8], SmallVec<[Keyword<'a>; 5]>), } impl<'a> MTreeLine<'a> { @@ -43,7 +45,7 @@ impl<'a> MTreeLine<'a> { return Ok(MTreeLine::DotDot); } // the rest need params - let mut params = Vec::new(); + let mut params = SmallVec::new(); for part in parts { let keyword = Keyword::from_bytes(part); debug_assert!( diff --git a/crates/paketkoll/Cargo.toml b/crates/paketkoll/Cargo.toml index 60f33426..86d431b6 100644 --- a/crates/paketkoll/Cargo.toml +++ b/crates/paketkoll/Cargo.toml @@ -5,14 +5,14 @@ edition = "2021" keywords = ["apt", "arch-linux", "debian", "pacman", "package-management"] license = "MPL-2.0" name = "paketkoll" -readme = "../../README.md" +readme = "README.md" repository = "https://github.com/VorpalBlade/paketkoll" rust-version = "1.79.0" version = "0.2.3" [features] # Default features -default = ["debian", "arch_linux", "json"] +default = ["debian", "arch_linux", "json", "vendored"] # Include the Arch Linux backend arch_linux = ["paketkoll_core/arch_linux"] @@ -27,6 +27,9 @@ systemd_tmpfiles = ["paketkoll_core/systemd_tmpfiles"] json = ["dep:serde_json", "paketkoll_core/serde", "serde"] serde = ["dep:serde"] +# Vendor C/C++ dependencies instead of linking them dynamically +vendored = ["paketkoll_core/vendored"] + [dependencies] ahash.workspace = true anyhow = { workspace = true, features = ["backtrace"] } @@ -36,6 +39,7 @@ env_logger.workspace = true log.workspace = true os_info.workspace = true paketkoll_core = { version = "0.4.1", path = "../paketkoll_core" } +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types" } proc-exit.workspace = true rayon.workspace = true serde = { workspace = true, optional = true, features = ["serde_derive"] } @@ -47,11 +51,5 @@ serde_json = { workspace = true, optional = true } # the slightly slower mimalloc instead. mimalloc.workspace = true -[build-dependencies] -clap = { workspace = true, features = ["derive"] } -clap_complete.workspace = true -clap_mangen.workspace = true -compact_str.workspace = true - [lints] workspace = true diff --git a/crates/paketkoll/README.md b/crates/paketkoll/README.md new file mode 100644 index 00000000..186f4909 --- /dev/null +++ b/crates/paketkoll/README.md @@ -0,0 +1,181 @@ +# Paketkoll + +[ [lib.rs] ] [ [crates.io] ] [ [AUR] ] + +This is a Rust replacement for `debsums` (on Debian/Ubuntu/...) and `paccheck` +(on Arch Linux and derivatives). It is much faster than those thanks to using +all your CPU cores in parallel. (It is also much much faster than `pacman -Qkk` +which is much slower than `paccheck` even.) + +What it does is compare installed files to what the package manager installed and +report any discrepancies. + +* On Arch Linux it will report changed mode, owner, group, mtimes, symlink target, + file content (sha256) or missing files. +* On Debian it will only report if file content differs for regular files. That + is the only information available on Debian unfortunately (the md5sum). + +Additional features: + +* There is a flag to include or not include "config files" (those marked as such + by the package manager, which is not all files in `/etc` as one might think). +* On Arch Linux you can pass `--trust-mtime` to not check the contents of files + where the mtime matches. 
This makes the check ultra-fast. +* Doesn't depend on any distro specific libraries for interacting with the package + database. We do our own parsing. This makes it possible to be way faster + (parallelism!) and also to make a cross platform binary that will run on either + distro without any dependencies apart from libc. +* You can also use this to find unmanaged files (not installed by the package + manager) using `paketkoll check-unexpected`, though some work is required, + since there are many legitimately unmanaged files. You may need to find a set + of `--ignore` flags suitable for your system. Only some simple basics ignores + are built in (`/proc`, `/sys`, `/home`, etc). + +Caveats: + +* This is not a drop-in replacement for either debsums nor paccheck, since + command line flags and output format differs. Additionally, debsums have some + extra features that this doesn't, such as filtering out files removed by localepurge. +* This uses much more memory than `paccheck` (3x). This is largely unavoidable due + to memory-speed tradeoffs, though there is room for *some* improvements still. +* paketkoll will not report quite the same errors as `paccheck`. For example, if + it finds that the size differs, it will not bother computing the checksums, + since they can never match. + +## Benchmarks + +Note: CPU time is actually comparable to the original tools (slightly better in +general). But due to parallelism the wall time is *way* better, especially +without `--trust-mtime` (where the runtime is quite small to begin with). + +* All of the runs were performed on warm disk cache. +* Distro-installed versions of paccheck and debsums were used. +* Musl builds built using cross was used across the board for best portability. +* The same build flags as used for binary releases in this were used (opt level 2, fat LTO) + +### Arch Linux (x64-64 AMD desktop) + +* CPU: AMD Ryzen 5 5600X 6-Core Processor (6 cores, 12 threads) +* RAM: 32 GB, 2 DIMMs DDR4, 3600 MHz +* Disk: NVME Gen4 (WD Black SN850 1TB) +* Kernel: 6.7.5-arch1-1 +* `pacman -Q | wc -l` indicates 2211 packages installed + +When only checking file properties and trusting mtime (these should be the most similar options): + +```console +$ hyperfine -i -N --warmup 1 "paketkoll --trust-mtime check" "paccheck --file-properties --quiet" +Benchmark 1: paketkoll --trust-mtime + Time (mean ± σ): 249.4 ms ± 4.8 ms [User: 1194.5 ms, System: 1216.2 ms] + Range (min … max): 242.1 ms … 259.7 ms 12 runs + +Benchmark 2: paccheck --file-properties --quiet + Time (mean ± σ): 2.561 s ± 0.020 s [User: 1.504 s, System: 1.053 s] + Range (min … max): 2.527 s … 2.598 s 10 runs + + Warning: Ignoring non-zero exit code. + +Summary + paketkoll --trust-mtime ran + 10.27 ± 0.21 times faster than paccheck --file-properties --quiet +``` + +The speedup isn't quite as impressive when checking the checksums also, but it is still large: + +```console +$ hyperfine -i -N --warmup 1 "paketkoll check" "paccheck --sha256sum --quiet" +Benchmark 1: paketkoll + Time (mean ± σ): 9.986 s ± 1.329 s [User: 17.368 s, System: 19.087 s] + Range (min … max): 8.196 s … 11.872 s 10 runs + +Benchmark 2: paccheck --sha256sum --quiet + Time (mean ± σ): 68.976 s ± 0.339 s [User: 16.661 s, System: 17.816 s] + Range (min … max): 68.413 s … 69.604 s 10 runs + + Warning: Ignoring non-zero exit code. 
+ +Summary + paketkoll ran + 6.91 ± 0.92 times faster than paccheck --sha256sum --quiet +``` + +* Many and large packages installed +* 6 cores, 12 thread means a decent speed up from multi-threading is possible. +* I don't know what paccheck was doing such that it took 68 seconds but didn't use very much CPU. Presumably waiting for IO? + +### Debian (ARM64 Raspberry Pi) + +* Raspberry Pi 5 (8 GB RAM) +* CPU: Cortex-A76 (4 cores, 4 threads) +* Disk: USB boot from SATA SSD in USB 3.0 enclosure: Samsung SSD 850 PRO 512GB +* Kernel: 6.1.0-rpi8-rpi-2712 +* `dpkg-query -l | grep ii | wc -l` indicates 749 packages installed + +```console +$ hyperfine -i -N --warmup 1 "paketkoll check" "debsums -c" +Benchmark 1: paketkoll + Time (mean ± σ): 2.664 s ± 0.102 s [User: 3.937 s, System: 1.116 s] + Range (min … max): 2.543 s … 2.813 s 10 runs + +Benchmark 2: debsums -c + Time (mean ± σ): 8.893 s ± 0.222 s [User: 5.453 s, System: 1.350 s] + Range (min … max): 8.637 s … 9.199 s 10 runs + + Warning: Ignoring non-zero exit code. + +Summary + 'paketkoll' ran + 3.34 ± 0.15 times faster than 'debsums -c' +``` + +* There aren't a ton of packages installed on this system (it is acting as a headless server). This means that neither command is terribly slow. +* A Pi only has 4 cores also, which limits the maximum possible speedup. + +### Ubuntu 22.04 (x86-64 Intel laptop) + +* CPU: 12th Gen Intel(R) Core(TM) i9-12950HX (8 P-cores with 16 threads + 8 E-cores with 8 threads) +* RAM: 64 GB, 2 DIMMs DDR4, 3600 MHz +* Disk: NVME Gen4 (WD SN810 2 TB) +* Kernel: 6.5.0-17-generic (HWE kernel) +* `dpkg-query -l | grep ii | wc -l` indicates 4012 packages installed + +```console +$ hyperfine -i -N --warmup 1 "paketkoll check" "debsums -c" +Benchmark 1: paketkoll + Time (mean ± σ): 5.341 s ± 0.174 s [User: 42.553 s, System: 33.049 s] + Range (min … max): 5.082 s … 5.586 s 10 runs + +Benchmark 2: debsums -c + Time (mean ± σ): 92.839 s ± 7.332 s [User: 47.664 s, System: 15.697 s] + Range (min … max): 82.872 s … 103.710 s 10 runs + + Warning: Ignoring non-zero exit code. + +Summary + paketkoll ran + 17.38 ± 1.49 times faster than debsums -c +``` + +## Future improvements + +Most future improvements will happen in the [`konfigkoll`](../konfigkoll) +crate, to make it suitable for another project idea I have (basically that project +needs this as a library). + +I consider the program itself mostly feature complete. The main changes would be +bug fixes and possibly supporting additional Linux distributions and package managers. + +## MSRV (Minimum Supported Rust Version) policy + +The MSRV may be bumped as needed. It is guaranteed that this program will at +least build on the current stable Rust release. An MSRV change is not considered +a breaking change and as such may change even in a patch version. + +## What does the name mean? + +paketkoll is Swedish for "package check", though the translation to English isn't +perfect ("ha koll på" means "keep an eye on" for example). 
+ +[crates.io]: https://crates.io/crates/paketkoll +[lib.rs]: https://lib.rs/crates/paketkoll +[AUR]: https://aur.archlinux.org/packages/paketkoll diff --git a/crates/paketkoll/build.rs b/crates/paketkoll/build.rs deleted file mode 100644 index 781f1bcc..00000000 --- a/crates/paketkoll/build.rs +++ /dev/null @@ -1,25 +0,0 @@ -use clap::CommandFactory; -use clap::ValueEnum; -use clap_complete::{generate_to, Shell}; -use std::env; -use std::io::Error; -use std::path::PathBuf; - -include!("src/cli.rs"); - -fn main() -> Result<(), Error> { - let outdir = env::var_os("OUT_DIR").ok_or(std::io::ErrorKind::NotFound)?; - - let mut cmd = Cli::command(); - for &shell in Shell::value_variants() { - generate_to(shell, &mut cmd, "paketkoll", &outdir)?; - } - - clap_mangen::generate_to(cmd, PathBuf::from(outdir))?; - - // Outputs will be in a directory like target/release/build/paketkoll-/out/ - // That is unfortunate, but there doesn't seem to be a way to get a stable output directory - // println!("cargo:warning=shell completion & man page generated in: {outdir:?}"); - - Ok(()) -} diff --git a/crates/paketkoll/src/conversion.rs b/crates/paketkoll/src/conversion.rs index a0ffa9a6..4f47cef0 100644 --- a/crates/paketkoll/src/conversion.rs +++ b/crates/paketkoll/src/conversion.rs @@ -2,7 +2,7 @@ use ahash::AHashSet; use crate::cli::{Backend, Cli, Commands, ConfigFiles}; -impl TryFrom for paketkoll_core::backend::Backend { +impl TryFrom for paketkoll_core::backend::ConcreteBackend { type Error = anyhow::Error; fn try_from(value: Backend) -> Result { @@ -12,20 +12,20 @@ impl TryFrom for paketkoll_core::backend::Backend { match info.os_type() { #[cfg(feature = "arch_linux")] os_info::Type::Arch | os_info::Type::EndeavourOS | - os_info::Type::Manjaro => Ok(Self::ArchLinux), + os_info::Type::Manjaro => Ok(Self::Pacman), #[cfg(feature = "debian")] os_info::Type::Debian | os_info::Type::Mint | os_info::Type::Pop | os_info::Type::Raspbian | - os_info::Type::Ubuntu => Ok(Self::Debian), + os_info::Type::Ubuntu => Ok(Self::Apt), _ => Err(anyhow::anyhow!( "Unknown or unsupported distro: {} (try passing a specific backend if you think it should work)", info.os_type())), } } #[cfg(feature = "arch_linux")] - Backend::ArchLinux => Ok(Self::ArchLinux), + Backend::ArchLinux => Ok(Self::Pacman), #[cfg(feature = "debian")] - Backend::Debian => Ok(Self::Debian), + Backend::Debian => Ok(Self::Apt), Backend::Flatpak => Ok(Self::Flatpak), #[cfg(feature = "systemd_tmpfiles")] Backend::SystemdTmpfiles => Ok(Self::SystemdTmpfiles), diff --git a/crates/paketkoll/src/main.rs b/crates/paketkoll/src/main.rs index 68b6ccc4..264927ed 100644 --- a/crates/paketkoll/src/main.rs +++ b/crates/paketkoll/src/main.rs @@ -3,6 +3,7 @@ use std::{ io::{stdout, BufWriter, Write}, os::unix::ffi::OsStrExt, + path::Path, }; use ahash::AHashSet; @@ -10,7 +11,6 @@ use anyhow::Context; use clap::Parser; use paketkoll::cli::{Cli, Commands, Format}; use paketkoll_core::{ - backend::OriginalFileQuery, config::CheckAllFilesConfiguration, file_ops, package_ops, paketkoll_types::{ @@ -19,6 +19,7 @@ use paketkoll_core::{ package::InstallReason, }, }; +use paketkoll_types::{backend::OriginalFileQuery, package::PackageInterned}; use proc_exit::{Code, Exit}; use rayon::prelude::*; @@ -42,36 +43,7 @@ fn main() -> anyhow::Result { package_ops::installed_packages(&(cli.backend.try_into()?), &(&cli).try_into()?)?; let mut stdout = BufWriter::new(stdout().lock()); - match cli.format { - Format::Human => { - for pkg in packages { - let pkg_name = interner - 
.try_resolve(&pkg.name.as_interner_ref()) - .ok_or_else(|| anyhow::anyhow!("No package name for package"))?; - match pkg.reason { - Some(InstallReason::Explicit) => { - writeln!(stdout, "{} {}", pkg_name, pkg.version)?; - } - Some(InstallReason::Dependency) => { - writeln!(stdout, "{} {} (as dep)", pkg_name, pkg.version)?; - } - None => writeln!( - stdout, - "{} {} (unknown install reason)", - pkg_name, pkg.version - )?, - } - } - } - #[cfg(feature = "json")] - Format::Json => { - let packages: Vec<_> = packages - .into_par_iter() - .map(|pkg| pkg.into_direct(&interner)) - .collect(); - serde_json::to_writer_pretty(&mut stdout, &packages)?; - } - } + print_packages(&cli, packages, &interner, &mut stdout)?; Ok(Exit::new(Code::SUCCESS)) } @@ -79,22 +51,22 @@ fn main() -> anyhow::Result { ref package, ref path, } => { - let backend: paketkoll_core::backend::Backend = cli.backend.try_into()?; + let interner = Interner::new(); + let backend: paketkoll_core::backend::ConcreteBackend = cli.backend.try_into()?; let backend_impl = backend - .create_full(&(&cli).try_into()?) + .create_full(&(&cli).try_into()?, &interner) .context("Failed to create backend")?; - let interner = Interner::new(); let package_map = backend_impl - .package_map(&interner) + .package_map_complete(&interner) .with_context(|| format!("Failed to collect information from backend {backend}"))?; let package: &str = match package { Some(p) => p, None => { let mut inputs = AHashSet::default(); - inputs.insert(path.as_str().into()); - let file_map = backend_impl.owning_package(&inputs, &interner)?; + inputs.insert(Path::new(path)); + let file_map = backend_impl.owning_packages(&inputs, &interner)?; if file_map.len() != 1 { return Err(anyhow::anyhow!( "Expected exactly one package to own the file, found {}", @@ -117,7 +89,7 @@ fn main() -> anyhow::Result { path: path.into(), }]; let results = backend_impl - .original_files(queries.as_slice(), package_map, &interner) + .original_files(queries.as_slice(), &package_map, &interner) .with_context(|| { format!("Failed to collect original files from backend {backend}") })?; @@ -128,14 +100,14 @@ fn main() -> anyhow::Result { Ok(Exit::new(Code::SUCCESS)) } Commands::Owns { ref paths } => { - let backend: paketkoll_core::backend::Backend = cli.backend.try_into()?; + let interner = Interner::new(); + let backend: paketkoll_core::backend::ConcreteBackend = cli.backend.try_into()?; let backend_impl = backend - .create_files(&(&cli).try_into()?) 
+ .create_files(&(&cli).try_into()?, &interner) .context("Failed to create backend")?; - let interner = Interner::new(); - let inputs = AHashSet::from_iter(paths.iter().map(|e| e.as_str().into())); - let file_map = backend_impl.owning_package(&inputs, &interner)?; + let inputs = AHashSet::from_iter(paths.iter().map(Path::new)); + let file_map = backend_impl.owning_packages(&inputs, &interner)?; for (path, owner) in file_map { if let Some(package) = owner { @@ -149,6 +121,45 @@ fn main() -> anyhow::Result { } } +fn print_packages( + cli: &Cli, + packages: Vec, + interner: &Interner, + stdout: &mut BufWriter>, +) -> Result<(), anyhow::Error> { + match cli.format { + Format::Human => { + for pkg in packages { + let pkg_name = interner + .try_resolve(&pkg.name.as_interner_ref()) + .ok_or_else(|| anyhow::anyhow!("No package name for package"))?; + match pkg.reason { + Some(InstallReason::Explicit) => { + writeln!(stdout, "{} {}", pkg_name, pkg.version)?; + } + Some(InstallReason::Dependency) => { + writeln!(stdout, "{} {} (as dep)", pkg_name, pkg.version)?; + } + None => writeln!( + stdout, + "{} {} (unknown install reason)", + pkg_name, pkg.version + )?, + } + } + } + #[cfg(feature = "json")] + Format::Json => { + let packages: Vec<_> = packages + .into_par_iter() + .map(|pkg| pkg.into_direct(interner)) + .collect(); + serde_json::to_writer_pretty(stdout, &packages)?; + } + }; + Ok(()) +} + fn run_file_checks(cli: &Cli) -> Result { let (interner, mut found_issues) = match cli.command { Commands::Check { .. } => file_ops::check_installed_files( diff --git a/crates/paketkoll_cache/Cargo.toml b/crates/paketkoll_cache/Cargo.toml new file mode 100644 index 00000000..d1f1dd96 --- /dev/null +++ b/crates/paketkoll_cache/Cargo.toml @@ -0,0 +1,25 @@ +[package] +categories = ["filesystem", "os::linux-apis"] +description = "Check installed distro files for changes (caching library)" +edition = "2021" +keywords = ["package-management"] +license = "MPL-2.0" +name = "paketkoll_cache" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" + +[dependencies] +ahash.workspace = true +anyhow.workspace = true +cached = { workspace = true, features = [ + "disk_store", + "ahash", +], default-features = false } +compact_str.workspace = true +dashmap.workspace = true +paketkoll_types = { version = "0.1.0", path = "../paketkoll_types" } +tracing.workspace = true + +[lints] +workspace = true diff --git a/crates/paketkoll_cache/README.md b/crates/paketkoll_cache/README.md new file mode 100644 index 00000000..900ef523 --- /dev/null +++ b/crates/paketkoll_cache/README.md @@ -0,0 +1,14 @@ +# paketkoll_cache + +Internal crate for [paketkoll] / [konfigkoll]. You don't want to be here (probably). +That said, this follows semver. + +This crate adds disk caching to expensive original file queries in paketkoll. +A dependency of konfigkoll. Not part of paketkoll_core in order to keep build +times and dependencies in check in the development workspace. + +## MSRV (Minimum Supported Rust Version) policy + +The MSRV may be bumped as needed. It is guaranteed that this library will at +least build on the current stable Rust release. An MSRV change is not considered +a breaking change and as such may change even in a patch version. diff --git a/crates/paketkoll_cache/src/lib.rs b/crates/paketkoll_cache/src/lib.rs new file mode 100644 index 00000000..c84e65a2 --- /dev/null +++ b/crates/paketkoll_cache/src/lib.rs @@ -0,0 +1,154 @@ +//! 
Wrapping backend that performs disk cache + +use std::fmt::Debug; +use std::fmt::Display; +use std::path::Path; + +use ahash::AHashMap; +use anyhow::Context; +use cached::stores::DiskCacheBuilder; +use cached::DiskCache; +use cached::IOCached; +use compact_str::format_compact; +use compact_str::CompactString; +use paketkoll_types::{ + backend::{Backend, Files, Name, OriginalFileQuery, PackageMap}, + intern::{Interner, PackageRef}, +}; + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct CacheKey { + backend: &'static str, + package: CompactString, + path: CompactString, +} + +impl CacheKey { + pub fn new(backend: &'static str, package: CompactString, path: CompactString) -> Self { + Self { + backend, + package, + path, + } + } +} + +impl Display for CacheKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}:{}:{}", self.backend, self.package, self.path) + } +} + +pub struct FilesCache { + inner: Box, + cache: DiskCache>, +} + +impl FilesCache { + pub fn from_path(inner: Box, path: &std::path::Path) -> anyhow::Result { + let cache = DiskCacheBuilder::new(inner.name()) + .set_refresh(false) + .set_lifespan(60 * 60 * 24 * 30) // A month + .set_disk_directory(path) + .build()?; + Ok(Self { inner, cache }) + } +} + +impl Debug for FilesCache { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("FilesCache") + .field("inner", &self.inner) + .field("cache", &"DiskCache>") + .finish() + } +} + +impl Name for FilesCache { + fn name(&self) -> &'static str { + self.inner.name() + } + + fn as_backend_enum(&self) -> Backend { + self.inner.as_backend_enum() + } +} + +impl Files for FilesCache { + fn files(&self, interner: &Interner) -> anyhow::Result> { + self.inner.files(interner) + } + + fn owning_packages( + &self, + paths: &ahash::AHashSet<&Path>, + interner: &Interner, + ) -> anyhow::Result, ahash::RandomState>> + { + self.inner.owning_packages(paths, interner) + } + + fn original_files( + &self, + queries: &[OriginalFileQuery], + packages: &PackageMap, + interner: &Interner, + ) -> anyhow::Result>> { + // Build up lists of cached and uncached queries + let mut results = AHashMap::new(); + let mut uncached_queries = Vec::new(); + let mut cache_keys = AHashMap::new(); + let inner_name = self.name(); + for query in queries.iter() { + // Resolve exact version and ID of packages from the package map + let cache_key = match packages.get(&PackageRef::get_or_intern(interner, &query.package)) + { + Some(p) => { + let ids = p.ids.iter().map(|v| v.to_str(interner)); + let ids = ids.collect::>().join("#"); + format_compact!( + "{}:{}:{}:{}", + query.package, + p.architecture + .map(|v| v.to_str(interner)) + .unwrap_or_default(), + p.version, + ids + ) + } + None => { + tracing::warn!("Package not found: {}", query.package); + uncached_queries.push(query.clone()); + continue; + } + }; + let cache_key = CacheKey::new(inner_name, cache_key, query.path.clone()); + match self.cache.cache_get(&cache_key)? 
{ + Some(v) => { + results.insert(query.clone(), v); + } + None => { + uncached_queries.push(query.clone()); + cache_keys.insert(query.clone(), cache_key); + } + } + } + // Fetch uncached queries + let uncached_results = self + .inner + .original_files(&uncached_queries, packages, interner)?; + + // Insert the uncached results into the cache and update the results + for (query, result) in uncached_results.into_iter() { + let cache_key = cache_keys.get(&query).context("Cache key not found")?; + self.cache.cache_set(cache_key.clone(), result.clone())?; + results.insert(query, result); + } + + Ok(results) + } + + fn may_need_canonicalization(&self) -> bool { + self.inner.may_need_canonicalization() + } +} diff --git a/crates/paketkoll_core/Cargo.toml b/crates/paketkoll_core/Cargo.toml index 3f6f4f3b..0c4ec673 100644 --- a/crates/paketkoll_core/Cargo.toml +++ b/crates/paketkoll_core/Cargo.toml @@ -29,8 +29,6 @@ arch_linux = [ "dep:rust-ini", ] -systemd_tmpfiles = ["__sha256", "dep:systemd_tmpfiles", "dep:nix"] - # Include support for the Debian backend debian = [ "__bzip2", @@ -43,6 +41,9 @@ debian = [ "dep:ar", ] +# Experimental systemd-tmpfiles backend +systemd_tmpfiles = ["__sha256", "dep:systemd_tmpfiles", "dep:nix"] + # Include support for serde on public datatypes serde = [ "compact_str/serde", @@ -51,6 +52,9 @@ serde = [ "smallvec/serde", ] +# Vendor C/C++ dependencies instead of linking them dynamically +vendored = ["xz2?/static", "bzip2?/static"] + # Internal feature: Enable MD5 support __md5 = ["dep:md-5"] # Internal feature: Enable SHA-256 support @@ -102,12 +106,8 @@ ring = { workspace = true, optional = true } rust-ini = { workspace = true, optional = true } scopeguard.workspace = true serde = { workspace = true, optional = true, features = ["derive"] } -smallvec = { workspace = true, features = [ - "const_generics", - "const_new", - "union", -] } -strum = { workspace = true, features = ["derive"] } +smallvec.workspace = true +strum.workspace = true systemd_tmpfiles = { version = "0.1.1", path = "../systemd_tmpfiles", optional = true } tar = { workspace = true, optional = true } xz2 = { workspace = true, optional = true } diff --git a/crates/paketkoll_core/src/backend.rs b/crates/paketkoll_core/src/backend.rs index 21194fab..b528bbd1 100644 --- a/crates/paketkoll_core/src/backend.rs +++ b/crates/paketkoll_core/src/backend.rs @@ -1,15 +1,8 @@ //! 
The various backends implementing distro specific support -use ahash::{AHashMap, AHashSet}; -use anyhow::{anyhow, Context}; -use compact_str::CompactString; -use dashmap::DashMap; -use paketkoll_types::{ - files::FileEntry, - intern::{Interner, PackageRef}, - package::PackageInterned, -}; -use std::{fmt::Debug, path::PathBuf}; +use paketkoll_types::backend::{Files, Packages}; +use paketkoll_types::intern::{Interner, PackageRef}; +use std::fmt::Debug; #[cfg(feature = "arch_linux")] pub(crate) mod arch; @@ -17,157 +10,109 @@ pub(crate) mod arch; #[cfg(feature = "debian")] pub(crate) mod deb; -pub(crate) mod filesystem; -pub(crate) mod flatpak; - #[cfg(feature = "systemd_tmpfiles")] pub(crate) mod systemd_tmpfiles; -/// Get the name of a backend (useful in dynamic dispatch for generating reports) -pub trait Name: Send + Sync { - /// The name of the backend (for logging and debugging purposes) - // Temporary, this will get exposed - #[allow(dead_code)] - fn name(&self) -> &'static str; -} - -/// A package manager backend -pub trait Files: Name { - /// Collect a list of files managed by the package manager including - /// any available metadata such as checksums or timestamps about those files - fn files(&self, interner: &Interner) -> anyhow::Result>; - - /// Find the owners of the specified packages - fn owning_package( - &self, - paths: &AHashSet, - interner: &Interner, - ) -> anyhow::Result, ahash::RandomState>>; - - /// Get the original contents of files - fn original_files( - &self, - queries: &[OriginalFileQuery], - packages: ahash::AHashMap, - interner: &Interner, - ) -> anyhow::Result>>; -} - -/// Query type for original file contents -#[derive(Debug, Clone, Eq, PartialEq, Hash)] -pub struct OriginalFileQuery { - pub package: CompactString, - pub path: CompactString, -} - -/// A package manager backend (reading list of packages) -pub trait Packages: Name { - /// Collect a list of all installed packages - fn packages(&self, interner: &Interner) -> anyhow::Result>; - - /// Collect a map of packages with the interned name as key - fn package_map( - &self, - interner: &Interner, - ) -> anyhow::Result> { - let packages = self - .packages(interner) - .with_context(|| anyhow!("Failed to load package list"))?; - let mut package_map = - AHashMap::with_capacity_and_hasher(packages.len(), ahash::RandomState::new()); - for package in packages.into_iter() { - package_map.insert(package.name, package); - } - Ok(package_map) - } -} - -/// A package manager backend (installing/uninstalling packages) -pub trait PackageManager: Name { - /// Perform installation and uninstallation of a bunch of packages - /// - /// The package name format depends on the backend. 
- fn transact( - &self, - install: &[CompactString], - uninstall: &[CompactString], - ask_confirmation: bool, - ) -> anyhow::Result<()>; -} +pub(crate) mod filesystem; +pub(crate) mod flatpak; /// A backend that implements all operations #[allow(dead_code)] -pub trait FullBackend: Files + Packages + PackageManager {} +pub trait FullBackend: Files + Packages {} /// Which backend to use for the system package manager #[derive(Debug, PartialEq, Eq, Hash, Clone, Copy, strum::Display)] -pub enum Backend { +pub enum ConcreteBackend { /// Backend for Arch Linux and derived distros (pacman) #[cfg(feature = "arch_linux")] - ArchLinux, + #[strum(to_string = "pacman")] + Pacman, /// Backend for Debian and derived distros (dpkg/apt) #[cfg(feature = "debian")] - Debian, + #[strum(to_string = "apt")] + Apt, /// Backend for flatpak (package list only) + #[strum(to_string = "flatpak")] Flatpak, /// Backend for systemd-tmpfiles (file list only) #[cfg(feature = "systemd_tmpfiles")] + #[strum(to_string = "systemd-tmpfiles")] SystemdTmpfiles, } -impl Backend { - pub fn from_name(name: &str) -> Option { - match name { +impl TryFrom for ConcreteBackend { + type Error = anyhow::Error; + + fn try_from(value: paketkoll_types::backend::Backend) -> Result { + match value { #[cfg(feature = "arch_linux")] - "pacman" => Some(Backend::ArchLinux), + paketkoll_types::backend::Backend::Pacman => Ok(Self::Pacman), #[cfg(feature = "debian")] - "apt" => Some(Backend::Debian), - "flatpak" => Some(Backend::Flatpak), - _ => None, + paketkoll_types::backend::Backend::Apt => Ok(Self::Apt), + paketkoll_types::backend::Backend::Flatpak => Ok(Self::Flatpak), + #[cfg(feature = "systemd_tmpfiles")] + paketkoll_types::backend::Backend::SystemdTmpfiles => Ok(Self::SystemdTmpfiles), + #[allow(unreachable_patterns)] + _ => anyhow::bail!("Unsupported backend in current build: {:?}", value), + } + } +} + +impl From for paketkoll_types::backend::Backend { + fn from(value: ConcreteBackend) -> Self { + match value { + #[cfg(feature = "arch_linux")] + ConcreteBackend::Pacman => paketkoll_types::backend::Backend::Pacman, + #[cfg(feature = "debian")] + ConcreteBackend::Apt => paketkoll_types::backend::Backend::Apt, + ConcreteBackend::Flatpak => paketkoll_types::backend::Backend::Flatpak, + #[cfg(feature = "systemd_tmpfiles")] + ConcreteBackend::SystemdTmpfiles => paketkoll_types::backend::Backend::SystemdTmpfiles, } } } // Clippy is wrong, this cannot be derived due to the cfg_if #[allow(clippy::derivable_impls)] -impl Default for Backend { +impl Default for ConcreteBackend { fn default() -> Self { cfg_if::cfg_if! { if #[cfg(feature = "arch_linux")] { - Backend::ArchLinux + ConcreteBackend::Pacman } else if #[cfg(feature = "debian")] { - Backend::Debian + ConcreteBackend::Apt } else { - Backend::Flatpak + ConcreteBackend::Flatpak } } } } -impl Backend { +impl ConcreteBackend { /// Create a backend instance pub fn create_files( self, configuration: &BackendConfiguration, + interner: &Interner, ) -> anyhow::Result> { match self { #[cfg(feature = "arch_linux")] - Backend::ArchLinux => Ok(Box::new({ + ConcreteBackend::Pacman => Ok(Box::new({ let mut builder = crate::backend::arch::ArchLinuxBuilder::default(); builder.package_filter(configuration.package_filter); builder.build()? 
})), #[cfg(feature = "debian")] - Backend::Debian => Ok(Box::new({ + ConcreteBackend::Apt => Ok(Box::new({ let mut builder = crate::backend::deb::DebianBuilder::default(); builder.package_filter(configuration.package_filter); - builder.build() + builder.build(interner) })), - Backend::Flatpak => Err(anyhow::anyhow!( + ConcreteBackend::Flatpak => Err(anyhow::anyhow!( "Flatpak backend does not support file checks" )), #[cfg(feature = "systemd_tmpfiles")] - Backend::SystemdTmpfiles => Ok(Box::new({ + ConcreteBackend::SystemdTmpfiles => Ok(Box::new({ let builder = crate::backend::systemd_tmpfiles::SystemdTmpfilesBuilder::default(); builder.build() })), @@ -178,26 +123,27 @@ impl Backend { pub fn create_packages( self, configuration: &BackendConfiguration, + interner: &Interner, ) -> anyhow::Result> { match self { #[cfg(feature = "arch_linux")] - Backend::ArchLinux => Ok(Box::new({ + ConcreteBackend::Pacman => Ok(Box::new({ let mut builder = crate::backend::arch::ArchLinuxBuilder::default(); builder.package_filter(configuration.package_filter); builder.build()? })), #[cfg(feature = "debian")] - Backend::Debian => Ok(Box::new({ + ConcreteBackend::Apt => Ok(Box::new({ let mut builder = crate::backend::deb::DebianBuilder::default(); builder.package_filter(configuration.package_filter); - builder.build() + builder.build(interner) })), - Backend::Flatpak => Ok(Box::new({ + ConcreteBackend::Flatpak => Ok(Box::new({ let builder = crate::backend::flatpak::FlatpakBuilder::default(); builder.build() })), #[cfg(feature = "systemd_tmpfiles")] - Backend::SystemdTmpfiles => Err(anyhow::anyhow!( + ConcreteBackend::SystemdTmpfiles => Err(anyhow::anyhow!( "SystemdTmpfiles backend does not support package checks" )), } @@ -207,25 +153,26 @@ impl Backend { pub fn create_full( self, configuration: &BackendConfiguration, + interner: &Interner, ) -> anyhow::Result> { match self { #[cfg(feature = "arch_linux")] - Backend::ArchLinux => Ok(Box::new({ + ConcreteBackend::Pacman => Ok(Box::new({ let mut builder = crate::backend::arch::ArchLinuxBuilder::default(); builder.package_filter(configuration.package_filter); builder.build()? 
})), #[cfg(feature = "debian")] - Backend::Debian => Ok(Box::new({ + ConcreteBackend::Apt => Ok(Box::new({ let mut builder = crate::backend::deb::DebianBuilder::default(); builder.package_filter(configuration.package_filter); - builder.build() + builder.build(interner) })), - Backend::Flatpak => Err(anyhow::anyhow!( + ConcreteBackend::Flatpak => Err(anyhow::anyhow!( "Flatpak backend does not support file checks" )), #[cfg(feature = "systemd_tmpfiles")] - Backend::SystemdTmpfiles => Err(anyhow::anyhow!( + ConcreteBackend::SystemdTmpfiles => Err(anyhow::anyhow!( "SystemdTmpfiles backend does not support package checks" )), } diff --git a/crates/paketkoll_core/src/backend/arch.rs b/crates/paketkoll_core/src/backend/arch.rs index 2972832c..7622b882 100644 --- a/crates/paketkoll_core/src/backend/arch.rs +++ b/crates/paketkoll_core/src/backend/arch.rs @@ -11,15 +11,18 @@ use std::{ path::{Path, PathBuf}, }; -use super::{Files, FullBackend, Name, PackageFilter, PackageManager, Packages}; +use super::{FullBackend, PackageFilter}; use crate::utils::{ extract_files, group_queries_by_pkg, locate_package_file, package_manager_transaction, }; -use ahash::{AHashMap, AHashSet}; +use ahash::AHashSet; use anyhow::Context; use compact_str::format_compact; use dashmap::{DashMap, DashSet}; use either::Either; +use paketkoll_types::backend::{ + Files, Name, OriginalFileQuery, PackageManagerError, PackageMap, Packages, +}; use paketkoll_types::{files::FileEntry, intern::PackageRef}; use paketkoll_types::{intern::Interner, package::PackageInterned}; use rayon::prelude::*; @@ -42,7 +45,7 @@ pub(crate) struct ArchLinuxBuilder { impl ArchLinuxBuilder { /// Load pacman config fn load_config(&mut self) -> anyhow::Result { - log::debug!(target: "paketkoll_core::backend::arch", "Loading pacman config"); + log::debug!("Loading pacman config"); let mut readable = BufReader::new(std::fs::File::open("/etc/pacman.conf")?); let pacman_config: pacman_conf::PacmanConfig = pacman_conf::PacmanConfig::new(&mut readable)?; @@ -74,6 +77,10 @@ impl Name for ArchLinux { fn name(&self) -> &'static str { NAME } + + fn as_backend_enum(&self) -> paketkoll_types::backend::Backend { + paketkoll_types::backend::Backend::Pacman + } } impl Files for ArchLinux { @@ -84,11 +91,11 @@ impl Files for ArchLinux { let db_path: &Path = Path::new(&self.pacman_config.db_path); // Load packages - log::debug!(target: "paketkoll_core::backend::arch", "Loading packages"); + log::debug!("Loading packages"); let pkgs_and_paths = get_mtree_paths(db_path, interner, self.package_filter)?; // Load mtrees - log::debug!(target: "paketkoll_core::backend::arch", "Loading mtrees"); + log::debug!("Loading mtrees"); // Directories are duplicated across packages, we deduplicate them here let seen_directories = DashSet::new(); // It is counter-intuitive, but we are faster if we collect into a vec here and start @@ -121,10 +128,10 @@ impl Files for ArchLinux { fn original_files( &self, - queries: &[super::OriginalFileQuery], - packages: AHashMap, + queries: &[OriginalFileQuery], + packages: &PackageMap, interner: &Interner, - ) -> anyhow::Result>> { + ) -> anyhow::Result>> { let queries_by_pkg = group_queries_by_pkg(queries); let mut results = ahash::AHashMap::new(); @@ -178,9 +185,9 @@ impl Files for ArchLinux { Ok(results) } - fn owning_package( + fn owning_packages( &self, - paths: &AHashSet, + paths: &AHashSet<&Path>, interner: &Interner, ) -> anyhow::Result, ahash::RandomState>> { // Optimise for speed, go directly into package cache and look for files that contain 
the given string @@ -267,35 +274,76 @@ impl Packages for ArchLinux { .collect(); results } -} -impl PackageManager for ArchLinux { fn transact( &self, - install: &[compact_str::CompactString], - uninstall: &[compact_str::CompactString], + install: &[&str], + uninstall: &[&str], ask_confirmation: bool, - ) -> anyhow::Result<()> { + ) -> Result<(), PackageManagerError> { if !install.is_empty() { package_manager_transaction( "pacman", - "-S", + &["-S"], install, - ask_confirmation.then_some("--noconfirm"), + (!ask_confirmation).then_some("--noconfirm"), ) .context("Failed to install with pacman")?; } if !uninstall.is_empty() { package_manager_transaction( "pacman", - "-R", + &["-R"], uninstall, - ask_confirmation.then_some("--noconfirm"), + (!ask_confirmation).then_some("--noconfirm"), ) .context("Failed to uninstall with pacman")?; } Ok(()) } + + fn mark(&self, dependencies: &[&str], manual: &[&str]) -> Result<(), PackageManagerError> { + if !dependencies.is_empty() { + package_manager_transaction("pacman", &["-D", "--asdeps"], dependencies, None) + .context("Failed to mark dependencies with pacman")?; + } + if !manual.is_empty() { + package_manager_transaction("pacman", &["-D", "--asexplicit"], manual, None) + .context("Failed to mark manual with pacman")?; + } + Ok(()) + } + + fn remove_unused(&self, ask_confirmation: bool) -> Result<(), PackageManagerError> { + let mut query_cmd = std::process::Command::new("pacman"); + query_cmd.args(["-Qttdq"]); + + let mut run_query = || -> anyhow::Result> { + let query_output = query_cmd + .output() + .with_context(|| "Failed to execute pacman -Qdtq")?; + let out = String::from_utf8(query_output.stdout) + .with_context(|| "Failed to parse pacman -Qdtq output as UTF-8")?; + if out.is_empty() { + Ok(None) + } else { + Ok(Some(out)) + } + }; + + while let Some(packages) = run_query()? 
{ + let packages = packages.lines().collect::>(); + package_manager_transaction( + "pacman", + &["-R"], + &packages, + (!ask_confirmation).then_some("--noconfirm"), + ) + .context("Failed to remove unused packages with pacman")?; + } + + Ok(()) + } } // To download to cache: pacman -Sw packagename @@ -309,7 +357,7 @@ fn download_arch_pkg(pkg: &str) -> Result<(), anyhow::Error> { .args(["-Sw", "--noconfirm", pkg]) .status()?; if !status.success() { - log::warn!(target: "paketkoll_core::backend::arch", "Failed to download package for {pkg}"); + log::warn!("Failed to download package for {pkg}"); }; Ok(()) } diff --git a/crates/paketkoll_core/src/backend/arch/desc.rs b/crates/paketkoll_core/src/backend/arch/desc.rs index 07d6dd62..ba8b16c9 100644 --- a/crates/paketkoll_core/src/backend/arch/desc.rs +++ b/crates/paketkoll_core/src/backend/arch/desc.rs @@ -75,7 +75,7 @@ pub(super) fn from_arch_linux_desc( provides, reason: Some(reason.unwrap_or(InstallReason::Explicit)), status: PackageInstallStatus::Installed, - id: None, + ids: Default::default(), }) } @@ -216,7 +216,7 @@ mod tests { provides: vec![PackageRef::get_or_intern(&interner, "libfoo.so"),], reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: None, + ids: Default::default(), } ); } diff --git a/crates/paketkoll_core/src/backend/arch/mtree.rs b/crates/paketkoll_core/src/backend/arch/mtree.rs index d3f965fb..4bf9aad0 100644 --- a/crates/paketkoll_core/src/backend/arch/mtree.rs +++ b/crates/paketkoll_core/src/backend/arch/mtree.rs @@ -10,7 +10,7 @@ use std::{ use anyhow::Context; use dashmap::DashSet; -use flate2::read::GzDecoder; +use flate2::bufread::GzDecoder; use mtree2::{self, MTree}; use paketkoll_types::{ files::{ diff --git a/crates/paketkoll_core/src/backend/deb.rs b/crates/paketkoll_core/src/backend/deb.rs index 4ac32741..49e5178c 100644 --- a/crates/paketkoll_core/src/backend/deb.rs +++ b/crates/paketkoll_core/src/backend/deb.rs @@ -6,7 +6,7 @@ use std::fs::{DirEntry, File}; use std::io::BufReader; use std::path::{Path, PathBuf}; -use super::{Files, FullBackend, Name, PackageManager, Packages}; +use super::FullBackend; use crate::backend::PackageFilter; use crate::utils::{ extract_files, group_queries_by_pkg, locate_package_file, package_manager_transaction, @@ -17,12 +17,14 @@ use bstr::ByteSlice; use bstr::ByteVec; use compact_str::CompactString; use dashmap::DashMap; +use paketkoll_types::backend::{ + Files, Name, OriginalFileQuery, PackageManagerError, PackageMap, Packages, +}; use paketkoll_types::files::{FileEntry, Properties}; -use paketkoll_types::intern::{Interner, PackageRef}; +use paketkoll_types::intern::{ArchitectureRef, Interner, PackageRef}; use paketkoll_types::package::PackageInterned; use rayon::prelude::*; use regex::RegexSet; - // Each package has a set of files in DB_PATH: // *.list (all installed paths, one per line, including directories) // *.md5sums (md5sumpath, one per line for all regular files) @@ -42,6 +44,7 @@ const NAME: &str = "Debian"; #[derive(Debug)] pub(crate) struct Debian { package_filter: &'static PackageFilter, + primary_architecture: ArchitectureRef, } #[derive(Debug, Default)] @@ -55,11 +58,20 @@ impl DebianBuilder { self } - pub fn build(self) -> Debian { + pub fn build(self, interner: &Interner) -> Debian { + let arch = std::process::Command::new("dpkg") + .args(["--print-architecture"]) + .output() + .expect("Failed to get primary architecture") + .stdout; + let arch_str = arch.trim(); + let primary_architecture = + 
ArchitectureRef::get_or_intern(interner, arch_str.to_str_lossy().as_ref()); Debian { package_filter: self .package_filter .unwrap_or_else(|| &PackageFilter::Everything), + primary_architecture, } } } @@ -68,34 +80,38 @@ impl Name for Debian { fn name(&self) -> &'static str { NAME } + + fn as_backend_enum(&self) -> paketkoll_types::backend::Backend { + paketkoll_types::backend::Backend::Apt + } } impl Files for Debian { fn files(&self, interner: &Interner) -> anyhow::Result> { - log::debug!(target: "paketkoll_core::backend::deb", "Loading packages"); + log::debug!("Loading packages"); let packages_files: Vec<_> = get_package_files(interner)?.collect(); // Handle diversions: (parse output of dpkg-divert --list) - log::debug!(target: "paketkoll_core::backend::deb", "Loading diversions"); + log::debug!("Loading diversions"); let diversions = divert::get_diverions(interner).context("Failed to get dpkg diversions")?; // Load config files. - log::debug!(target: "paketkoll_core::backend::deb", "Loading status to get config files"); + log::debug!("Loading status to get config files"); let (config_files, _) = { let mut status = BufReader::new(File::open(STATUS_PATH)?); - parsers::parse_status(interner, &mut status) + parsers::parse_status(interner, &mut status, self.primary_architecture) } .context(format!("Failed to parse {}", STATUS_PATH))?; - log::debug!(target: "paketkoll_core::backend::deb", "Merging packages files into one map"); + log::debug!("Merging packages files into one map"); let merged = DashMap::with_hasher(ahash::RandomState::new()); packages_files.into_par_iter().for_each(|files| { merge_deb_fileentries(&merged, files, &diversions); }); // The config files must be merged into the results - log::debug!(target: "paketkoll_core::backend::deb", "Merging config files"); + log::debug!("Merging config files"); merge_deb_fileentries(&merged, config_files, &diversions); // For Debian we apply the filter here at the end, since multiple steps @@ -116,10 +132,10 @@ impl Files for Debian { fn original_files( &self, - queries: &[super::OriginalFileQuery], - packages: ahash::AHashMap, + queries: &[OriginalFileQuery], + packages: &PackageMap, interner: &Interner, - ) -> anyhow::Result>> { + ) -> anyhow::Result>> { let queries_by_pkg = group_queries_by_pkg(queries); let mut results = ahash::AHashMap::new(); @@ -183,9 +199,9 @@ impl Files for Debian { Ok(results) } - fn owning_package( + fn owning_packages( &self, - paths: &ahash::AHashSet, + paths: &ahash::AHashSet<&Path>, interner: &Interner, ) -> anyhow::Result, ahash::RandomState>> { // Optimise for speed, go directly into package cache and look for files that contain the given string @@ -220,6 +236,10 @@ impl Files for Debian { Ok(file_to_package) } + + fn may_need_canonicalization(&self) -> bool { + true + } } fn is_file_match( @@ -336,15 +356,15 @@ fn process_file(interner: &Interner, entry: &DirEntry) -> anyhow::Result anyhow::Result> { // Parse status - log::debug!(target: "paketkoll_core::backend::deb", "Loading status to installed packages"); + log::debug!("Loading status to installed packages"); let (_, mut packages) = { let mut status = BufReader::new(File::open(STATUS_PATH)?); - parsers::parse_status(interner, &mut status) + parsers::parse_status(interner, &mut status, self.primary_architecture) } .context(format!("Failed to parse {}", STATUS_PATH))?; // Parse extended status - log::debug!(target: "paketkoll_core::backend::deb", "Loading extended status to get auto installed packages"); + log::debug!("Loading extended status to get auto 
installed packages"); let extended_packages = { let mut status = BufReader::new(File::open(EXTENDED_STATUS_PATH)?); parsers::parse_extended_status(interner, &mut status)? @@ -365,35 +385,56 @@ impl Packages for Debian { Ok(packages) } -} -impl PackageManager for Debian { fn transact( &self, - install: &[compact_str::CompactString], - uninstall: &[compact_str::CompactString], + install: &[&str], + uninstall: &[&str], ask_confirmation: bool, - ) -> anyhow::Result<()> { + ) -> Result<(), PackageManagerError> { if !install.is_empty() { package_manager_transaction( "apt-get", - "install", + &["install"], install, - ask_confirmation.then_some("-y"), + (!ask_confirmation).then_some("-y"), ) .context("Failed to install with apt-get")?; } if !uninstall.is_empty() { package_manager_transaction( "apt-get", - "remove", + &["remove"], uninstall, - ask_confirmation.then_some("-y"), + (!ask_confirmation).then_some("-y"), ) .context("Failed to uninstall with apt-get")?; } Ok(()) } + + fn mark(&self, dependencies: &[&str], manual: &[&str]) -> Result<(), PackageManagerError> { + if !dependencies.is_empty() { + package_manager_transaction("apt-mark", &["auto"], dependencies, None) + .context("Failed to mark auto-installed with apt-mark")?; + } + if !manual.is_empty() { + package_manager_transaction("apt-mark", &["manual"], manual, None) + .context("Failed to mark manual with apt-mark")?; + } + Ok(()) + } + + fn remove_unused(&self, ask_confirmation: bool) -> Result<(), PackageManagerError> { + package_manager_transaction( + "apt-get", + &["autoremove"], + &[], + (!ask_confirmation).then_some("-y"), + ) + .context("Failed to autoremove with apt-get")?; + Ok(()) + } } // To get the original package file itno the cache: apt install --reinstall -d pkgname @@ -408,8 +449,7 @@ fn download_deb(pkg: &str) -> Result<(), anyhow::Error> { .args(["install", "--reinstall", "-d", pkg]) .status()?; if !status.success() { - log::warn!(target: "paketkoll_core::backend::deb", - "Failed to download package for {pkg}"); + log::warn!("Failed to download package for {pkg}"); }; Ok(()) } diff --git a/crates/paketkoll_core/src/backend/deb/parsers.rs b/crates/paketkoll_core/src/backend/deb/parsers.rs index 4b81636c..7903b03f 100644 --- a/crates/paketkoll_core/src/backend/deb/parsers.rs +++ b/crates/paketkoll_core/src/backend/deb/parsers.rs @@ -4,8 +4,9 @@ use std::io::BufRead; use anyhow::{bail, Context}; use bstr::{io::BufReadExt, ByteSlice, ByteVec}; +use compact_str::format_compact; use paketkoll_types::intern::{ArchitectureRef, Interner, PackageRef}; -use paketkoll_types::package::PackageBuilder; +use paketkoll_types::package::{Package, PackageBuilder}; use paketkoll_types::{ files::{Checksum, FileEntry, FileFlags, Properties, RegularFileBasic}, package::{Dependency, InstallReason, PackageInstallStatus, PackageInterned}, @@ -132,9 +133,12 @@ fn dependency_name(segment: &str, interner: &lasso::ThreadedRodeo) -> PackageRef pub(super) fn parse_status( interner: &Interner, input: &mut impl BufRead, + primary_architecture: ArchitectureRef, ) -> anyhow::Result<(Vec, Vec)> { let mut state = StatusParsingState::Start; + let all_architecture = ArchitectureRef::get_or_intern(interner, "all"); + let mut config_files = vec![]; let mut packages = vec![]; @@ -150,7 +154,14 @@ pub(super) fn parse_status( let line = guard.trim_end(); if let Some(stripped) = line.strip_prefix("Package: ") { if let Some(builder) = package_builder { - packages.push(builder.build()?); + let mut package = builder.build()?; + fixup_pkg_ids( + &mut package, + 
primary_architecture, + all_architecture, + interner, + ); + packages.push(package); } package_builder = Some(PackageInterned::builder()); // This will be updated later with the correct reason when we parse extended status @@ -257,12 +268,52 @@ pub(super) fn parse_status( } if let Some(builder) = package_builder { - packages.push(builder.build()?); + let mut package = builder.build()?; + fixup_pkg_ids( + &mut package, + primary_architecture, + all_architecture, + interner, + ); + packages.push(package); } Ok((config_files, packages)) } +fn fixup_pkg_ids( + package: &mut Package, + primary_architecture: ArchitectureRef, + all_architecture: ArchitectureRef, + interner: &lasso::ThreadedRodeo, +) { + match package.architecture { + Some(arch) if arch == primary_architecture || arch == all_architecture => { + let pkg = package.name.to_str(interner); + let arch = arch.to_str(interner); + package.ids.push(package.name); + package.ids.push(PackageRef::get_or_intern( + interner, + format_compact!("{pkg}:{arch}"), + )); + } + Some(arch) => { + let pkg = package.name.to_str(interner); + let arch = arch.to_str(interner); + package.ids.push(PackageRef::get_or_intern( + interner, + format_compact!("{pkg}:{arch}"), + )); + } + None => { + log::error!( + "Package {} has no architecture", + package.name.to_str(interner) + ); + } + } +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum StatusParsingState { Start, @@ -525,7 +576,8 @@ mod tests { "}; let mut input = input.as_bytes(); let interner = Interner::default(); - let (files, packages) = parse_status(&interner, &mut input).unwrap(); + let primary_arch = ArchitectureRef::get_or_intern(&interner, "arm64"); + let (files, packages) = parse_status(&interner, &mut input, primary_arch).unwrap(); assert_eq!( packages, vec![Package { @@ -540,7 +592,10 @@ mod tests { provides: vec![], reason: Some(InstallReason::Explicit), status: PackageInstallStatus::Installed, - id: None, + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "libc6"), + PackageRef::get_or_intern(&interner, "libc6:arm64"), + ], }] ); assert_eq!( diff --git a/crates/paketkoll_core/src/backend/filesystem.rs b/crates/paketkoll_core/src/backend/filesystem.rs index d368620a..de471562 100644 --- a/crates/paketkoll_core/src/backend/filesystem.rs +++ b/crates/paketkoll_core/src/backend/filesystem.rs @@ -7,10 +7,7 @@ use std::{ path::PathBuf, }; -use crate::{ - config::{CommonFileCheckConfiguration, ConfigFiles}, - utils::MODE_MASK, -}; +use crate::config::{CommonFileCheckConfiguration, ConfigFiles}; use anyhow::{Context, Result}; use paketkoll_types::{ @@ -20,6 +17,7 @@ use paketkoll_types::{ }, issue::{EntryType, Issue, IssueKind, IssueVec}, }; +use paketkoll_utils::MODE_MASK; /// Determine if a given file should be processed fn should_process(file: &FileEntry, config: &CommonFileCheckConfiguration) -> bool { @@ -184,6 +182,7 @@ pub(crate) fn check_file( // SAFETY: As far as I can find out, these do not actually // have any safety invariants, as they just perform some simple bitwise arithmetics. 
let major_actual = unsafe { libc::major(rdev) } as u64; + // SAFETY: Same as for major let minor_actual = unsafe { libc::minor(rdev) } as u64; if (major_actual, minor_actual) != (*major, *minor) { issues.push(IssueKind::WrongDeviceNodeId { diff --git a/crates/paketkoll_core/src/backend/flatpak.rs b/crates/paketkoll_core/src/backend/flatpak.rs index ee9902f4..b291a636 100644 --- a/crates/paketkoll_core/src/backend/flatpak.rs +++ b/crates/paketkoll_core/src/backend/flatpak.rs @@ -2,15 +2,15 @@ use std::process::{Command, Stdio}; +use crate::utils::package_manager_transaction; use anyhow::Context; +use paketkoll_types::backend::{Name, PackageManagerError, Packages}; +use paketkoll_types::package::InstallReason; use paketkoll_types::{ intern::{ArchitectureRef, PackageRef}, package::{Package, PackageInstallStatus, PackageInterned}, }; - -use crate::utils::package_manager_transaction; - -use super::{Name, PackageManager, Packages}; +use smallvec::SmallVec; /// Flatpak backend #[derive(Debug)] @@ -29,6 +29,10 @@ impl Name for Flatpak { fn name(&self) -> &'static str { "Flatpak" } + + fn as_backend_enum(&self) -> paketkoll_types::backend::Backend { + paketkoll_types::backend::Backend::Flatpak + } } impl Packages for Flatpak { @@ -58,6 +62,51 @@ impl Packages for Flatpak { parse_flatpak_output(&output, interner) } + + /// Flatpak uses the package ref (or partial ref, i.e. application ID) for installation + fn transact( + &self, + install: &[&str], + uninstall: &[&str], + ask_confirmation: bool, + ) -> Result<(), PackageManagerError> { + if !install.is_empty() { + package_manager_transaction( + "flatpak", + &["install"], + install, + (!ask_confirmation).then_some("--noninteractive"), + ) + .context("Failed to install with flatpak")?; + } + if !uninstall.is_empty() { + package_manager_transaction( + "flatpak", + &["uninstall"], + uninstall, + (!ask_confirmation).then_some("--noninteractive"), + ) + .context("Failed to uninstall with flatpak")?; + } + Ok(()) + } + + fn mark(&self, _dependencies: &[&str], _manual: &[&str]) -> Result<(), PackageManagerError> { + Err(PackageManagerError::UnsupportedOperation( + "Marking packages as dependencies or manually installed is not supported by flatpak", + )) + } + + fn remove_unused(&self, ask_confirmation: bool) -> Result<(), PackageManagerError> { + package_manager_transaction( + "flatpak", + &["uninstall", "--unused"], + &[], + (!ask_confirmation).then_some("--noninteractive"), + ) + .context("Failed to remove unused packages with flatpak")?; + Ok(()) + } } fn parse_flatpak_output( @@ -67,17 +116,17 @@ fn parse_flatpak_output( let mut packages = Vec::new(); for line in output.lines() { - let parts: Vec<&str> = line.split('\t').collect(); + let parts: SmallVec<[&str; 6]> = line.split('\t').collect(); if parts.len() != 6 { anyhow::bail!("Unexpected number of columns in flatpak list: {}", line); } // Parse ref - let arch = { + let (app_id, arch) = { let ref_parts: Vec<&str> = parts[0].split('/').collect(); if ref_parts.len() != 3 { anyhow::bail!("Unexpected number of parts in flatpak ref: {}", parts[0]); } - ref_parts[1] + (ref_parts[0], ref_parts[1]) }; let version = parts[3]; @@ -87,6 +136,9 @@ fn parse_flatpak_output( Some(parts[4].into()) }; + let options = parts[5]; + let is_runtime = options.contains("runtime"); + // Build package struct let package = Package { name: PackageRef::get_or_intern(interner, parts[2]), @@ -95,45 +147,25 @@ fn parse_flatpak_output( architecture: Some(ArchitectureRef::get_or_intern(interner, arch)), depends: vec![], provides: 
vec![], - reason: None, + reason: if is_runtime { + // This is an approximation, flatpak doesn't appear to track + // dependency vs explicit installs. + Some(InstallReason::Dependency) + } else { + None + }, status: PackageInstallStatus::Installed, - id: Some(parts[0].into()), + ids: smallvec::smallvec![ + // TODO: What other subsets of the ref is valid? + PackageRef::get_or_intern(interner, app_id), + PackageRef::get_or_intern(interner, parts[0]) + ], }; packages.push(package); } Ok(packages) } -impl PackageManager for Flatpak { - /// Flatpak uses the package ref (or partial ref, i.e. application ID) for installation - fn transact( - &self, - install: &[compact_str::CompactString], - uninstall: &[compact_str::CompactString], - ask_confirmation: bool, - ) -> anyhow::Result<()> { - if !install.is_empty() { - package_manager_transaction( - "flatpak", - "install", - install, - ask_confirmation.then_some("--noninteractive"), - ) - .context("Failed to install with flatpak")?; - } - if !uninstall.is_empty() { - package_manager_transaction( - "flatpak", - "uninstall", - uninstall, - ask_confirmation.then_some("--noninteractive"), - ) - .context("Failed to uninstall with flatpak")?; - } - Ok(()) - } -} - #[cfg(test)] mod tests { use Package; @@ -175,7 +207,13 @@ mod tests { provides: vec![], reason: None, status: PackageInstallStatus::Installed, - id: Some("com.github.tchx84.Flatseal/x86_64/stable".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "com.github.tchx84.Flatseal"), + PackageRef::get_or_intern( + &interner, + "com.github.tchx84.Flatseal/x86_64/stable" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Fedora Media Writer"), @@ -188,7 +226,13 @@ mod tests { provides: vec![], reason: None, status: PackageInstallStatus::Installed, - id: Some("org.fedoraproject.MediaWriter/x86_64/stable".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.fedoraproject.MediaWriter"), + PackageRef::get_or_intern( + &interner, + "org.fedoraproject.MediaWriter/x86_64/stable" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Freedesktop Platform"), @@ -197,9 +241,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform/x86_64/23.08".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.freedesktop.Platform"), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform/x86_64/23.08" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Mesa"), @@ -208,9 +258,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform.GL.default/x86_64/23.08".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.freedesktop.Platform.GL.default"), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.GL.default/x86_64/23.08" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Mesa (Extra)"), @@ -219,9 +275,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: 
Some("org.freedesktop.Platform.GL.default/x86_64/23.08-extra".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.freedesktop.Platform.GL.default"), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.GL.default/x86_64/23.08-extra" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "nvidia-550-78"), @@ -230,9 +292,18 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform.GL.nvidia-550-78/x86_64/1.4".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.GL.nvidia-550-78" + ), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.GL.nvidia-550-78/x86_64/1.4" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Intel"), @@ -241,9 +312,18 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform.VAAPI.Intel/x86_64/23.08".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.VAAPI.Intel" + ), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.VAAPI.Intel/x86_64/23.08" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "openh264"), @@ -252,9 +332,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform.openh264/x86_64/2.2.0".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.freedesktop.Platform.openh264"), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.openh264/x86_64/2.2.0" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "openh264"), @@ -263,9 +349,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.freedesktop.Platform.openh264/x86_64/2.4.1".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.freedesktop.Platform.openh264"), + PackageRef::get_or_intern( + &interner, + "org.freedesktop.Platform.openh264/x86_64/2.4.1" + ) + ], }, Package { name: PackageRef::get_or_intern( @@ -277,9 +369,12 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.gnome.Platform/x86_64/46".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.gnome.Platform"), + PackageRef::get_or_intern(&interner, "org.gnome.Platform/x86_64/46") + ], }, Package { name: PackageRef::get_or_intern(&interner, "Adwaita dark GTK theme"), @@ -288,9 +383,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: 
Some("org.gtk.Gtk3theme.Adwaita-dark/x86_64/3.22".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.gtk.Gtk3theme.Adwaita-dark"), + PackageRef::get_or_intern( + &interner, + "org.gtk.Gtk3theme.Adwaita-dark/x86_64/3.22" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Breeze GTK theme"), @@ -299,9 +400,15 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.gtk.Gtk3theme.Breeze/x86_64/3.22".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.gtk.Gtk3theme.Breeze"), + PackageRef::get_or_intern( + &interner, + "org.gtk.Gtk3theme.Breeze/x86_64/3.22" + ) + ], }, Package { name: PackageRef::get_or_intern(&interner, "Adwaita theme"), @@ -310,9 +417,12 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.kde.KStyle.Adwaita/x86_64/6.6".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.kde.KStyle.Adwaita"), + PackageRef::get_or_intern(&interner, "org.kde.KStyle.Adwaita/x86_64/6.6") + ], }, Package { name: PackageRef::get_or_intern(&interner, "KDE Application Platform"), @@ -321,9 +431,12 @@ mod tests { architecture: Some(ArchitectureRef::get_or_intern(&interner, "x86_64")), depends: vec![], provides: vec![], - reason: None, + reason: Some(InstallReason::Dependency), status: PackageInstallStatus::Installed, - id: Some("org.kde.Platform/x86_64/6.6".into()), + ids: smallvec::smallvec![ + PackageRef::get_or_intern(&interner, "org.kde.Platform"), + PackageRef::get_or_intern(&interner, "org.kde.Platform/x86_64/6.6") + ], }, ] ); diff --git a/crates/paketkoll_core/src/backend/systemd_tmpfiles.rs b/crates/paketkoll_core/src/backend/systemd_tmpfiles.rs index cb229c2a..c09de16e 100644 --- a/crates/paketkoll_core/src/backend/systemd_tmpfiles.rs +++ b/crates/paketkoll_core/src/backend/systemd_tmpfiles.rs @@ -11,6 +11,8 @@ use std::{ use ahash::AHashMap; use anyhow::Context; use compact_str::CompactString; +use paketkoll_types::backend::PackageMap; +use paketkoll_types::backend::{Files, Name, OriginalFileQuery}; use paketkoll_types::files::{ Checksum, DeviceNode, DeviceType, Directory, Fifo, FileEntry, FileFlags, Gid, Mode, Permissions, Properties, RegularFile, RegularFileBasic, RegularFileSystemd, Symlink, Uid, @@ -18,9 +20,7 @@ use paketkoll_types::files::{ use paketkoll_utils::checksum::{sha256_buffer, sha256_readable}; use systemd_tmpfiles::specifier::Resolve; -use crate::utils::MODE_MASK; - -use super::{Files, Name}; +use paketkoll_utils::MODE_MASK; const NAME: &str = "systemd_tmpfiles"; @@ -41,6 +41,10 @@ impl Name for SystemdTmpfiles { fn name(&self) -> &'static str { NAME } + + fn as_backend_enum(&self) -> paketkoll_types::backend::Backend { + paketkoll_types::backend::Backend::SystemdTmpfiles + } } impl Files for SystemdTmpfiles { @@ -73,19 +77,16 @@ impl Files for SystemdTmpfiles { fn original_files( &self, - _queries: &[super::OriginalFileQuery], - _packages: ahash::AHashMap< - paketkoll_types::intern::PackageRef, - paketkoll_types::package::PackageInterned, - >, + _queries: &[OriginalFileQuery], + _packages: &PackageMap, _interner: &paketkoll_types::intern::Interner, - ) -> anyhow::Result>> { + ) -> anyhow::Result>> { 
anyhow::bail!("Original file queries are not supported for systemd-tmpfiles") } - fn owning_package( + fn owning_packages( &self, - _paths: &ahash::AHashSet, + _paths: &ahash::AHashSet<&Path>, _interner: &paketkoll_types::intern::Interner, ) -> anyhow::Result< dashmap::DashMap, ahash::RandomState>, diff --git a/crates/paketkoll_core/src/file_ops.rs b/crates/paketkoll_core/src/file_ops.rs index 6fd90654..cb068b11 100644 --- a/crates/paketkoll_core/src/file_ops.rs +++ b/crates/paketkoll_core/src/file_ops.rs @@ -1,16 +1,12 @@ //! Contain file checking functionality -use std::{ - os::unix::ffi::OsStrExt, - path::{Path, PathBuf}, -}; +use std::{os::unix::ffi::OsStrExt, path::PathBuf}; use anyhow::Context; -use dashmap::DashMap; use ignore::{overrides::OverrideBuilder, Match, WalkBuilder, WalkState}; -use crate::backend::OriginalFileQuery; use paketkoll_types::intern::{Interner, PackageRef}; +use paketkoll_types::{backend::OriginalFileQuery, files::PathMap}; use paketkoll_types::{ files::FileEntry, issue::{Issue, IssueKind, PackageIssue}, @@ -20,21 +16,21 @@ use rayon::prelude::*; /// Perform a query of original files #[doc(hidden)] pub fn original_files( - backend: &crate::backend::Backend, + backend: &crate::backend::ConcreteBackend, backend_config: &crate::backend::BackendConfiguration, queries: &[OriginalFileQuery], ) -> anyhow::Result>> { + let interner = Interner::new(); let backend_impl = backend - .create_full(backend_config) + .create_full(backend_config, &interner) .with_context(|| format!("Failed to create backend for {backend}"))?; - let interner = Interner::new(); let package_map = backend_impl - .package_map(&interner) + .package_map_complete(&interner) .with_context(|| format!("Failed to collect information from backend {backend}"))?; let results = backend_impl - .original_files(queries, package_map, &interner) + .original_files(queries, &package_map, &interner) .with_context(|| format!("Failed to collect original files from backend {backend}"))?; Ok(results) @@ -42,20 +38,20 @@ pub fn original_files( /// Check file system for differences using the given configuration pub fn check_installed_files( - backend: &crate::backend::Backend, + backend: &crate::backend::ConcreteBackend, backend_config: &crate::backend::BackendConfiguration, filecheck_config: &crate::config::CommonFileCheckConfiguration, ) -> anyhow::Result<(Interner, Vec)> { + let interner = Interner::new(); let backend_impl = backend - .create_files(backend_config) + .create_files(backend_config, &interner) .with_context(|| format!("Failed to create backend for {backend}"))?; - let interner = Interner::new(); // Get distro specific file list let results = backend_impl .files(&interner) .with_context(|| format!("Failed to collect information from backend {backend}"))?; - log::debug!(target: "paketkoll_core::backend", "Checking file system"); + log::debug!("Checking file system"); // For all file entries, check on file system // Par-bridge is used here to avoid batching. 
We do too much work for // batching to be useful, and this way we avoid pathological cases with @@ -83,23 +79,44 @@ pub fn check_installed_files( /// Check file system for differences (including unexpected files) using the given configuration pub fn check_all_files( - backend: &crate::backend::Backend, + backend: &crate::backend::ConcreteBackend, backend_config: &crate::backend::BackendConfiguration, filecheck_config: &crate::config::CommonFileCheckConfiguration, unexpected_cfg: &crate::config::CheckAllFilesConfiguration, ) -> anyhow::Result<(Interner, Vec)> { + let interner = Interner::new(); // Collect distro files let backend_impl = backend - .create_files(backend_config) + .create_files(backend_config, &interner) .with_context(|| format!("Failed to create backend for {backend}"))?; - let interner = Interner::new(); // Get distro specific file list - let results = backend_impl + let mut expected_files = backend_impl .files(&interner) .with_context(|| format!("Failed to collect information from backend {backend}",))?; - let results = mismatching_and_unexpected_files(results, filecheck_config, unexpected_cfg)?; - Ok((interner, results)) + // Possibly canonicalize paths + if unexpected_cfg.canonicalize_paths { + log::debug!("Canonicalizing paths"); + canonicalize_file_entries(&mut expected_files); + } + + log::debug!("Preparing data structures"); + // We want a hashmap from path to data here. + let path_map = create_path_map(&expected_files); + + let mismatches = mismatching_and_unexpected_files( + &expected_files, + &path_map, + filecheck_config, + unexpected_cfg, + )?; + + // Drop on a background thread, this helps a bit. + drop(path_map); + rayon::spawn(move || { + drop(expected_files); + }); + Ok((interner, mismatches)) } /// Find mismatching and unexpected files @@ -108,25 +125,13 @@ pub fn check_all_files( /// /// Returned will be a list of issues found (along with which package is /// associated with that file if known). -pub fn mismatching_and_unexpected_files( - mut expected_files: Vec, +pub fn mismatching_and_unexpected_files<'a>( + expected_files: &'a Vec, + path_map: &PathMap<'a>, filecheck_config: &crate::config::CommonFileCheckConfiguration, unexpected_cfg: &crate::config::CheckAllFilesConfiguration, ) -> anyhow::Result, Issue)>> { - // Possibly canonicalize paths - if unexpected_cfg.canonicalize_paths { - log::debug!(target: "paketkoll_core::backend", "Canonicalizing paths"); - canonicalize_file_entries(&mut expected_files); - } - - log::debug!(target: "paketkoll_core::backend", "Preparing data structures"); - // We want a hashmap from path to data here. - let path_map: DashMap<&Path, &FileEntry, ahash::RandomState> = - DashMap::with_capacity_and_hasher(expected_files.len(), ahash::RandomState::new()); - expected_files.par_iter().for_each(|file_entry| { - path_map.insert(&file_entry.path, file_entry); - }); - + log::debug!("Building ignores"); // Build glob set of ignores let overrides = { let mut builder = OverrideBuilder::new("/"); @@ -141,7 +146,7 @@ pub fn mismatching_and_unexpected_files( builder.build()?
}; - log::debug!(target: "paketkoll_core::backend", "Walking file system"); + log::debug!("Walking file system"); let walker = WalkBuilder::new("/") .hidden(false) .parents(false) @@ -166,8 +171,7 @@ pub fn mismatching_and_unexpected_files( file_entry .seen .store(true, std::sync::atomic::Ordering::Relaxed); - match crate::backend::filesystem::check_file(&file_entry, filecheck_config) - { + match crate::backend::filesystem::check_file(file_entry, filecheck_config) { Ok(Some(inner)) => { collector .send((file_entry.package, inner)) @@ -213,6 +217,7 @@ pub fn mismatching_and_unexpected_files( }) }); + log::debug!("Identifying and processing missing files"); // Identify missing files (we should have seen them walking through the file system) expected_files.par_iter().for_each(|file_entry| { if file_entry.seen.load(std::sync::atomic::Ordering::Relaxed) { @@ -224,6 +229,14 @@ pub fn mismatching_and_unexpected_files( ) { return; } + // We also need to check the parent directories against ignores + for parent in file_entry.path.ancestors() { + match overrides.matched(parent, true) { + Match::None => (), + Match::Ignore(_) => return, + Match::Whitelist(_) => break, + } + } collector .send(( file_entry.package, @@ -236,25 +249,29 @@ pub fn mismatching_and_unexpected_files( .expect("Unbounded queue"); }); + log::debug!("Collecting results"); // Collect all items from queue into vec let mut mismatches = Vec::new(); for item in collected_issues.drain() { mismatches.push(item); } + Ok(mismatches) +} - // Drop on a background thread, this help a bit. - drop(path_map); - rayon::spawn(move || { - drop(expected_files); +/// Create a path map for a set of expected files +pub fn create_path_map(expected_files: &[FileEntry]) -> PathMap<'_> { + let mut path_map: PathMap<'_> = + PathMap::with_capacity_and_hasher(expected_files.len(), ahash::RandomState::new()); + expected_files.iter().for_each(|file_entry| { + path_map.insert(&file_entry.path, file_entry); }); - - Ok(mismatches) + path_map } /// Canonicalize paths in file entries. 
/// /// This is needed for Debian as packages don't make sense wrt /usr-merge -fn canonicalize_file_entries(results: &mut Vec) { +pub fn canonicalize_file_entries(results: &mut Vec) { results.par_iter_mut().for_each(|file_entry| { if file_entry.path.as_os_str().as_bytes() == b"/" { return; diff --git a/crates/paketkoll_core/src/package_ops.rs b/crates/paketkoll_core/src/package_ops.rs index c83508be..a69c2956 100644 --- a/crates/paketkoll_core/src/package_ops.rs +++ b/crates/paketkoll_core/src/package_ops.rs @@ -6,13 +6,13 @@ use paketkoll_types::{intern::Interner, package::PackageInterned}; /// Get a list of all installed packages pub fn installed_packages( - backend: &crate::backend::Backend, + backend: &crate::backend::ConcreteBackend, backend_config: &crate::backend::BackendConfiguration, ) -> anyhow::Result<(Interner, Vec)> { + let interner = Interner::new(); let backend_impl = backend - .create_packages(backend_config) + .create_packages(backend_config, &interner) .with_context(|| format!("Failed to create backend for {backend}"))?; - let interner = Interner::new(); let packages = backend_impl .packages(&interner) .with_context(|| format!("Failed to collect information from backend {backend}"))?; diff --git a/crates/paketkoll_core/src/utils.rs b/crates/paketkoll_core/src/utils.rs index 3bcb5829..014dffdc 100644 --- a/crates/paketkoll_core/src/utils.rs +++ b/crates/paketkoll_core/src/utils.rs @@ -10,25 +10,24 @@ use std::{ path::PathBuf, }; -/// Mask out the bits of the mode that are actual permissions -pub(crate) const MODE_MASK: u32 = 0o7777; - /// Helper to do a generic package manager transaction pub(crate) fn package_manager_transaction( program_name: &str, - mode: &str, - pkg_list: &[compact_str::CompactString], + flags: &[&str], + pkg_list: &[&str], ask_confirmation: Option<&str>, ) -> anyhow::Result<()> { - let mut apt_get = std::process::Command::new(program_name); - apt_get.arg(mode); + let mut cmd = std::process::Command::new(program_name); + for arg in flags { + cmd.arg(arg); + } if let Some(flag) = ask_confirmation { - apt_get.arg(flag); + cmd.arg(flag); } for pkg in pkg_list { - apt_get.arg(pkg.as_str()); + cmd.arg(pkg); } - let status = apt_get + let status = cmd .status() .with_context(|| format!("Failed to execute {program_name}"))?; if !status.success() { @@ -84,7 +83,7 @@ impl<'archive, R: Read + 'archive> Read for CompressionFormat<'archive, R> { #[cfg(feature = "__extraction")] pub(crate) fn group_queries_by_pkg( - queries: &[crate::backend::OriginalFileQuery], + queries: &[paketkoll_types::backend::OriginalFileQuery], ) -> AHashMap<&str, AHashSet<&str>> { let mut queries_by_pkg: AHashMap<&str, AHashSet<&str>> = AHashMap::new(); @@ -146,9 +145,9 @@ pub(crate) fn locate_package_file( // Nothing found, try downloading the package if downloaded { - log::error!(target: "paketkoll_core::utils", "Failed to find package for {pkg}"); + log::error!("Failed to find package for {pkg}"); } else { - log::info!(target: "paketkoll_core::utils", "Downloading package for {pkg}"); + log::info!("Downloading package for {pkg}"); download_pkg(pkg)?; } } @@ -160,7 +159,7 @@ pub(crate) fn locate_package_file( pub(crate) fn extract_files( mut archive: tar::Archive, queries: &AHashSet<&str>, - results: &mut AHashMap>, + results: &mut AHashMap>, pkg: &str, name_manger: impl Fn(&str) -> CompactString, ) -> Result<(), anyhow::Error> { @@ -181,7 +180,7 @@ pub(crate) fn extract_files( let mut contents = Vec::new(); entry.read_to_end(&mut contents)?; results.insert( - 
crate::backend::OriginalFileQuery { + paketkoll_types::backend::OriginalFileQuery { package: pkg.into(), path, }, diff --git a/crates/paketkoll_types/Cargo.toml b/crates/paketkoll_types/Cargo.toml index 7727c166..08ec736b 100644 --- a/crates/paketkoll_types/Cargo.toml +++ b/crates/paketkoll_types/Cargo.toml @@ -14,9 +14,11 @@ version = "0.1.0" serde = ["dep:serde", "smallvec/serde", "bitflags/serde", "compact_str/serde"] [dependencies] +ahash.workspace = true anyhow.workspace = true bitflags.workspace = true compact_str.workspace = true +dashmap.workspace = true derive_builder.workspace = true faster-hex = { workspace = true, features = ["std"] } lasso = { workspace = true, features = [ @@ -26,11 +28,9 @@ lasso = { workspace = true, features = [ ] } nix = { workspace = true, features = ["fs", "user"] } serde = { workspace = true, optional = true, features = ["derive"] } -smallvec = { workspace = true, features = [ - "const_generics", - "const_new", - "union", -] } +smallvec.workspace = true +strum.workspace = true +thiserror.workspace = true [lints] workspace = true diff --git a/crates/paketkoll_types/README.md b/crates/paketkoll_types/README.md new file mode 100644 index 00000000..cf1d0c16 --- /dev/null +++ b/crates/paketkoll_types/README.md @@ -0,0 +1,6 @@ +# paketkoll_types + +Public types used by paketkoll and related projects. + +You should most likely use this indirectly via paketkoll_core unless you are +in this repository. diff --git a/crates/paketkoll_types/src/backend.rs b/crates/paketkoll_types/src/backend.rs new file mode 100644 index 00000000..c831b1b5 --- /dev/null +++ b/crates/paketkoll_types/src/backend.rs @@ -0,0 +1,147 @@ +//! Declaration of backends + +use crate::files::FileEntry; +use crate::intern::{Interner, PackageRef}; +use crate::package::PackageInterned; +use ahash::AHashMap; +use ahash::AHashSet; +use anyhow::{anyhow, Context}; +use compact_str::CompactString; +use dashmap::DashMap; +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +/// Which backend to use for the system package manager +#[derive( + Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, strum::Display, strum::EnumString, +)] +pub enum Backend { + /// Backend for Arch Linux and derived distros (pacman) + #[strum(to_string = "pacman")] + Pacman, + /// Backend for Debian and derived distros (dpkg/apt) + #[strum(to_string = "apt")] + Apt, + /// Backend for flatpak (package list only) + #[strum(to_string = "flatpak")] + Flatpak, + /// Backend for systemd-tmpfiles (file list only) + #[strum(to_string = "systemd-tmpfiles")] + SystemdTmpfiles, +} + +/// Type for a mapping of package IDs to package data +pub type PackageMap = AHashMap; + +/// Type for a mapping from backend to package map +pub type PackageMapMap = BTreeMap>; + +/// Type of map of package backends +pub type PackageBackendMap = BTreeMap>; + +/// Type of map of file backends +pub type FilesBackendMap = BTreeMap>; + +/// Get the name of a backend (useful in dynamic dispatch for generating reports) +pub trait Name: Send + Sync + std::fmt::Debug { + /// The name of the backend (for logging and debugging purposes) + fn name(&self) -> &'static str; + + /// The backend enum value corresponding to this backend + fn as_backend_enum(&self) -> Backend; +} + +/// A package manager backend +pub trait Files: Name { + /// Collect a list of files managed by the package manager including + /// any available metadata such as checksums or timestamps about those files + fn files(&self, interner: &Interner) -> 
anyhow::Result<Vec<FileEntry>>; + + /// True if this backend may benefit from path canonicalization for certain scans + /// (i.e. paths may be inaccurate) + fn may_need_canonicalization(&self) -> bool { + false + } + + /// Find the owners of the specified files + fn owning_packages( + &self, + paths: &AHashSet<&Path>, + interner: &Interner, + ) -> anyhow::Result<DashMap<PathBuf, Option<PackageRef>, ahash::RandomState>>; + + /// Get the original contents of files + fn original_files( + &self, + queries: &[OriginalFileQuery], + packages: &PackageMap, + interner: &Interner, + ) -> anyhow::Result<AHashMap<OriginalFileQuery, Vec<u8>>>; +} + +/// Query type for original file contents +#[derive(Debug, Clone, Eq, PartialEq, Hash)] +pub struct OriginalFileQuery { + pub package: CompactString, + pub path: CompactString, +} + +/// A package manager backend (reading list of packages) +pub trait Packages: Name { + /// Collect a list of all installed packages + fn packages(&self, interner: &Interner) -> anyhow::Result<Vec<PackageInterned>>; + + /// Collect a map of packages with all alternative names as keys + fn package_map_complete(&self, interner: &Interner) -> anyhow::Result<PackageMap> { + let packages = self + .packages(interner) + .with_context(|| anyhow!("Failed to load package list"))?; + Ok(packages_to_package_map(packages)) + } + + /// Perform installation and uninstallation of a bunch of packages + /// + /// The package name format depends on the backend. + fn transact( + &self, + install: &[&str], + uninstall: &[&str], + ask_confirmation: bool, + ) -> Result<(), PackageManagerError>; + + /// Mark packages as dependencies and manually installed + fn mark(&self, dependencies: &[&str], manual: &[&str]) -> Result<(), PackageManagerError>; + + /// Ask package manager to uninstall unused packages + /// + /// If needed, this should internally repeat until no more packages can be removed (or the user aborted) + fn remove_unused(&self, ask_confirmation: bool) -> Result<(), PackageManagerError>; +} + +/// Errors that package manager transactions can produce +#[derive(Debug, thiserror::Error)] +pub enum PackageManagerError { + /// This operation isn't supported by this backend + #[error("Operation not supported: {0}")] + UnsupportedOperation(&'static str), + /// All other errors + #[error("{0:?}")] + Other(#[from] anyhow::Error), +} + +/// Convert a package vector to a package map +pub fn packages_to_package_map(packages: Vec<PackageInterned>) -> PackageMap { + let mut package_map = + AHashMap::with_capacity_and_hasher(packages.len(), ahash::RandomState::new()); + for package in packages.into_iter() { + if package.ids.is_empty() { + package_map.insert(package.name, package); + } else { + for id in &package.ids { + package_map.insert(*id, package.clone()); + } + } + } + package_map +} diff --git a/crates/paketkoll_types/src/files.rs b/crates/paketkoll_types/src/files.rs index 6f545ac4..a2c54834 100644 --- a/crates/paketkoll_types/src/files.rs +++ b/crates/paketkoll_types/src/files.rs @@ -2,7 +2,7 @@ use crate::intern::PackageRef; use std::fmt::Octal; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::sync::atomic::AtomicBool; use std::time::SystemTime; @@ -14,12 +14,12 @@ pub struct Mode(u32); impl Mode { #[inline] - pub fn new(value: u32) -> Self { + pub const fn new(value: u32) -> Self { Self(value) } #[inline] - pub fn as_raw(&self) -> u32 { + pub const fn as_raw(&self) -> u32 { self.0 } @@ -272,6 +272,9 @@ pub struct Directory { pub group: Gid, } +/// A mapping from paths to file entries +pub type PathMap<'a> = ahash::AHashMap<&'a Path, &'a FileEntry>; + /// A file entry from the package database #[derive(Debug)]
pub struct FileEntry { @@ -353,6 +356,22 @@ pub enum Properties { } impl Properties { + pub fn is_regular_file(&self) -> Option { + match self { + Properties::RegularFileBasic(_) => Some(true), + Properties::RegularFileSystemd(_) => Some(true), + Properties::RegularFile(_) => Some(true), + Properties::Symlink(_) => Some(false), + Properties::Directory(_) => Some(false), + Properties::Fifo(_) => Some(false), + Properties::DeviceNode(_) => Some(false), + Properties::Special => Some(false), + Properties::Removed => None, + Properties::Unknown => None, + Properties::Permissions(_) => None, + } + } + pub fn is_dir(&self) -> Option { match self { Properties::RegularFileBasic(_) => Some(false), @@ -368,6 +387,55 @@ impl Properties { Properties::Permissions(_) => None, } } + + /// Get mode (if available) + pub fn mode(&self) -> Option { + match self { + Properties::RegularFileBasic(_) => None, + Properties::RegularFileSystemd(val) => Some(val.mode), + Properties::RegularFile(val) => Some(val.mode), + Properties::Symlink(_) => None, + Properties::Directory(val) => Some(val.mode), + Properties::Fifo(val) => Some(val.mode), + Properties::DeviceNode(val) => Some(val.mode), + Properties::Special => None, + Properties::Removed => None, + Properties::Unknown => None, + Properties::Permissions(val) => Some(val.mode), + } + } + + pub fn owner(&self) -> Option { + match self { + Properties::RegularFileBasic(_) => None, + Properties::RegularFileSystemd(val) => Some(val.owner), + Properties::RegularFile(val) => Some(val.owner), + Properties::Symlink(val) => Some(val.owner), + Properties::Directory(val) => Some(val.owner), + Properties::Fifo(val) => Some(val.owner), + Properties::DeviceNode(val) => Some(val.owner), + Properties::Special => None, + Properties::Removed => None, + Properties::Unknown => None, + Properties::Permissions(val) => Some(val.owner), + } + } + + pub fn group(&self) -> Option { + match self { + Properties::RegularFileBasic(_) => None, + Properties::RegularFileSystemd(val) => Some(val.group), + Properties::RegularFile(val) => Some(val.group), + Properties::Symlink(val) => Some(val.group), + Properties::Directory(val) => Some(val.group), + Properties::Fifo(val) => Some(val.group), + Properties::DeviceNode(val) => Some(val.group), + Properties::Special => None, + Properties::Removed => None, + Properties::Unknown => None, + Properties::Permissions(val) => Some(val.group), + } + } } /// A set of permissions diff --git a/crates/paketkoll_types/src/lib.rs b/crates/paketkoll_types/src/lib.rs index ef34e886..dd42f3ae 100644 --- a/crates/paketkoll_types/src/lib.rs +++ b/crates/paketkoll_types/src/lib.rs @@ -1,5 +1,6 @@ //! Public types used by paketkoll and related projects +pub mod backend; pub mod files; pub mod intern; pub mod issue; diff --git a/crates/paketkoll_types/src/package.rs b/crates/paketkoll_types/src/package.rs index 35b5544b..de677e4c 100644 --- a/crates/paketkoll_types/src/package.rs +++ b/crates/paketkoll_types/src/package.rs @@ -2,6 +2,7 @@ use crate::intern::{ArchitectureRef, Interner, PackageRef}; use compact_str::CompactString; +use smallvec::SmallVec; /// The reason a package is installed #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -53,9 +54,11 @@ where pub reason: Option, /// Install status pub status: PackageInstallStatus, - /// ID for package (if not same as name) - #[builder(default = "None")] - pub id: Option, + /// IDs for package (if not same as name). + /// + /// The first one should be the preferred or canonical one. 
+ #[builder(default = "smallvec::smallvec![]")] + pub ids: SmallVec<[PackageT; 4]>, } /// Interned compact package @@ -63,6 +66,20 @@ pub type PackageInterned = Package; /// Package with strings in them, for serialisation purposes pub type PackageDirect = Package; +impl Package +where + PackageT: std::fmt::Debug + PartialEq + Eq + Clone, + ArchitectureT: std::fmt::Debug + PartialEq + Eq + Clone, +{ + pub fn canonical_id(&self) -> &PackageT { + if self.ids.is_empty() { + &self.name + } else { + &self.ids[0] + } + } +} + impl Package where PackageT: std::fmt::Debug + PartialEq + Eq + Clone + Copy, @@ -97,7 +114,11 @@ impl PackageInterned { .collect(), reason: self.reason, status: self.status, - id: self.id, + ids: self + .ids + .into_iter() + .flat_map(|pkg| pkg.try_to_str(interner).map(Into::into)) + .collect(), } } } @@ -118,7 +139,7 @@ impl serde::Serialize for PackageDirect { state.serialize_field("provides", &self.provides)?; state.serialize_field("reason", &self.reason)?; state.serialize_field("status", &self.status)?; - state.serialize_field("id", &self.id)?; + state.serialize_field("id", &self.ids)?; state.end() } } diff --git a/crates/paketkoll_utils/README.md b/crates/paketkoll_utils/README.md new file mode 100644 index 00000000..f688d6fc --- /dev/null +++ b/crates/paketkoll_utils/README.md @@ -0,0 +1,5 @@ +# paketkoll_utils + +Internal helper crate for paketkoll & konfigkoll. + +Not for external usage. No stability guarantees whatsoever. diff --git a/crates/paketkoll_utils/src/lib.rs b/crates/paketkoll_utils/src/lib.rs index 46b04542..951bd6fc 100644 --- a/crates/paketkoll_utils/src/lib.rs +++ b/crates/paketkoll_utils/src/lib.rs @@ -1,3 +1,8 @@ -//! Internal helper crate for paketkoll & konfigkoll. Not for external usage. +//! Internal helper crate for paketkoll & konfigkoll. +//! +//! Not for external usage. No stability guarantees whatsoever. pub mod checksum; + +/// Mask out the bits of the mode that are actual permissions +pub const MODE_MASK: u32 = 0o7777; diff --git a/crates/systemd_tmpfiles/Cargo.toml b/crates/systemd_tmpfiles/Cargo.toml index c7a5d1a9..6d2c48fd 100644 --- a/crates/systemd_tmpfiles/Cargo.toml +++ b/crates/systemd_tmpfiles/Cargo.toml @@ -28,12 +28,8 @@ dirs = { workspace = true, optional = true } libc.workspace = true memchr.workspace = true nix = { workspace = true, features = ["feature"] } -smallvec = { workspace = true, features = [ - "const_generics", - "const_new", - "union", -] } -strum = { workspace = true, features = ["derive"] } +smallvec.workspace = true +strum.workspace = true thiserror.workspace = true winnow = { workspace = true, features = ["simd"] } diff --git a/crates/xtask/Cargo.toml b/crates/xtask/Cargo.toml new file mode 100644 index 00000000..ce7c308e --- /dev/null +++ b/crates/xtask/Cargo.toml @@ -0,0 +1,39 @@ +[package] +description = "Cargo-xtask pattern. 
Do not upload to crates.io" +edition = "2021" +license = "MPL-2.0" +name = "xtask" +repository = "https://github.com/VorpalBlade/paketkoll" +rust-version = "1.79.0" +version = "0.1.0" +publish = false + +[features] +# Default features +default = ["debian", "arch_linux", "json"] + +# Include the Arch Linux backend +arch_linux = ["paketkoll/arch_linux"] + +# Include support for the Debian backend +debian = ["paketkoll/debian"] + +# Include support for JSON output +json = ["paketkoll/json"] + +# Include support for the systemd-tmpfiles backend (EXPERIMENTAL) +systemd_tmpfiles = ["paketkoll/systemd_tmpfiles"] + +[dependencies] +anyhow = { workspace = true, features = ["backtrace"] } +camino.workspace = true +clap = { workspace = true, features = ["derive"] } +clap_complete.workspace = true +clap_mangen.workspace = true +env_logger.workspace = true +konfigkoll = { version = "0.1.0", path = "../konfigkoll" } +paketkoll = { version = "0.2.3", path = "../paketkoll" } +log.workspace = true + +[lints] +workspace = true diff --git a/crates/xtask/README.md b/crates/xtask/README.md new file mode 100644 index 00000000..618ca034 --- /dev/null +++ b/crates/xtask/README.md @@ -0,0 +1,9 @@ +# xtask + +This crate implements the [cargo-xtask] pattern for paketkoll and konfigkoll. + +It is primarily used to generate man pages and shell completion. + +It is not published to crates.io. + +[cargo-xtask]: https://github.com/matklad/cargo-xtask diff --git a/crates/xtask/src/cli.rs b/crates/xtask/src/cli.rs new file mode 100644 index 00000000..d8bf4c61 --- /dev/null +++ b/crates/xtask/src/cli.rs @@ -0,0 +1,47 @@ +use camino::Utf8PathBuf; +use clap::{Parser, Subcommand}; + +#[derive(Debug, Parser)] +#[command(version, about, long_about = None)] +#[command(propagate_version = true)] +#[clap(disable_help_subcommand = true)] +pub(crate) struct Cli { + /// Operation to perform + #[command(subcommand)] + pub(crate) command: Commands, +} + +#[derive(Debug, Subcommand)] +pub(crate) enum Commands { + /// Generate man page + Man { + /// Output directory + #[arg(short, long)] + output: Utf8PathBuf, + /// Command to generate for + cmd: CommandName, + }, + /// Generate shell completions + Completions { + /// Output directory + #[arg(short, long)] + output: Utf8PathBuf, + /// Command to generate for + cmd: CommandName, + }, +} + +#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy, clap::ValueEnum)] +pub(crate) enum CommandName { + Paketkoll, + Konfigkoll, +} + +impl std::fmt::Display for CommandName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CommandName::Paketkoll => write!(f, "paketkoll"), + CommandName::Konfigkoll => write!(f, "konfigkoll"), + } + } +} diff --git a/crates/xtask/src/main.rs b/crates/xtask/src/main.rs new file mode 100644 index 00000000..039aba00 --- /dev/null +++ b/crates/xtask/src/main.rs @@ -0,0 +1,35 @@ +use clap::{CommandFactory, Parser, ValueEnum}; +use clap_complete::Shell; +use cli::Commands; + +mod cli; + +fn main() -> anyhow::Result<()> { + let mut builder = + env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")); + builder.init(); + let cli = cli::Cli::parse(); + + match cli.command { + Commands::Man { output, cmd } => { + let cmd = match cmd { + cli::CommandName::Paketkoll => paketkoll::cli::Cli::command(), + cli::CommandName::Konfigkoll => konfigkoll::cli::Cli::command(), + }; + std::fs::create_dir_all(&output)?; + clap_mangen::generate_to(cmd, &output)?; + } + Commands::Completions { output, cmd } => { + let bin_name = 
cmd.to_string(); + let mut cmd = match cmd { + cli::CommandName::Paketkoll => paketkoll::cli::Cli::command(), + cli::CommandName::Konfigkoll => konfigkoll::cli::Cli::command(), + }; + std::fs::create_dir_all(&output)?; + for &shell in Shell::value_variants() { + clap_complete::generate_to(shell, &mut cmd, &bin_name, &output)?; + } + } + } + Ok(()) +} diff --git a/deny.toml b/deny.toml index 2079b4d1..19efa2f0 100644 --- a/deny.toml +++ b/deny.toml @@ -94,6 +94,7 @@ ignore = [ allow = [ "Apache-2.0", "BSD-2-Clause", + "BSD-3-Clause", "CC0-1.0", "ISC", "MIT", diff --git a/doc/.gitignore b/doc/.gitignore new file mode 100644 index 00000000..8ba436e7 --- /dev/null +++ b/doc/.gitignore @@ -0,0 +1,2 @@ +/api +/book diff --git a/doc/book.toml b/doc/book.toml new file mode 100644 index 00000000..4ea88141 --- /dev/null +++ b/doc/book.toml @@ -0,0 +1,11 @@ +[book] +authors = ["Arvid Norlander"] +language = "en" +multilingual = false +src = "src" +title = "Konfigkoll & paketkoll Documentation" + +[output.html] +site-url = "/paketkoll/" +git-repository-url = "https://github.com/VorpalBlade/paketkoll/tree/main" +edit-url-template = "https://github.com/VorpalBlade/paketkoll/edit/main/docs/{path}" diff --git a/doc/src/README.md b/doc/src/README.md new file mode 100644 index 00000000..39a86ea0 --- /dev/null +++ b/doc/src/README.md @@ -0,0 +1,53 @@ +# Konfigkoll and paketkoll + +This repository contains two tools, described below. + +## Paketkoll + +Paketkoll does a bunch of things: + +* On Debian: + * Faster alternative to `debsums`: Checking integrity of installed files with respect to packages. + * Faster alternative to `dpkg-query -S`: Listing which package owns a given file +* On Arch Linux: + * Faster alternative to `pacman -Qkk` / `paccheck`: Checking integrity of installed files with respect to packages. + * Faster alternative to `pacman -Qo`: Listing which package owns files +* Listing installed packages in a Linux distro neutral way (Debian, Arch Linux, and derivatives).\ + Also supports listing flatpak. +* Getting the original file contents for a given path. + +## Konfigkoll + +Konfigkoll is a work in progress cross distro configuration manager. It aims to solve the problem +"I have too many computers and want to keep the system configs in sync", rather than +"I am a sysadmin and want to manage a fleet". As such it is a *personal* system configuration manager. + +The design of konfigkoll is heavily inspired by the excellent [Aconfmgr](https://github.com/CyberShadow/aconfmgr), +but with a few key differences: + +* Aconfmgr is Arch Linux specific, konfigkoll aims to be cross distro + (currently Arch Linux + work in progress support for Debian & derivatives). +* Aconfmgr is written in Bash, and is rather slow. Konfigkoll is written in Rust, and is much faster.\ + As an example, applying my personal config with aconfmgr on my system takes about 30 seconds, while konfigkoll + takes about 2 seconds for the equivalent config. (This is assuming `--trust-mtime`, both are + significantly slowed down if checksums are verified for every file). +* Aconfmgr uses bash as the configuration language, konfigkoll uses [Rune]. + +### Comparisons + +Unlike tools such as ansible, puppet, etc: + +* Konfigkoll only manages the computer it is running on, not remote systems over the network. +* Konfigkoll can save the system state to a file, giving you a full template config to work from. + (You definitely want to customise this saved config though.) 
+ +There is perhaps more similarity with NixOS and Guix, but, unlike those: + +* You can still use normal management tools and save changes to the config afterwards.\ + With NixOS/Guix every change starts at the config. +* NixOS provides specific config keys for every package, konfigkoll is more general: + You can patch any config file with sed-like instructions (or custom code), there is + no special support for specific packages. (There is special support for enabling systemd + services and working with systemd-sysusers though, since those are such common operations.) + +[Rune]: https://rune-rs.github.io/ diff --git a/doc/src/SUMMARY.md b/doc/src/SUMMARY.md new file mode 100644 index 00000000..25a2ae71 --- /dev/null +++ b/doc/src/SUMMARY.md @@ -0,0 +1,21 @@ +# Summary + +[Introduction](./README.md) + +# Konfigkoll + +- [Installation](./konfigkoll/installation.md) +- [Getting started](./konfigkoll/getting_started.md) +- [Managing packages](./konfigkoll/packages.md) +- [Managing files](./konfigkoll/files.md) +- [Integrations (systemd, passwd, etc) & advanced topics](./konfigkoll/integrations/README.md) + - [Systemd units](./konfigkoll/integrations/systemd_units.md) + - [`/etc/passwd`, `/etc/group` and shadow files](./konfigkoll/integrations/passwd.md) + - [Getting system information](./konfigkoll/integrations/sysinfo.md) +- [Advanced topics](./konfigkoll/advanced/README.md) + - [Invoking external commands](./konfigkoll/advanced/process.md) + - [Host file system access](./konfigkoll/advanced/host_file_system_access.md) +- [Cookbook: Examples & snippets](./konfigkoll/cookbook.md) +- [API documentation](./konfigkoll/api.md) +- [Defaults](./konfigkoll/defaults.md) +- [Limitations](./konfigkoll/limitations.md) diff --git a/doc/src/highlight.js b/doc/src/highlight.js new file mode 100644 index 00000000..4822be5f --- /dev/null +++ b/doc/src/highlight.js @@ -0,0 +1,45 @@ +/* + Highlight.js 10.1.1 (039da4f1) + License: BSD-3-Clause + Copyright (c) 2006-2020, Ivan Sagalaev +*/ +var hljs=function(){"use strict";function e(n){Object.freeze(n);var t="function"==typeof n;return Object.getOwnPropertyNames(n).forEach((function(r){!Object.hasOwnProperty.call(n,r)||null===n[r]||"object"!=typeof n[r]&&"function"!=typeof n[r]||t&&("caller"===r||"callee"===r||"arguments"===r)||Object.isFrozen(n[r])||e(n[r])})),n}class n{constructor(e){void 0===e.data&&(e.data={}),this.data=e.data}ignoreMatch(){this.ignore=!0}}function t(e){return e.replace(/&/g,"&").replace(//g,">").replace(/"/g,""").replace(/'/g,"'")}function r(e,...n){var t={};for(const n in e)t[n]=e[n];return n.forEach((function(e){for(const n in e)t[n]=e[n]})),t}function a(e){return e.nodeName.toLowerCase()}var i=Object.freeze({__proto__:null,escapeHTML:t,inherit:r,nodeStream:function(e){var n=[];return function e(t,r){for(var i=t.firstChild;i;i=i.nextSibling)3===i.nodeType?r+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:r,node:i}),r=e(i,r),a(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:i}));return r}(e,0),n},mergeStreams:function(e,n,r){var i=0,s="",o=[];function l(){return e.length&&n.length?e[0].offset!==n[0].offset?e[0].offset"}function u(e){s+=""}function g(e){("start"===e.event?c:u)(e.node)}for(;e.length||n.length;){var d=l();if(s+=t(r.substring(i,d[0].offset)),i=d[0].offset,d===e){o.reverse().forEach(u);do{g(d.splice(0,1)[0]),d=l()}while(d===e&&d.length&&d[0].offset===i);o.reverse().forEach(c)}else"start"===d[0].event?o.push(d[0].node):o.pop(),g(d.splice(0,1)[0])}return s+t(r.substr(i))}});const 
s="",o=e=>!!e.kind;class l{constructor(e,n){this.buffer="",this.classPrefix=n.classPrefix,e.walk(this)}addText(e){this.buffer+=t(e)}openNode(e){if(!o(e))return;let n=e.kind;e.sublanguage||(n=`${this.classPrefix}${n}`),this.span(n)}closeNode(e){o(e)&&(this.buffer+=s)}value(){return this.buffer}span(e){this.buffer+=``}}class c{constructor(){this.rootNode={children:[]},this.stack=[this.rootNode]}get top(){return this.stack[this.stack.length-1]}get root(){return this.rootNode}add(e){this.top.children.push(e)}openNode(e){const n={kind:e,children:[]};this.add(n),this.stack.push(n)}closeNode(){if(this.stack.length>1)return this.stack.pop()}closeAllNodes(){for(;this.closeNode(););}toJSON(){return JSON.stringify(this.rootNode,null,4)}walk(e){return this.constructor._walk(e,this.rootNode)}static _walk(e,n){return"string"==typeof n?e.addText(n):n.children&&(e.openNode(n),n.children.forEach(n=>this._walk(e,n)),e.closeNode(n)),e}static _collapse(e){"string"!=typeof e&&e.children&&(e.children.every(e=>"string"==typeof e)?e.children=[e.children.join("")]:e.children.forEach(e=>{c._collapse(e)}))}}class u extends c{constructor(e){super(),this.options=e}addKeyword(e,n){""!==e&&(this.openNode(n),this.addText(e),this.closeNode())}addText(e){""!==e&&this.add(e)}addSublanguage(e,n){const t=e.root;t.kind=n,t.sublanguage=!0,this.add(t)}toHTML(){return new l(this,this.options).value()}finalize(){return!0}}function g(e){return e?"string"==typeof e?e:e.source:null}const d="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",h={begin:"\\\\[\\s\\S]",relevance:0},f={className:"string",begin:"'",end:"'",illegal:"\\n",contains:[h]},p={className:"string",begin:'"',end:'"',illegal:"\\n",contains:[h]},m={begin:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},b=function(e,n,t={}){var a=r({className:"comment",begin:e,end:n,contains:[]},t);return a.contains.push(m),a.contains.push({className:"doctag",begin:"(?:TODO|FIXME|NOTE|BUG|OPTIMIZE|HACK|XXX):",relevance:0}),a},v=b("//","$"),x=b("/\\*","\\*/"),E=b("#","$");var _=Object.freeze({__proto__:null,IDENT_RE:"[a-zA-Z]\\w*",UNDERSCORE_IDENT_RE:"[a-zA-Z_]\\w*",NUMBER_RE:"\\b\\d+(\\.\\d+)?",C_NUMBER_RE:d,BINARY_NUMBER_RE:"\\b(0b[01]+)",RE_STARTERS_RE:"!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",SHEBANG:(e={})=>{const n=/^#![ ]*\//;return e.binary&&(e.begin=function(...e){return 
e.map(e=>g(e)).join("")}(n,/.*\b/,e.binary,/\b.*/)),r({className:"meta",begin:n,end:/$/,relevance:0,"on:begin":(e,n)=>{0!==e.index&&n.ignoreMatch()}},e)},BACKSLASH_ESCAPE:h,APOS_STRING_MODE:f,QUOTE_STRING_MODE:p,PHRASAL_WORDS_MODE:m,COMMENT:b,C_LINE_COMMENT_MODE:v,C_BLOCK_COMMENT_MODE:x,HASH_COMMENT_MODE:E,NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?",relevance:0},C_NUMBER_MODE:{className:"number",begin:d,relevance:0},BINARY_NUMBER_MODE:{className:"number",begin:"\\b(0b[01]+)",relevance:0},CSS_NUMBER_MODE:{className:"number",begin:"\\b\\d+(\\.\\d+)?(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",relevance:0},REGEXP_MODE:{begin:/(?=\/[^/\n]*\/)/,contains:[{className:"regexp",begin:/\//,end:/\/[gimuy]*/,illegal:/\n/,contains:[h,{begin:/\[/,end:/\]/,relevance:0,contains:[h]}]}]},TITLE_MODE:{className:"title",begin:"[a-zA-Z]\\w*",relevance:0},UNDERSCORE_TITLE_MODE:{className:"title",begin:"[a-zA-Z_]\\w*",relevance:0},METHOD_GUARD:{begin:"\\.\\s*[a-zA-Z_]\\w*",relevance:0},END_SAME_AS_BEGIN:function(e){return Object.assign(e,{"on:begin":(e,n)=>{n.data._beginMatch=e[1]},"on:end":(e,n)=>{n.data._beginMatch!==e[1]&&n.ignoreMatch()}})}}),w="of and for in not or if then".split(" ");function N(e,n){return n?+n:function(e){return w.includes(e.toLowerCase())}(e)?0:1}const y={props:["language","code","autodetect"],data:function(){return{detectedLanguage:"",unknownLanguage:!1}},computed:{className(){return this.unknownLanguage?"":"hljs "+this.detectedLanguage},highlighted(){if(!this.autoDetect&&!hljs.getLanguage(this.language))return console.warn(`The language "${this.language}" you specified could not be found.`),this.unknownLanguage=!0,t(this.code);let e;return this.autoDetect?(e=hljs.highlightAuto(this.code),this.detectedLanguage=e.language):(e=hljs.highlight(this.language,this.code,this.ignoreIllegals),this.detectectLanguage=this.language),e.value},autoDetect(){return!(this.language&&(e=this.autodetect,!e&&""!==e));var e},ignoreIllegals:()=>!0},render(e){return e("pre",{},[e("code",{class:this.className,domProps:{innerHTML:this.highlighted}})])}},R={install(e){e.component("highlightjs",y)}},k=t,O=r,{nodeStream:M,mergeStreams:L}=i,T=Symbol("nomatch");return function(t){var a=[],i=Object.create(null),s=Object.create(null),o=[],l=!0,c=/(^(<[^>]+>|\t|)+|\n)/gm,d="Could not find the language '{}', did you forget to load/include a language module?";const h={disableAutodetect:!0,name:"Plain text",contains:[]};var f={noHighlightRe:/^(no-?highlight)$/i,languageDetectRe:/\blang(?:uage)?-([\w-]+)\b/i,classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:null,__emitter:u};function p(e){return f.noHighlightRe.test(e)}function m(e,n,t,r){var a={code:n,language:e};S("before:highlight",a);var i=a.result?a.result:b(a.language,a.code,t,r);return i.code=a.code,S("after:highlight",i),i}function b(e,t,a,s){var o=t;function c(e,n){var t=E.case_insensitive?n[0].toLowerCase():n[0];return Object.prototype.hasOwnProperty.call(e.keywords,t)&&e.keywords[t]}function u(){null!=R.subLanguage?function(){if(""!==L){var e=null;if("string"==typeof R.subLanguage){if(!i[R.subLanguage])return void M.addText(L);e=b(R.subLanguage,L,!0,O[R.subLanguage]),O[R.subLanguage]=e.top}else e=v(L,R.subLanguage.length?R.subLanguage:null);R.relevance>0&&(j+=e.relevance),M.addSublanguage(e.emitter,e.language)}}():function(){if(!R.keywords)return void M.addText(L);let e=0;R.keywordPatternRe.lastIndex=0;let n=R.keywordPatternRe.exec(L),t="";for(;n;){t+=L.substring(e,n.index);const 
r=c(R,n);if(r){const[e,a]=r;M.addText(t),t="",j+=a,M.addKeyword(n[0],e)}else t+=n[0];e=R.keywordPatternRe.lastIndex,n=R.keywordPatternRe.exec(L)}t+=L.substr(e),M.addText(t)}(),L=""}function h(e){return e.className&&M.openNode(e.className),R=Object.create(e,{parent:{value:R}})}function p(e){return 0===R.matcher.regexIndex?(L+=e[0],1):(I=!0,0)}var m={};function x(t,r){var i=r&&r[0];if(L+=t,null==i)return u(),0;if("begin"===m.type&&"end"===r.type&&m.index===r.index&&""===i){if(L+=o.slice(r.index,r.index+1),!l){const n=Error("0 width match regex");throw n.languageName=e,n.badRule=m.rule,n}return 1}if(m=r,"begin"===r.type)return function(e){var t=e[0],r=e.rule;const a=new n(r),i=[r.__beforeBegin,r["on:begin"]];for(const n of i)if(n&&(n(e,a),a.ignore))return p(t);return r&&r.endSameAsBegin&&(r.endRe=RegExp(t.replace(/[-/\\^$*+?.()|[\]{}]/g,"\\$&"),"m")),r.skip?L+=t:(r.excludeBegin&&(L+=t),u(),r.returnBegin||r.excludeBegin||(L=t)),h(r),r.returnBegin?0:t.length}(r);if("illegal"===r.type&&!a){const e=Error('Illegal lexeme "'+i+'" for mode "'+(R.className||"")+'"');throw e.mode=R,e}if("end"===r.type){var s=function(e){var t=e[0],r=o.substr(e.index),a=function e(t,r,a){let i=function(e,n){var t=e&&e.exec(n);return t&&0===t.index}(t.endRe,a);if(i){if(t["on:end"]){const e=new n(t);t["on:end"](r,e),e.ignore&&(i=!1)}if(i){for(;t.endsParent&&t.parent;)t=t.parent;return t}}if(t.endsWithParent)return e(t.parent,r,a)}(R,e,r);if(!a)return T;var i=R;i.skip?L+=t:(i.returnEnd||i.excludeEnd||(L+=t),u(),i.excludeEnd&&(L=t));do{R.className&&M.closeNode(),R.skip||R.subLanguage||(j+=R.relevance),R=R.parent}while(R!==a.parent);return a.starts&&(a.endSameAsBegin&&(a.starts.endRe=a.endRe),h(a.starts)),i.returnEnd?0:t.length}(r);if(s!==T)return s}if("illegal"===r.type&&""===i)return 1;if(S>1e5&&S>3*r.index)throw Error("potential infinite loop, way more iterations than matches");return L+=i,i.length}var E=y(e);if(!E)throw console.error(d.replace("{}",e)),Error('Unknown language: "'+e+'"');var _=function(e){function n(n,t){return RegExp(g(n),"m"+(e.case_insensitive?"i":"")+(t?"g":""))}class t{constructor(){this.matchIndexes={},this.regexes=[],this.matchAt=1,this.position=0}addRule(e,n){n.position=this.position++,this.matchIndexes[this.matchAt]=n,this.regexes.push([n,e]),this.matchAt+=function(e){return RegExp(e.toString()+"|").exec("").length-1}(e)+1}compile(){0===this.regexes.length&&(this.exec=()=>null);const e=this.regexes.map(e=>e[1]);this.matcherRe=n(function(e,n="|"){for(var t=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,r=0,a="",i=0;i0&&(a+=n),a+="(";o.length>0;){var l=t.exec(o);if(null==l){a+=o;break}a+=o.substring(0,l.index),o=o.substring(l.index+l[0].length),"\\"===l[0][0]&&l[1]?a+="\\"+(+l[1]+s):(a+=l[0],"("===l[0]&&r++)}a+=")"}return a}(e),!0),this.lastIndex=0}exec(e){this.matcherRe.lastIndex=this.lastIndex;const n=this.matcherRe.exec(e);if(!n)return null;const t=n.findIndex((e,n)=>n>0&&void 0!==e),r=this.matchIndexes[t];return n.splice(0,t),Object.assign(n,r)}}class a{constructor(){this.rules=[],this.multiRegexes=[],this.count=0,this.lastIndex=0,this.regexIndex=0}getMatcher(e){if(this.multiRegexes[e])return this.multiRegexes[e];const n=new t;return this.rules.slice(e).forEach(([e,t])=>n.addRule(e,t)),n.compile(),this.multiRegexes[e]=n,n}resumingScanAtSamePosition(){return 0!=this.regexIndex}considerAll(){this.regexIndex=0}addRule(e,n){this.rules.push([e,n]),"begin"===n.type&&this.count++}exec(e){const n=this.getMatcher(this.regexIndex);n.lastIndex=this.lastIndex;const t=n.exec(e);return 
t&&(this.regexIndex+=t.position+1,this.regexIndex===this.count&&(this.regexIndex=0)),t}}function i(e,n){const t=e.input[e.index-1],r=e.input[e.index+e[0].length];"."!==t&&"."!==r||n.ignoreMatch()}if(e.contains&&e.contains.includes("self"))throw Error("ERR: contains `self` is not supported at the top-level of a language. See documentation.");return function t(s,o){const l=s;if(s.compiled)return l;s.compiled=!0,s.__beforeBegin=null,s.keywords=s.keywords||s.beginKeywords;let c=null;if("object"==typeof s.keywords&&(c=s.keywords.$pattern,delete s.keywords.$pattern),s.keywords&&(s.keywords=function(e,n){var t={};return"string"==typeof e?r("keyword",e):Object.keys(e).forEach((function(n){r(n,e[n])})),t;function r(e,r){n&&(r=r.toLowerCase()),r.split(" ").forEach((function(n){var r=n.split("|");t[r[0]]=[e,N(r[0],r[1])]}))}}(s.keywords,e.case_insensitive)),s.lexemes&&c)throw Error("ERR: Prefer `keywords.$pattern` to `mode.lexemes`, BOTH are not allowed. (see mode reference) ");return l.keywordPatternRe=n(s.lexemes||c||/\w+/,!0),o&&(s.beginKeywords&&(s.begin="\\b("+s.beginKeywords.split(" ").join("|")+")(?=\\b|\\s)",s.__beforeBegin=i),s.begin||(s.begin=/\B|\b/),l.beginRe=n(s.begin),s.endSameAsBegin&&(s.end=s.begin),s.end||s.endsWithParent||(s.end=/\B|\b/),s.end&&(l.endRe=n(s.end)),l.terminator_end=g(s.end)||"",s.endsWithParent&&o.terminator_end&&(l.terminator_end+=(s.end?"|":"")+o.terminator_end)),s.illegal&&(l.illegalRe=n(s.illegal)),void 0===s.relevance&&(s.relevance=1),s.contains||(s.contains=[]),s.contains=[].concat(...s.contains.map((function(e){return function(e){return e.variants&&!e.cached_variants&&(e.cached_variants=e.variants.map((function(n){return r(e,{variants:null},n)}))),e.cached_variants?e.cached_variants:function e(n){return!!n&&(n.endsWithParent||e(n.starts))}(e)?r(e,{starts:e.starts?r(e.starts):null}):Object.isFrozen(e)?r(e):e}("self"===e?s:e)}))),s.contains.forEach((function(e){t(e,l)})),s.starts&&t(s.starts,o),l.matcher=function(e){const n=new a;return e.contains.forEach(e=>n.addRule(e.begin,{rule:e,type:"begin"})),e.terminator_end&&n.addRule(e.terminator_end,{type:"end"}),e.illegal&&n.addRule(e.illegal,{type:"illegal"}),n}(l),l}(e)}(E),w="",R=s||_,O={},M=new f.__emitter(f);!function(){for(var e=[],n=R;n!==E;n=n.parent)n.className&&e.unshift(n.className);e.forEach(e=>M.openNode(e))}();var L="",j=0,A=0,S=0,I=!1;try{for(R.matcher.considerAll();;){S++,I?I=!1:(R.matcher.lastIndex=A,R.matcher.considerAll());const e=R.matcher.exec(o);if(!e&&R.matcher.resumingScanAtSamePosition()){L+=o[A],A+=1;continue}if(!e)break;const n=x(o.substring(A,e.index),e);A=e.index+n}return x(o.substr(A)),M.closeAllNodes(),M.finalize(),w=M.toHTML(),{relevance:j,value:w,language:e,illegal:!1,emitter:M,top:R}}catch(n){if(n.message&&n.message.includes("Illegal"))return{illegal:!0,illegalBy:{msg:n.message,context:o.slice(A-100,A+100),mode:n.mode},sofar:w,relevance:0,value:k(o),emitter:M};if(l)return{illegal:!1,relevance:0,value:k(o),emitter:M,language:e,top:R,errorRaised:n};throw n}}function v(e,n){n=n||f.languages||Object.keys(i);var t=function(e){const n={relevance:0,emitter:new f.__emitter(f),value:k(e),illegal:!1,top:h};return n.emitter.addText(e),n}(e),r=t;return n.filter(y).filter(A).forEach((function(n){var a=b(n,e,!1);a.language=n,a.relevance>r.relevance&&(r=a),a.relevance>t.relevance&&(r=t,t=a)})),r.language&&(t.second_best=r),t}function x(e){return f.tabReplace||f.useBR?e.replace(c,e=>"\n"===e?f.useBR?"
":e:f.tabReplace?e.replace(/\t/g,f.tabReplace):e):e}function E(e){let n=null;const t=function(e){var n=e.className+" ";n+=e.parentNode?e.parentNode.className:"";const t=f.languageDetectRe.exec(n);if(t){var r=y(t[1]);return r||(console.warn(d.replace("{}",t[1])),console.warn("Falling back to no-highlight mode for this block.",e)),r?t[1]:"no-highlight"}return n.split(/\s+/).find(e=>p(e)||y(e))}(e);if(p(t))return;S("before:highlightBlock",{block:e,language:t}),f.useBR?(n=document.createElement("div")).innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n"):n=e;const r=n.textContent,a=t?m(t,r,!0):v(r),i=M(n);if(i.length){const e=document.createElement("div");e.innerHTML=a.value,a.value=L(i,M(e),r)}a.value=x(a.value),S("after:highlightBlock",{block:e,result:a}),e.innerHTML=a.value,e.className=function(e,n,t){var r=n?s[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),e.includes(r)||a.push(r),a.join(" ").trim()}(e.className,t,a.language),e.result={language:a.language,re:a.relevance,relavance:a.relevance},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.relevance,relavance:a.second_best.relevance})}const w=()=>{if(!w.called){w.called=!0;var e=document.querySelectorAll("pre code");a.forEach.call(e,E)}};function y(e){return e=(e||"").toLowerCase(),i[e]||i[s[e]]}function j(e,{languageName:n}){"string"==typeof e&&(e=[e]),e.forEach(e=>{s[e]=n})}function A(e){var n=y(e);return n&&!n.disableAutodetect}function S(e,n){var t=e;o.forEach((function(e){e[t]&&e[t](n)}))}Object.assign(t,{highlight:m,highlightAuto:v,fixMarkup:function(e){return console.warn("fixMarkup is deprecated and will be removed entirely in v11.0"),console.warn("Please see https://github.com/highlightjs/highlight.js/issues/2534"),x(e)},highlightBlock:E,configure:function(e){f=O(f,e)},initHighlighting:w,initHighlightingOnLoad:function(){window.addEventListener("DOMContentLoaded",w,!1)},registerLanguage:function(e,n){var r=null;try{r=n(t)}catch(n){if(console.error("Language definition for '{}' could not be registered.".replace("{}",e)),!l)throw n;console.error(n),r=h}r.name||(r.name=e),i[e]=r,r.rawDefinition=n.bind(null,t),r.aliases&&j(r.aliases,{languageName:e})},listLanguages:function(){return Object.keys(i)},getLanguage:y,registerAliases:j,requireLanguage:function(e){var n=y(e);if(n)return n;throw Error("The '{}' language is required, but not loaded.".replace("{}",e))},autoDetection:A,inherit:O,addPlugin:function(e){o.push(e)},vuePlugin:R}),t.debugMode=function(){l=!1},t.safeMode=function(){l=!0},t.versionString="10.1.1";for(const n in _)"object"==typeof _[n]&&e(_[n]);return Object.assign(t,_),t}({})}();"object"==typeof exports&&"undefined"!=typeof module&&(module.exports=hljs); +hljs.registerLanguage("apache",function(){"use strict";return function(e){var n={className:"number",begin:"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?"};return{name:"Apache config",aliases:["apacheconf"],case_insensitive:!0,contains:[e.HASH_COMMENT_MODE,{className:"section",begin:"",contains:[n,{className:"number",begin:":\\d{1,5}"},e.inherit(e.QUOTE_STRING_MODE,{relevance:0})]},{className:"attribute",begin:/\w+/,relevance:0,keywords:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{end:/$/,relevance:0,keywords:{literal:"on off all deny 
allow"},contains:[{className:"meta",begin:"\\s\\[",end:"\\]$"},{className:"variable",begin:"[\\$%]\\{",end:"\\}",contains:["self",{className:"number",begin:"[\\$%]\\d+"}]},n,{className:"number",begin:"\\d+"},e.QUOTE_STRING_MODE]}}],illegal:/\S/}}}()); +hljs.registerLanguage("bash",function(){"use strict";return function(e){const s={};Object.assign(s,{className:"variable",variants:[{begin:/\$[\w\d#@][\w\d_]*/},{begin:/\$\{/,end:/\}/,contains:[{begin:/:-/,contains:[s]}]}]});const t={className:"subst",begin:/\$\(/,end:/\)/,contains:[e.BACKSLASH_ESCAPE]},n={className:"string",begin:/"/,end:/"/,contains:[e.BACKSLASH_ESCAPE,s,t]};t.contains.push(n);const a={begin:/\$\(\(/,end:/\)\)/,contains:[{begin:/\d+#[0-9a-f]+/,className:"number"},e.NUMBER_MODE,s]},i=e.SHEBANG({binary:"(fish|bash|zsh|sh|csh|ksh|tcsh|dash|scsh)",relevance:10}),c={className:"function",begin:/\w[\w\d_]*\s*\(\s*\)\s*\{/,returnBegin:!0,contains:[e.inherit(e.TITLE_MODE,{begin:/\w[\w\d_]*/})],relevance:0};return{name:"Bash",aliases:["sh","zsh"],keywords:{$pattern:/\b-?[a-z\._-]+\b/,keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},contains:[i,e.SHEBANG(),c,a,e.HASH_COMMENT_MODE,n,{className:"",begin:/\\"/},{className:"string",begin:/'/,end:/'/},s]}}}()); +hljs.registerLanguage("c-like",function(){"use strict";return function(e){function t(e){return"(?:"+e+")?"}var n="(decltype\\(auto\\)|"+t("[a-zA-Z_]\\w*::")+"[a-zA-Z_]\\w*"+t("<.*?>")+")",r={className:"keyword",begin:"\\b[a-z\\d_]*_t\\b"},a={className:"string",variants:[{begin:'(u8?|U|L)?"',end:'"',illegal:"\\n",contains:[e.BACKSLASH_ESCAPE]},{begin:"(u8?|U|L)?'(\\\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4,8}|[0-7]{3}|\\S)|.)",end:"'",illegal:"."},e.END_SAME_AS_BEGIN({begin:/(?:u8?|U|L)?R"([^()\\ ]{0,16})\(/,end:/\)([^()\\ ]{0,16})"/})]},i={className:"number",variants:[{begin:"\\b(0b[01']+)"},{begin:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{begin:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],relevance:0},s={className:"meta",begin:/#\s*[a-z]+\b/,end:/$/,keywords:{"meta-keyword":"if else elif endif define undef warning error line pragma _Pragma ifdef ifndef include"},contains:[{begin:/\\\n/,relevance:0},e.inherit(a,{className:"meta-string"}),{className:"meta-string",begin:/<.*?>/,end:/$/,illegal:"\\n"},e.C_LINE_COMMENT_MODE,e.C_BLOCK_COMMENT_MODE]},o={className:"title",begin:t("[a-zA-Z_]\\w*::")+e.IDENT_RE,relevance:0},c=t("[a-zA-Z_]\\w*::")+e.IDENT_RE+"\\s*\\(",l={keyword:"int float while private char char8_t char16_t char32_t catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool 
[Remainder of the vendored, minified highlight.js bundle for the mdbook theme; not reproduced in full. This part of the file registers syntax-highlighting grammars for: c-like, c, coffeescript, cpp, csharp, css, diff, go, ini/toml, java, javascript, json, kotlin, less, lua, makefile, xml/html, markdown, nginx, objectivec, perl, php, php-template, plaintext, properties, python, python-repl, ruby, rune, rust, scss, shell, sql, swift, and typescript. The "rune" grammar (alias "rn") appears to be a custom addition patterned on the Rust grammar, covering the Rune scripting language; the remaining definitions appear to be the stock highlight.js grammars shipped with mdbook.]
e=["as","in","of","if","for","while","finally","var","new","function","do","return","void","else","break","catch","instanceof","with","throw","case","default","try","switch","continue","typeof","delete","let","yield","const","class","debugger","async","await","static","import","from","export","extends"],n=["true","false","null","undefined","NaN","Infinity"],a=[].concat(["setInterval","setTimeout","clearInterval","clearTimeout","require","exports","eval","isFinite","isNaN","parseFloat","parseInt","decodeURI","decodeURIComponent","encodeURI","encodeURIComponent","escape","unescape"],["arguments","this","super","console","window","document","localStorage","module","global"],["Intl","DataView","Number","Math","Date","String","RegExp","Object","Function","Boolean","Error","Symbol","Set","Map","WeakSet","WeakMap","Proxy","Reflect","JSON","Promise","Float64Array","Int16Array","Int32Array","Int8Array","Uint16Array","Uint32Array","Float32Array","Array","Uint8Array","Uint8ClampedArray","ArrayBuffer"],["EvalError","InternalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);return function(r){var t={$pattern:"[A-Za-z$_][0-9A-Za-z$_]*",keyword:e.concat(["type","namespace","typedef","interface","public","private","protected","implements","declare","abstract","readonly"]).join(" "),literal:n.join(" "),built_in:a.concat(["any","void","number","boolean","string","object","never","enum"]).join(" ")},s={className:"meta",begin:"@[A-Za-z$_][0-9A-Za-z$_]*"},i={className:"number",variants:[{begin:"\\b(0[bB][01]+)n?"},{begin:"\\b(0[oO][0-7]+)n?"},{begin:r.C_NUMBER_RE+"n?"}],relevance:0},o={className:"subst",begin:"\\$\\{",end:"\\}",keywords:t,contains:[]},c={begin:"html`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"xml"}},l={begin:"css`",end:"",starts:{end:"`",returnEnd:!1,contains:[r.BACKSLASH_ESCAPE,o],subLanguage:"css"}},E={className:"string",begin:"`",end:"`",contains:[r.BACKSLASH_ESCAPE,o]};o.contains=[r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,i,r.REGEXP_MODE];var d={begin:"\\(",end:/\)/,keywords:t,contains:["self",r.QUOTE_STRING_MODE,r.APOS_STRING_MODE,r.NUMBER_MODE]},u={className:"params",begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,s,d]};return{name:"TypeScript",aliases:["ts"],keywords:t,contains:[r.SHEBANG(),{className:"meta",begin:/^\s*['"]use strict['"]/},r.APOS_STRING_MODE,r.QUOTE_STRING_MODE,c,l,E,r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,i,{begin:"("+r.RE_STARTERS_RE+"|\\b(case|return|throw)\\b)\\s*",keywords:"return throw case",contains:[r.C_LINE_COMMENT_MODE,r.C_BLOCK_COMMENT_MODE,r.REGEXP_MODE,{className:"function",begin:"(\\([^(]*(\\([^(]*(\\([^(]*\\))?\\))?\\)|"+r.UNDERSCORE_IDENT_RE+")\\s*=>",returnBegin:!0,end:"\\s*=>",contains:[{className:"params",variants:[{begin:r.UNDERSCORE_IDENT_RE},{className:null,begin:/\(\s*\)/,skip:!0},{begin:/\(/,end:/\)/,excludeBegin:!0,excludeEnd:!0,keywords:t,contains:d.contains}]}]}],relevance:0},{className:"function",beginKeywords:"function",end:/[\{;]/,excludeEnd:!0,keywords:t,contains:["self",r.inherit(r.TITLE_MODE,{begin:"[A-Za-z$_][0-9A-Za-z$_]*"}),u],illegal:/%/,relevance:0},{beginKeywords:"constructor",end:/[\{;]/,excludeEnd:!0,contains:["self",u]},{begin:/module\./,keywords:{built_in:"module"},relevance:0},{beginKeywords:"module",end:/\{/,excludeEnd:!0},{beginKeywords:"interface",end:/\{/,excludeEnd:!0,keywords:"interface extends"},{begin:/\$[(.]/},{begin:"\\."+r.IDENT_RE,relevance:0},s,d]}}}()); 
+hljs.registerLanguage("yaml",function(){"use strict";return function(e){var n="true false yes no null",a="[\\w#;/?:@&=+$,.~*\\'()[\\]]+",s={className:"string",relevance:0,variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/\S+/}],contains:[e.BACKSLASH_ESCAPE,{className:"template-variable",variants:[{begin:"{{",end:"}}"},{begin:"%{",end:"}"}]}]},i=e.inherit(s,{variants:[{begin:/'/,end:/'/},{begin:/"/,end:/"/},{begin:/[^\s,{}[\]]+/}]}),l={end:",",endsWithParent:!0,excludeEnd:!0,contains:[],keywords:n,relevance:0},t={begin:"{",end:"}",contains:[l],illegal:"\\n",relevance:0},g={begin:"\\[",end:"\\]",contains:[l],illegal:"\\n",relevance:0},b=[{className:"attr",variants:[{begin:"\\w[\\w :\\/.-]*:(?=[ \t]|$)"},{begin:'"\\w[\\w :\\/.-]*":(?=[ \t]|$)'},{begin:"'\\w[\\w :\\/.-]*':(?=[ \t]|$)"}]},{className:"meta",begin:"^---s*$",relevance:10},{className:"string",begin:"[\\|>]([0-9]?[+-])?[ ]*\\n( *)[\\S ]+\\n(\\2[\\S ]+\\n?)*"},{begin:"<%[%=-]?",end:"[%-]?%>",subLanguage:"ruby",excludeBegin:!0,excludeEnd:!0,relevance:0},{className:"type",begin:"!\\w+!"+a},{className:"type",begin:"!<"+a+">"},{className:"type",begin:"!"+a},{className:"type",begin:"!!"+a},{className:"meta",begin:"&"+e.UNDERSCORE_IDENT_RE+"$"},{className:"meta",begin:"\\*"+e.UNDERSCORE_IDENT_RE+"$"},{className:"bullet",begin:"\\-(?=[ ]|$)",relevance:0},e.HASH_COMMENT_MODE,{beginKeywords:n,keywords:{literal:n}},{className:"number",begin:"\\b[0-9]{4}(-[0-9][0-9]){0,2}([Tt \\t][0-9][0-9]?(:[0-9][0-9]){2})?(\\.[0-9]*)?([ \\t])*(Z|[-+][0-9][0-9]?(:[0-9][0-9])?)?\\b"},{className:"number",begin:e.C_NUMBER_RE+"\\b"},t,g,s],c=[...b];return c.pop(),c.push(i),l.contains=c,{name:"YAML",case_insensitive:!0,aliases:["yml","YAML"],contains:b}}}()); \ No newline at end of file diff --git a/doc/src/konfigkoll/advanced/README.md b/doc/src/konfigkoll/advanced/README.md new file mode 100644 index 00000000..f7e6e6b2 --- /dev/null +++ b/doc/src/konfigkoll/advanced/README.md @@ -0,0 +1,5 @@ +# Advanced topics + +This chapter covers some more advanced features of Konfigkoll. + +See the sub-pages for more details. diff --git a/doc/src/konfigkoll/advanced/host_file_system_access.md b/doc/src/konfigkoll/advanced/host_file_system_access.md new file mode 100644 index 00000000..c753a63e --- /dev/null +++ b/doc/src/konfigkoll/advanced/host_file_system_access.md @@ -0,0 +1,109 @@ +# Host file system access + +> This assumes you have read [Managing Files](../files.md) before. +> This chapter builds directly on that. + +Like with the previous chapter on [processes](./process.md) this is an advanced +feature that can be dangerous! In particular be careful, you could easily make +your config non-idempotent. + +> Idempotency is a fancy way of saying "running the same thing multiple times +> gives the same result". This is important for a configuration management system +> as you want it to be *deterministic*. + +With that said: Konfigkoll allows you read-only access to files on the host. 
Some +example of use cases: + +* The main purpose of this is for things that *shouldn't* be stored in your git + managed configuration, in particular for passwords and other secrets: + * Hashed passwords from `/etc/shadow` (use the special support for + [`passwd`](../integrations/passwd.md) instead though, it is a better option) + * Passwords for wireless networks + * Passwords for any services needed (such as databases) +* Another use case is to read some system information from `/sys` that isn't + already exposed by other APIs + +Now, the use case of `/etc/shadow` is better served by the built-in +[`passwd`](../integrations/passwd.md) module. But lets look at some of the +other use cases. + +## Read from `/sys` + +```rune +let is_uefi = filesystem::exists("/sys/firmware/efi")?; +``` + +This determines if `/sys/firmware/efi` exists, which indicates that this system +is using UEFI. + +## Read password for a NetworkManager network + +The idea here is that we still want to manage our network configurations, but +we *don't* want to store the password in our git repository. Instead, we can read +that back from the system before applying the config. + +```rune +// Get the type of network (wifi or not) and the password for the network +fn parse_sys_network(network_name) { + // Open the file (with root privileges) + let fname = `/etc/NetworkManager/system-connections/${network_name}.nmconnection`; + let f = filesystem::File::open_as_root(fname)?; + + // Read the contents of the file + let old_contents = f.read_all_string()?; + + // Split it out and parse it + let lines = old_contents.split("\n").collect::(); + // Iterate over the lines to find the psk one + let psk = lines.iter() + .find(|line| line.starts_with("psk=")) + .map(|v| v.split("=") + .collect::()[1]); + // Do the same, but for the network type + let net_type = lines.iter() + .find(|line| line.starts_with("type=")) + .map(|v| v.split("=") + .collect::()[1]); + Ok((net_type, psk)) +} +``` + +We can then use this to patch our network configs before we apply them: + +```rune +pub fn nm_add_network(cmds, package_managers, hw_type, network_name) { + // Get PSK from system + if let (net_type, psk) = parse_sys_network(network_name)? { + if net_type == Some("wifi") { + let fname = `/etc/NetworkManager/system-connections/${network_name}.nmconnection`; + let edit_actions = [ + (Selector::Regex("^psk=PLACEHOLDER"), + Action::Replace(format!("psk={}", psk.unwrap()))), + ]; + + // Laptops should auto-connect to Wifi, desktops shouldn't, + // they use ethernet normally + if hw_type == SystemType::Laptop { + edit_actions.push((Selector::Regex("^autoconnect=false"), + Action::Delete)); + } + + // This is a wrapper for LineEditor, see the cook-book chapter + patch_file_from_config(cmds, package_managers, fname, edit_actions)?; + // The file should be root only + cmds.chmod(fname, 0o600)?; + } + } else { + return Err("Network not found")?; + } + Ok(()) +} +``` + +This could then be used like this: + +```rune +nm_add_network(cmds, package_managers, hw_type, "My Phone Hotspot")?; +nm_add_network(cmds, package_managers, hw_type, "My Home Wifi")?; +nm_add_network(cmds, package_managers, hw_type, "Some other wifi")?; +``` diff --git a/doc/src/konfigkoll/advanced/process.md b/doc/src/konfigkoll/advanced/process.md new file mode 100644 index 00000000..3c1831fc --- /dev/null +++ b/doc/src/konfigkoll/advanced/process.md @@ -0,0 +1,129 @@ +# Invoking external commands + +> This assumes you have read [Managing Files](../files.md) before. 
+> This chapter builds directly on that. + +If using `LineEditor` or custom Rune code doesn't cut it you can invoke external +commands. Be careful with this as you could easily make your config non-idempotent. + +> Idempotency is a fancy way of saying "running the same thing multiple times +> gives the same result". This is important for a configuration management system +> as you want it to be *deterministic*. + +In particular, you should not use external commands to write directly to the system. +Instead, you should use a temporary directory if you need filesystem operations. + +## Example with `patch` + +The following example shows how to use `patch` to apply a patch file: + +```rune +async fn patch_zsh(cmds, package_managers) { + // This is relative the config directory + let patch_file = "patches/zsh-modutils.patch"; + // Package we will patch + let pkg = "zsh"; + // The file we want to patch + let file = "/usr/share/zsh/functions/Completion/Linux/_modutils"; + + // Create a temporary directory to operate in + let tmpdir = filesystem::TempDir::new()?; + let tmpdir_path = tmpdir.path(); + + // Read the original file from the package manager + let orig = package_managers.files().original_file_contents(pkg, file)?; + // Write out the original file to the temporary directory, and store the + // full path to it for later use + let orig_path = tmpdir.write("orig", orig)?; + + // We need to know the full path to the patch file, to give it to patch + let absolute_patch_path = filesystem::config_path() + "/" + patch_path; + + // Create a command that describes how to invoke patch + let command = process::Command::new("patch"); + command.arg(orig_path); + command.arg(absolute_patch_path); + + // Start the command + let child = command.spawn()?; + + // Wait for the command to complete + child.wait().await?; + + // Load contents back after patch applied it + let patched = tmpdir.read("orig")?; + + // Add a command to write out the changed file + cmds.write(file, patched)?; + + Ok(()) +} +``` + +As can be seen this is quite a bit more involved than using `LineEditor` +(but the pattern can be encapsulated, see [the cookbook](../cookbook.md#patching-using-patch)). + +There are also some other things to note here: + +* What's up with `async` and `await`? This will be covered in the [next section](#async-and-await). +* The use of `TempDir` to create a temporary directory. The temporary directory + will be automatically removed once the variable goes out of scope. +* External processes are built up using a builder object `process::Command`, and + are then invoked. You can build pipelines and handle stdin/stdout/stderr as well, + see the API docs for details on that. + +## Async and await + +You might have noticed `async fn` a few times before, without it ever being +explained. It is an advanced feature and not one you really need to use much +for Konfigkoll. + +However, the basic idea is that Rust and Rune have functions that can run +concurrently. These are not quite like threads, instead they can run on the same +thread (or separate ones) but can be paused and resumed at certain points. For +example when waiting for IO (or an external process to complete), you could be +doing something else. + +Konfigkoll uses this internally on the Rust side to do things like scanning the +file system for changes at the same time as processing your configuration. + +For talking to external processes this leaks through into the Rune code (otherwise +*you* don't really need to care about it). 
+ +Here is what you have to keep in mind: + +* When you see an `async fn` in the API docs, you need to call it like so: + + ```rune + let result = some_async_fn().await; + ``` + + This means that when `some_async_fn` is called we should wait for it's output. +* You can only use async functions from other async functions. That is, you + can't call an async function from a non-async function. So your `phase_main` + must also be async and so does the whole chain in between your phase_main and + the async API function. +* Async functions don't execute *until they are awaited. That means, they do nothing + until you `await` them. They won't magically run in the background unless you + specifically make them do so (see below). + +### Awaiting multiple things + +If you want to do do multiple things in parallel yourself, you don't need to +*immediately* `await` the `async fn`, the key here is that it has to be awaited +*eventually*. Using `std::future::join` you can wait for multiple async functions: + +```rune +// Prepare a whole bunch of patch jobs +let patches = []; +patches.push(do_patch(cmds, package_managers, "patches/etckeeper-post-install.patch")); +patches.push(do_patch(cmds, package_managers, "patches/etckeeper-pre-install.patch")); +patches.push(do_patch(cmds, package_managers, "patches/zsh-modutils.patch")); + +// Run them and wait for them all +let results = std::future::join(patches).await; +// Process the results to propagate any errors +for result in results { + result?; +} +``` diff --git a/doc/src/konfigkoll/api.md b/doc/src/konfigkoll/api.md new file mode 100644 index 00000000..ff0838b5 --- /dev/null +++ b/doc/src/konfigkoll/api.md @@ -0,0 +1,5 @@ +# API documentation + +Generated Rune API documentation is [available here](https://vorpalblade.github.io/paketkoll/api/). +This covers both the Rune standard library (`std`, `json`, `toml`) and the +konfigkoll specific APIs. diff --git a/doc/src/konfigkoll/cookbook.md b/doc/src/konfigkoll/cookbook.md new file mode 100644 index 00000000..ea59eb5b --- /dev/null +++ b/doc/src/konfigkoll/cookbook.md @@ -0,0 +1,271 @@ +# Cookbook: Examples & snippets + +This contains a bunch of useful patterns and functions you can use in your own +configuration. + +## Using strong types + +While `props` is a generic key value store for passing info between the phases, +it is easy to make a typo (was it `enable_disk_ecryption` or `use_disk_encryption`, etc?) + +A useful pattern is to define one or a few struct that contains all your properties +and store that, then extract it at the start of each phase that needs it. + +```rune +pub struct System { + cpu_arch, + cpu_feature_level, + cpu_vendor, + + has_wifi, + + host_name, + os, + + // ... +} + +pub struct Tasks { + cad_and_3dprinting, + development, + development_rust, + games, + office, + photo_editing, + video_editing, + // ... +} + +pub async fn phase_system_discovery(props, settings) { + /// ... + + // This has system discovery info + props.set("system", system); + // This defines what tasks the system will fulfill + // (like "video editing" and "gaming") + props.set("tasks", tasks); + Ok(()) +} + +pub async fn phase_main(props, cmds, package_managers) { + // Extract the properties + let system = props.get("system")?; + let tasks = props.get("tasks")?; + + // ... + + if tasks.gaming { + // Install steam + package_managers.apt.install("steam")?; + } + + // ... + + Ok(()) +} +``` + +Now, when you access e.g. 
`tasks.gaming` you will get a loud error from Rune if you +typo it, unlike if you use the properties directly. + +## Creating a context object + +This is a continuation of the previous pattern, and most useful in the main phase: + +You might end up with helper functions that need a whole bunch of objects passed to them: + +```rune +fn configure_grub( + props, + cmds, + package_managers, + system, + tasks, + passwd) +{ + // ... +} +``` + +What if you need yet another one? No the solution here is to pass a single context object +around: + +```rune +/// This is to have fewer parameters to pass around +pub struct Context { + // properties::Properties + props, + // commands::Commands + cmds, + // package_managers::PackageManagers + package_managers, + + // System + system, + // Tasks + tasks, + + // passwd::Passwd + passwd, +} + +pub async fn phase_main(props, cmds, package_managers) { + let system = props.get("system")?; + let tasks = props.get("tasks")?; + let passwd = passwd::Passwd::new(tables::USER_MAPPING, tables::GROUP_MAPPING)?; + + let ctx = Context { + props, + cmds, + package_managers, + system, + tasks, + passwd, + }; + + configure_grub(ctx)?; + configure_network(ctx)?; + configure_systemd(ctx)?; + configure_gaming(ctx)?; + // ... + Ok(()) +} +``` + +## Patching files ergonomically with LineEditor + +Using `LineEditor` directly can get verbose. Consider this (using the context +object idea from above): + +```rune +/// Patch a file (from the config directory) +/// +/// * cmds (Commands) +/// * package_anager (PackageManager) +/// * package (string) +/// * file (string) +/// * patches (Vec<(Selector, Action)>) +pub fn patch_file_from_config(ctx, file, patches) { + let package_manager = ctx.package_managers.files(); + let fd = filesystem::File::open_from_config("files/" + file)?; + let orig = fd.read_all_string()?; + let editor = LineEditor::new(); + for patch in patches { + editor.add(patch.0, patch.1); + } + let contents = editor.apply(orig); + ctx.cmds.write(file, contents.as_bytes())?; + Ok(()) +} + + +/// Patch a file (from a package) to a new destination +/// +/// * cmds (Commands) +/// * package_anager (PackageManager) +/// * package (string) +/// * file (string) +/// * target_file (string) +/// * patches (Vec<(Selector, Action)>) +pub fn patch_file_to(ctx, package, file, target_file, patches) { + let package_manager = ctx.package_managers.files(); + let orig = String::from_utf8(package_manager.original_file_contents(package, file)?)?; + let editor = LineEditor::new(); + for patch in patches { + editor.add(patch.0, patch.1); + } + let contents = editor.apply(orig); + ctx.cmds.write(target_file, contents.as_bytes())?; + Ok(()) +} +``` + +Then you can use this as follows: + +```rune + crate::utils::patch_file(ctx, "bluez", "/etc/bluetooth/main.conf", + [(Selector::Regex("#AutoEnable"), Action::RegexReplace("^#", "")), + (Selector::Regex("#AutoEnable"), Action::RegexReplace("false", "true"))])?; +``` + +Much more compact! In general, consider creating utility functions to simplify +common patterns in your configuration. Though there needs to be a balance, so +you still understand your configuration a few months later. Don't go overboard +with the abstractions. 
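As an aside, the usage example above calls a `patch_file` helper rather than `patch_file_to` directly. That helper isn't shown here; a minimal sketch of it (assuming the `patch_file_to` utility defined above, with the helper name and `utils` module being your own choice) simply patches a packaged file back to its original path:

```rune
/// Patch a file (from a package) in place
///
/// * ctx (Context)
/// * package (string)
/// * file (string)
/// * patches (Vec<(Selector, Action)>)
pub fn patch_file(ctx, package, file, patches) {
    // Same as patch_file_to, but the destination is the original path
    patch_file_to(ctx, package, file, file, patches)
}
```

With that in place, the `crate::utils::patch_file(...)` call above patches the packaged `/etc/bluetooth/main.conf` and writes the result back to the same path.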
+ +## Patching using patch + +This builds on the example in [Processes (advanced)](./advanced/process.md): + +```rune +pub async fn apply_system_patches(ctx) { + let patches = []; + patches.push(do_patch(ctx, "patches/etckeeper-post-install.patch")); + patches.push(do_patch(ctx, "patches/etckeeper-pre-install.patch")); + patches.push(do_patch(ctx, "patches/zsh-modutils.patch")); + + let results = std::future::join(patches).await; + for result in results { + result?; + } + Ok(()) +} + +async fn do_patch(ctx, patch_path) { + // Load patch file + let patch_file = filesystem::File::open_from_config(patch_path)?; + let patch = patch_file.read_all_bytes()?; + let patch_as_str = String::from_utf8(patch)?; + + // The first two lines says which package and file they apply to, extract them + let lines = patch_as_str.split('\n').collect::(); + let pkg = lines[0]; + let file = lines[1]; + + // Create a temporary directory + let tmpdir = filesystem::TempDir::new()?; + let tmpdir_path = tmpdir.path(); + + // Read the original file + let orig = ctx.package_managers.files().original_file_contents(pkg, file)?; + let orig_path = tmpdir.write("orig", orig)?; + let absolute_patch_path = filesystem::config_path() + "/" + patch_path; + + // Shell out to patch command in a temporary directory + let command = process::Command::new("patch"); + command.arg(orig_path); + command.arg(absolute_patch_path); + let child = command.spawn()?; + child.wait().await?; + + // Load contents back + let patched = tmpdir.read("orig")?; + + ctx.cmds.write(file, patched)?; + + Ok(()) +} +``` + +Here the idea is to parse the patch file, which should contain some metadata +at the top for where it should be applied to. Patch will ignore text at the very +top of a diff file and only handle the file from the first `---`. For example: + +```patch +etckeeper +/usr/share/libalpm/hooks/05-etckeeper-pre-install.hook + +--- /proc/self/fd/12 2022-12-19 17:36:30.026865507 +0100 ++++ /usr/share/libalpm/hooks/05-etckeeper-pre-install.hook 2022-12-19 12:43:40.751631786 +0100 +@@ -4,8 +4,8 @@ + Operation = Install + Operation = Upgrade + Operation = Remove +-Type = Path +-Target = etc/* ++Type = Package ++Target = * + + [Action] + Description = etckeeper: pre-transaction commit +``` \ No newline at end of file diff --git a/doc/src/konfigkoll/defaults.md b/doc/src/konfigkoll/defaults.md new file mode 100644 index 00000000..39bd1f3a --- /dev/null +++ b/doc/src/konfigkoll/defaults.md @@ -0,0 +1,45 @@ +# Defaults + +This section documents some defaults for settings in Konfigkoll. + +## Default ignores + +Some paths are always ignored in the file system scan: + +* `**/lost+found` +* `/dev/` +* `/home/` +* `/media/` +* `/mnt/` +* `/proc/` +* `/root/` +* `/run/` +* `/sys/` +* `/tmp/` +* `/var/tmp/` + +## Default early configurations + +Some configurations are always applied early (before packages are installed) +in the configuration process (you can add additional with `settings.early_config` +during the system discovery phase): + +* `/etc/passwd` +* `/etc/group` +* `/etc/shadow` +* `/etc/gshadow` + +The reason these are applied early is to ensure consistent ID assignment when +installing packages that want to add their own IDs. + +## Default sensitive configurations + +Konfigkoll will not write out the following files when you use `save`, no matter +what. 
This is done as a security measure to prevent accidental leaks of sensitive information:

* `/etc/shadow`
* `/etc/gshadow`

You can add additional files to this list with `settings.sensitive_file` during the system discovery phase.

diff --git a/doc/src/konfigkoll/files.md b/doc/src/konfigkoll/files.md new file mode 100644 index 00000000..5fb32576 --- /dev/null +++ b/doc/src/konfigkoll/files.md @@ -0,0 +1,221 @@

# Managing files

> This assumes you have read [Getting started](./getting_started.md) before.
> This chapter builds directly on that, specifically the
> [section about the main phase](./getting_started.md#the-main-phase) (which in
> turn builds on earlier sections of that chapter).

## Copying files

The most basic operation is to copy a file from the `files` directory in your configuration to the system. This is what `save` will use when saving changes.

For example:

```rune
pub async fn phase_main(props, cmds, package_managers) {
    cmds.copy("/etc/fstab")?;
    cmds.copy("/etc/ssh/sshd_config.d/99-local.conf")?;

    Ok(())
}
```

This config would mean that:

* The file `files/etc/fstab` in your configuration should be copied to `/etc/fstab`
* The file `files/etc/ssh/sshd_config.d/99-local.conf` in your configuration
  should be copied to `/etc/ssh/sshd_config.d/99-local.conf`
* Every other (non-ignored) file on the system should be unchanged compared
  to the package manager.

Like with [packages](./packages.md) the configuration is *total*, that is, it should describe the system state fully.

Sometimes you might want to rename a file as you copy it, for example to have host-specific configs. `/etc/fstab` is an example of where this can be a good solution. Then you can use `copy_from` instead of `copy`:

```rune
pub async fn phase_main(props, cmds, package_managers) {
    let sysinfo = sysinfo::SysInfo::new();
    let host_name = sysinfo.host_name()?;
    cmds.copy_from("/etc/fstab", `/etc/fstab.${host_name}`)?;

    Ok(())
}
```

Here we can also see another feature: in strings surrounded by backquotes you can use `${}` to interpolate variables. This is a feature of the Rune language.

You can also check if a file exists:

```rune
let candidate = std::fmt::format!("/etc/conf.d/lm_sensors.{}", host_name);
if cmds.has_source_file(candidate) {
    cmds.copy_from("/etc/conf.d/lm_sensors", candidate)?;
}
```

This shows another way to format strings, using `std::fmt::format!`. You can use either.

## Writing a file directly from the configuration

Sometimes you want to write a file directly from the configuration (maybe it is short, maybe you have complex logic to generate it). This can be done with `write`:

```rune
pub async fn phase_main(props, cmds, package_managers) {
    let sysinfo = sysinfo::SysInfo::new();
    let host_name = sysinfo.host_name()?;
    cmds.write("/etc/NetworkManager/conf.d/dns.conf", b"[main]\ndns=dnsmasq\n")?;
    cmds.write("/etc/hostname",
               std::fmt::format!("{}\n", host_name).as_bytes())?;
    cmds.write("/etc/sddm.conf", b"")?;
    Ok(())
}
```

Some notes on what we just saw:

* We see here the notion of byte strings (`b"..."`). Unlike normal strings these
  don't have to be Unicode (UTF-8) encoded, though the Rune source file itself
  still does. But you can use escape codes (`b"\001\003"`) to create non-UTF-8 data.
* `write` only takes byte strings; if you want to write a UTF-8 string you need to use `.as_bytes()`
  on that string, as can be seen for `/etc/hostname`.
* The file `sddm.conf` will end up empty here.
+* `write` replaces the whole file in one go, there isn't an `append`. For patching files, see the + next section. + +## Patching a file compared to the package manager state + +Often times you want to use the standard config file but change one or two things about it. +This can be done by extracting the file from the package manager, patching it and then writing it. + +Here is a short example appending a line to a config file + +```rune +// Specifically the package manager that is responsible for general +// files (as opposed to say flatpak) +let package_manager = package_managers.files(); + +// Get the contents of /etc/default/grub, then convert it +// to a UTF-8 string (it is a Bytes by default) +let contents = String::from_utf8( + package_manager.original_file_contents("grub", "/etc/default/grub")?)?; + +// Push an extra line to it +contents.push_str("GRUB_FONT=\"/boot/grubfont.pf2\"\n"); + +// Add a command to write the file +cmds.write(file, contents.as_bytes())?; +``` + +This is a bit cumbersome, but abstractions can be built on top of this general pattern. +In fact, a few such abstractions are already provided by Konfigkoll. + +## Patching a file with LineEditor + +If you are at all familar with sed, `::patch::LineEditor` is basically a Rune/Rust variant of that. +The syntax is different though (not a terse one-liner but a bit more verbose). + +Lets look at patching the grub config again: + +```rune +use patch::LineEditor; +use patch::Action; +use patch::Selector; + +pub fn patch_grub(cmds, package_managers) { + let package_manager = package_managers.files(); + let orig = String::from_utf8(package_manager.original_file_contents(package, file)?)?; + + let editor = LineEditor::new(); + + // Replace the GRUB_CMDLINE_LINUX line with a new one + editor.add(Selector::Regex("GRUB_CMDLINE_LINUX="), + Action::RegexReplace("=\"(.*)\"$", "=\"loglevel=3 security=apparmor\"")); + + // Uncomment the GRUB_DISABLE_OS_PROBER line + editor.add(Selector::Regex("^#GRUB_DISABLE_OS_PROBER"), + Action::RegexReplace("^#", "")); + + // Add a line at the end of the file (EOF) + editor.add(Selector::Eof, + Action::InsertAfter("GRUB_FONT=\"/boot/grubfont.pf2\"")); + + // Apply the commands to the file contents and get the new file contents + let contents = editor.apply(orig); + + // Write it back + cmds.write(target_file, contents.as_bytes())?; +} +``` + +Here we can see the use of `LineEditor` to: + +* Replace a line matching a regex (and the replacement itself is a regex matching part of that line) +* Uncomment a line +* Add a line at the end of the file + +The above also seems a bit cumbersome, but see +[the cookbook](./cookbook.md#patching-files-ergonomically-with-lineeditor) +for a utility function that encapsulates this pattern. + +`LineEditor` has many more features, see the +[API documentation](https://vorpalblade.github.io/paketkoll/api/patch.module.html) +for more details. However, the general idea if that you have a `Selector` that +selects *what* lines a given rule should affect, and an `Action` that describes +*how* those lines should be changed. + +Most powerfully a selector or an action can be a function that you write, so +arbitrary complex manipulations are possible. 
Nested programs are also possible +to operate on multiple consecutive lines: + +```rune +// Uncomment two consecutive lines when we encounter [multilib] +// This is equivalent to /\[multilib\]/ { s/^#// ; n ; s/^#// } in sed +let sub_prog = LineEditor::new(); +sub_prog.add(Selector::All, Action::RegexReplace("^#", "")); +sub_prog.add(Selector::All, Action::NextLine); +sub_prog.add(Selector::All, Action::RegexReplace("^#", "")); + +editor.add(Selector::Regex("\\[multilib\\]"), Action::sub_program(sub_prog)); +``` + +## Patching a file via invoking an external command + +Sometimes sed line expressions don't cut it, and you don't want to write the +code in Rune, you just want to reuse an existing command. This can be done with +the `process` module to invoke an external command. This will be +[covered in the advanced section](./advanced/process.md). + +## Other file operations (permissions, mkdir, symlinks etc) + +Writing files is not all you can do, you can also: + +* Change permissions (owner, group, mode) +* Create symlinks +* Create directories + +These are all covered in the +[API documentation](https://vorpalblade.github.io/paketkoll/api/command/Commands.struct.html), +but they are relatively simple operations compared to all the variations of +writing file contents, so there will only be a short example: + +```rune +// Create a directory and make it root only access +cmds.mkdir("/etc/cni")?; +cmds.chmod("/etc/cni", 0o700)?; +// You could also write either of these and they would mean the same thing: +cmds.chmod("/etc/cni", "u=rwx")?; +cmds.chmod("/etc/cni", "u=rwx,g=,o=")?; + +// Create a directory owned by colord:colord +cmds.mkdir("/etc/colord")?; +cmds.chown("/etc/colord", "colord")?; +cmds.chgrp("/etc/colord", "colord")?; + +// Create a symlink +cmds.ln("/etc/localtime", "/usr/share/zoneinfo/Europe/Stockholm")?; +``` diff --git a/doc/src/konfigkoll/getting_started.md b/doc/src/konfigkoll/getting_started.md new file mode 100644 index 00000000..f1005794 --- /dev/null +++ b/doc/src/konfigkoll/getting_started.md @@ -0,0 +1,321 @@ +# Getting started + +## Creating a new configuration directory + +The first step is to create a new configuration directory. You can get a template +created using: + +```bash +konfigkoll -c my_conf_dir init +``` + +This will create a few skeleton files in `my_conf_dir`. It is useful to look +at what these files are: + +* `main.rn`: This is the main entry module to your configuration. You can of + course (and probably should, to keep things manageable) create additional + modules and import them here. +* `unsorted.rn`: This file will be overwritten when doing `konfigkoll save`. + The idea is that you should look at this and move the changes you want to keep + into your `main.rn` (or supporting files). +* `.gitignore`: This is a starting point for files to ignore when you check your + config into git. You are going to version control it, right? +* `files/`: `save` will put files that have changed on the system here, and there + are special commands to copy files from `files` to the system for use in your + configuration.\ + The path in `files` should *normally* be the same as the path on the system (e.g. + `files/etc/fstab`), but if you have host specific configs you can use a different + scheme (e.g. `files/etc/fstab.hostname`). + +The only hard requirements from `konfigkoll` is `main.rn` and `unsorted.rn`. `files` +also has special convenient support. The rest is just a suggestion. You can +structure your configuration however you like. 
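To make the layout concrete, a freshly initialised configuration directory might look something like this (the `files/etc/fstab` entry is just the example used above; the exact contents will depend on your system):

```text
my_conf_dir/
├── main.rn        # Main entry module, maintained by you
├── unsorted.rn    # Overwritten by `konfigkoll save`
├── .gitignore
└── files/
    └── etc/
        └── fstab  # Copied to /etc/fstab by your configuration
```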
+ +If you are comming from [aconfmgr] this structure should feel somewhat familiar. + +## The configuration language + +The configuration language in use is [Rune], which is based on Rust when it comes +to syntax. Unlike Rust it is a dynamically typed language with reference counting, +no need to worry about borrow checking, strict types or any of the other features +that make Rust a bit of a learning curve. + +The best documentation on the language itself is [the Rune book](https://rune-rs.github.io/book/), +however for a basic configuration you won't need advanced features. + +The main config file is structured in four *phases* that are called in order. This +is done in order to speed up execution and allow file system and package scanning +to start early in the background. + +This is the basic structure of `main.rn` (don't worry, we will go through it piece +by piece below): + +```rune +/// This phase is for configuring konfigkoll itself and for system discovery. +/// You need to select which backends (pacman, apt, flatpak) to use here +/// +/// Parameters: +/// - props: A persistent properties object that the script can use to store +/// data between phases +/// - settings: Settings for konfigkoll (has methods to enable backends etc) +pub async fn phase_system_discovery(props, settings) { + // Enable backends (if you want to be generic to support multiple distros + // you would do this based on distro in use and maybe hostname) + settings.enable_pkg_backend("pacman")?; + settings.enable_pkg_backend("flatpak")?; + settings.set_file_backend("pacman")? + Ok(()) +} + +/// Here you need to configure which directories to ignore when scanning the +/// file system for changes +pub async fn phase_ignores(props, cmds) { + // Note! Some ignores are built in to konfigkoll, so you don't need to add them here: + // These are things like /dev, /proc, /sys, /home etc. See below for the full list. + + cmds.ignore_path("/var/cache")?; + cmds.ignore_path("/var/lib/flatpak")?; + cmds.ignore_path("/var/lib/pacman")?; + // ... + Ok(()) +} + +/// This is for installing any packages immediately that are later needed to be +/// *executed* by your main configuration. This should very rarely be needed. +pub async fn phase_script_dependencies(props, cmds) { + Ok(()) +} + +/// Main phase, this is where the bulk of your configration should go +/// +/// It is recommended to use the "save" sub-command to create an initial +/// `unsorted.rn` file that you can then copy the parts you want from into here. +/// +/// A tip is to use `konfigkoll -p dry-run save` the first few times to not +/// *actually* save all the files, this helps you figure out what ignores to add +/// above in `phase_ignores()` without copying a ton of files. Once you are happy +/// with the ignores, you can remove the `-p dry-run` part. +pub async fn phase_main(props, cmds, package_managers) { + Ok(()) +} +``` + +Lets look at it once piece at a time: + +### System discovery + +If you want to make your configuration generic to support multiple distros you +need to do some conditional logic based on things detected by the system. This +can vary in how refined it is. 
Let's say you just want to do this based on OD and +hostname, then something like this might be a good starting point + +```rune +pub async fn phase_system_discovery(props, settings) { + let sysinfo = sysinfo::SysInfo::new(); + let os_id = sysinfo.os_id(); + let host_name = sysinfo.host_name()?; + + println!("Configuring for host {} (distro: {})", host_name, os_id); + + // We need to enable the backends that we want to use + match os_id { + "arch" => { + settings.enable_pkg_backend("pacman")?; + settings.set_file_backend("pacman")? + } + "debian" => { + settings.enable_pkg_backend("apt")?; + settings.set_file_backend("apt")? + } + "ubuntu" => { + settings.enable_pkg_backend("apt")?; + settings.set_file_backend("apt")? + } + _ => return Err("Unsupported OS")?, + } + + match host_name { + "mydesktop" => { + settings.enable_pkg_backend("flatpak")?; + } + "myserver" => { + // This doesn't have flatpak + } + } + + Ok(()) +} +``` + +Some Rune language features of interest here: + +* The `match` statement. This is like a `case` or `switch` statement in many + other languages. +* The use of `?` to propagate errors. This is a common pattern in Rust and Rune, + and is used instead of exceptions that some other languages uses. Basically it + means "if this is a `Result::Error`, abort the function and propagate the error to the + caller". +* The use of `Result` is also why the function has a finak `Ok(())` at the end. This + is because the function needs to return a `Result` type, and `Ok(())` is a way to + return a successful result with no value. +* Why `()` you might ask? Well, `()` is an empty tuple, and is used in Rust and + Rune to represent "no value". This is a bit different from many other languages + where `void` or `None` is used for this purpose. +* You might expect to see `return Ok(());` instead of `Ok(())`, but in Rust and Rune + the `return` keyword is optional if it is the final expression in the function. +* `println!` is a macro that prints to stdout. It is similar to `printf` in C or + `console.log` in JavaScript. The `!` is a special syntax for macros in Rust and Rune + (and the reason it is a macro and not a function isn't really important here). + +The other thing you might want to do in this phase is to set properties that you +can then refer back to later. For example, you might want to abstract away checks +like "install video editing software if this is one of these two computers" by +setting a property in this phase and then checking it in the main phase instead +of having checks for which specific hosts to install on everywhere. This makes it +easier should you add yet another computer (fewer places to update in). + +To support this `props` can be used: + +```rune +pub async fn phase_system_discovery(props, settings) { + // ... + props.set("tasks.videoediting", true); + // ... + Ok(()) +} + +pub async fn phase_main(props, settings) { + // ... + if props.get("tasks.videoediting") { + // Install kdenlive and some other things + } + // ... + Ok(()) +} +``` + +Props is a simple key-value store that is persisted between phases. You can use +it however you want. It is basically a `HashMap` where `Value` can +be any type. + +Even if you only have a single if statement for a particular property, it can be +*cleaner* to separate out the checking for hardware and host name from the actual +installations. This is especially true as the configuration grows. + +### Ignoring files + +The next phase is to ignore files that you don't want to track. 
This is absolutely +required, as there is a bunch of things (especially in `/var`) that aren't managed +by the package manager. In fact `/var` is awkward since there also *are* managed +things under it. As such the ignore section grows long, it can be a good idea +to put this into a separate file and include it. Let's look at how that would be +done: + +Your main.rn could look like this: + +```rune +mod ignores; + +// System discovery goes here still + +/// Ignored paths +pub async fn phase_ignores(props, cmds) { + ignores::ignores(props, cmds)? + Ok(()) +} + +// The other later phases +``` + +In `ignores.rn` you would then have: + +```rune +pub fn ignores(props, cmds) { + cmds.ignore_path("/var/cache")?; + cmds.ignore_path("/var/lib/flatpak")?; + cmds.ignore_path("/var/lib/pacman")?; + // ... + Ok(()) +} +``` + +The key here is the use of the `mod` keyword to declare another module in the +same directory. This is similar to how you would do it in Rust, and is a way to +split up your configuration into multiple files. + +You can also create nested sub-modules, which is covered in a later section of +the manual. + +### Script dependencies + +You probably won't need this phase, but it is there if you do. If you need to call +out *from your configuration* to a program that isn't installed by default on a +clean system, you should put it here. For example: + +```rune +pub fn phase_script_dependencies(props, cmds) { + // We use patch in the main phase to apply some diff files to a package + cmds.add_pkg("pacman", "patch")?; + Ok(()) +} +``` + +We can see here how to add a package, but this will be covered in more details +in the documentation of the main phase. + +### The main phase + +This is the bread and butter of your configuration. This is where you will do +most of your work. This is where you will install packages, copy files, patch +configurations, etc. + +Lets look at the signature again: + +```rune +pub async fn phase_main(props, cmds, package_managers) { + + Ok(()) +} +``` + +This takes three parameters: + +* `props` we already know, it is the key value store introduced in + [the system discovery phase](#system-discovery). +* `cmds` we have seen (for how to add ignores for example) but it hasn't been + covered in detail, we will get to that now. +* `package_managers` is new, and is your interface to query for what the + *original* contents of a file is. That is, before you changed it. This can be + used to apply small changes such as "I want the stock `/etc/nanorc`, but + uncomment just this one line". + +In fact, let's dwell a bit more on that last bullet point. That (apart from +wholesale copying and replacing configuration files) is the main approach to +configuration management in `konfigkoll`. + +This means you don't have to merge `.pacnew` or `.dpkg-dist` files any more, just +reapply your config: it will apply the same change to the new version of the config. +Of course, it is possible the config has changed *drastically*, in which case you +still have to intervene manually, but almost always that isn't the case. + +Now lets look at the `cmds` parameter. This is where you describe your configuration. +It builds up a list of *actions* internally that will then be compared to the system +at the end by konfigkoll. That comparison is then used to either apply changes to the +system or save missing actions to `unsorted.rn`. 
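As a small taste of what that looks like (the package name and file path here are purely illustrative), a minimal main phase could contain just a couple of such actions:

```rune
pub async fn phase_main(props, cmds, package_managers) {
    // Request that the openssh package is installed (only has an effect
    // if the pacman backend was enabled during system discovery)
    cmds.add_pkg("pacman", "openssh")?;

    // Request that files/etc/ssh/sshd_config.d/99-local.conf from the
    // configuration is copied to /etc/ssh/sshd_config.d/99-local.conf
    cmds.copy("/etc/ssh/sshd_config.d/99-local.conf")?;

    Ok(())
}
```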
The brunt of how this works is covered in the next two chapters (to prevent this section getting far too long):

* [Managing packages](packages.md)
* [Managing files](files.md)

There are also some speciality topics that are covered in a later chapter:

* [Systemd (and other integrations)](integrations/README.md)
* There are examples of how to solve specific things in the [cookbook](cookbook.md) chapter.

There are also plans to publish a complete (but sanitised from sensitive info) example configuration in the future; this is not yet done.

[aconfmgr]: https://github.com/CyberShadow/aconfmgr
[Rune]: https://rune-rs.github.io/

diff --git a/doc/src/konfigkoll/installation.md b/doc/src/konfigkoll/installation.md new file mode 100644 index 00000000..63cad973 --- /dev/null +++ b/doc/src/konfigkoll/installation.md @@ -0,0 +1,51 @@

# Installation

The preferred method of installing konfigkoll is via your package manager. For Arch Linux it is available in the [AUR](https://aur.archlinux.org/packages/konfigkoll/).

For other systems you will currently have to download the binary from GitHub releases or build it yourself. The way to build it yourself is from the [git repository]; `cargo install` from crates.io is not recommended (it will work, but you won't get shell completion nor man pages).

There are three binaries of interest:

* `konfigkoll` - The main binary that will apply and save your configuration.
* `konfigkoll-rune` - This provides an LSP language server for the scripting language
  ([Rune]) used in konfigkoll, as well as some generic Rune utilities (such as
  auto-formatting code, though that has limitations currently).
* `paketkoll` - A query tool similar to `debsums`. Parts of its code are also
  used in konfigkoll, and as such they are maintained in the same git repository.

To build from source:

```bash
git clone https://github.com/VorpalBlade/paketkoll \
    --branch konfigkoll-v0.1.0 # Replace with whatever the current version is
cd paketkoll

# Use one of these:
make install-konfigkoll
make install-paketkoll

# Or use this if you want both
make install
```

You can also select which features to build with, for example to skip the Arch Linux or Debian backends:

```bash
make install CARGO_FLAGS='--no-default-features --features debian,arch_linux,json,vendored'
# CARGO_FLAGS also work with the other install targets of course
```

Remove features from the comma-separated list that you don't want. The features are:

* `arch_linux` - Pacman support
* `debian` - Dpkg/Apt support
* `json` - JSON output support (only relevant for paketkoll)
* `vendored` - Use static libraries instead of linking to dynamic libraries on the host.
  This affects compression libraries currently, and not all compression libraries are in use
  for all distros. Currently, this only affects liblzma and libbz2 (both only needed on Debian).

[Rune]: https://rune-rs.github.io/

diff --git a/doc/src/konfigkoll/integrations/README.md b/doc/src/konfigkoll/integrations/README.md new file mode 100644 index 00000000..a3931f01 --- /dev/null +++ b/doc/src/konfigkoll/integrations/README.md @@ -0,0 +1,6 @@

# Integrations (passwd, systemd, etc)

Konfigkoll has various convenient integrations for common system files and services.

See the various sub-pages in this chapter for more details.
diff --git a/doc/src/konfigkoll/integrations/passwd.md b/doc/src/konfigkoll/integrations/passwd.md new file mode 100644 index 00000000..dd0fd9a3 --- /dev/null +++ b/doc/src/konfigkoll/integrations/passwd.md @@ -0,0 +1,151 @@ +# Managing /etc/passwd, /etc/group and shadow files + +Konfigkoll has special support for managing `/etc/passwd`, `/etc/group` and +`/etc/shadow`. This is because these files contain contents from multiple +sources (various packages add their own users) and it is difficult to manage +these otherwise. + +The interface to this is the `::passwd::Passwd` type +([API docs](https://vorpalblade.github.io/paketkoll/api/passwd.module.html)). + +Typically, you would: + +* Create an instance of `::passwd::Passwd` early in the main phase +* Add things to it as needed (next to the associated packages) +* Apply it at the end of the main phase + +A rough example (we will break it into chunks down below): + +```rune +// Mappings for the IDs that systemd auto-assigns inconsistently from computer to computer +const USER_MAPPING = [("systemd-journald", 900), /* ... */] +const GROUP_MAPPING = [("systemd-journald", 900), /* ... */] + +pub async fn phase_main(props, cmds, package_managers) { + let passwd = passwd::Passwd::new(USER_MAPPING, GROUP_MAPPING)?; + + let files = package_managers.files(); + // These two files MUST come first as other files later on refer to them, + // and we are not order independent (unlike the real sysusers.d). + passwd.add_from_sysusers(files, "systemd", "/usr/lib/sysusers.d/basic.conf")?; + passwd.add_from_sysusers(files, "filesystem", "/usr/lib/sysusers.d/arch.conf")?; + + // Various other packages and other changes ... + passwd.add_from_sysusers(files, "dbus", "/usr/lib/sysusers.d/dbus.conf")?; + // ... + + // Give root a login shell, we don't want the default /usr/bin/nologin! + passwd.update_user("root", |user| { + user.shell = "/bin/zsh"; + user + }); + + // Add human user + let me = passwd::User::new(1000, "me", "me", ""); + me.shell = "/bin/zsh"; + me.home = "/home/me"; + passwd.add_user_with_group(me); + passwd.add_user_to_groups("me", ["wheel", "optical", "uucp", "users"]); + + + // Don't store passwords in your git repo, load them from the system instead + passwd.passwd_from_system(["me", "root"]); + + // Deal with the IDs not matching (because the mappings were created + // before konfigkoll was in use for example) + passwd.align_ids_with_system()?; + + // Apply changes + passwd.apply(cmds)?; +} +``` + +## `USER_MAPPING` and `GROUP_MAPPING` + +First up, there is special support for systemd's `/usr/lib/sysusers.d/` files. +These often don't declare the specific user/group IDs, but instead auto-assign them. + +This creates a bit of chaos between computers and there is no auto-assign logic +in Konfigkoll (yet?). To solve both of these issues we need to declare which +IDs we want for the auto-assigned IDs if we are to use `sysusers.d`-integration. + +That is what the `USER_MAPPING` and `GROUP_MAPPING` constants are for. + +## General workflow + +The idea is (as stated above) to create *one* instance of `Passwd`, update it +as you go along, and then write out the result at the end: + +```rune +let passwd = passwd::Passwd::new(USER_MAPPING, GROUP_MAPPING)?; + +// Do stuff + +passwd.apply(cmds)?; +``` + +Now, what about the "stuff" you can "do"? + +### Adding a system user / group + +The easiest option (when available) is `passwd.add_from_sysusers`. Arch Linux +uses this for (almost?) all users created by packages. Debian however doesn't. 
+ +If there *isn't* a corresponding sysusers file to add you need to create the user +yourself. This will be pretty much like the example of adding a human user below. + +### Patching a user or group + +Sometimes you need to make changes to a user or group created by sysusers. This +can be done by passing a function to `passwd.update_user` or `passwd.update_group`. + +```rune +// Give root a login shell, we don't want the default /usr/bin/nologin! +passwd.update_user("root", |user| { + user.shell = "/bin/zsh"; + user +}); +``` + +The `|...| { code }` syntax is a *closure*, a way to declare an inline function +that you can pass to another function. The bits between the `|` are the parameters +that the function takes. + +### Adding a human user + +There isn't *too* much code needed for this (and remember, you could always create a utility +function if you need this a lot): + +```rune +// Add human user +let me = passwd::User::new(1000, "me", "me", ""); +me.shell = "/bin/zsh"; +me.home = "/home/me"; + +// Add them to the passwd database (and automatically create a corresponding group) +passwd.add_user_with_group(me); + +// Add the user to some extra groups as well +passwd.add_user_to_groups("me", ["wheel", "optical", "uucp", "users"]); +``` + +### Passwords + +What about setting the password? Well, it isn't good practise to store those passwords +in your git repository. Instead, you can read them from the system: + +```rune +passwd.passwd_from_system(["me", "root"]); +``` + +This will make `me` and `root` have whatever password hashes they current already +have on the system. + +### IDs not matching + +If you already have several computers before starting with konfigkoll, chances +are the user and group IDs don't match up. This can be fixed with `passwd.align_ids_with_system`. +This will copy the IDs *from* the system so they match up. + +Of course the assignment of IDs on now your computers won't match, but the users +and groups will match whatever IDs are on the local file system. diff --git a/doc/src/konfigkoll/integrations/sysinfo.md b/doc/src/konfigkoll/integrations/sysinfo.md new file mode 100644 index 00000000..5b34f80a --- /dev/null +++ b/doc/src/konfigkoll/integrations/sysinfo.md @@ -0,0 +1,14 @@ +# Getting system information + +Getting information about the system (host name, distro, architecture, hardware, +etc) is important in order to make a robust config for multiple computers. + +For example, rather than listing exactly which computers should have `intel-ucode` +installed for the microcode firmware, you can look at the CPU vendor and determine +if it should have Intel or AMD microcode. + +Konfigkoll exposes this via the `sysinfo` module +([API docs](https://vorpalblade.github.io/paketkoll/api/sysinfo.module.html)). + +Currently, this is a bit of work in progress and the API is likely to be expanded, +in particular around detecting PCI devices (GPUs etc). diff --git a/doc/src/konfigkoll/integrations/systemd_units.md b/doc/src/konfigkoll/integrations/systemd_units.md new file mode 100644 index 00000000..ec9f1778 --- /dev/null +++ b/doc/src/konfigkoll/integrations/systemd_units.md @@ -0,0 +1,76 @@ +# Systemd units + +Konfigkoll has special support for enabling and masking systemd units. This +simplifies what would otherwise be a bunch of `cmds.ln()` calls. 
diff --git a/doc/src/konfigkoll/integrations/systemd_units.md b/doc/src/konfigkoll/integrations/systemd_units.md new file mode 100644 index 00000000..ec9f1778 --- /dev/null +++ b/doc/src/konfigkoll/integrations/systemd_units.md @@ -0,0 +1,76 @@

# Systemd units

Konfigkoll has special support for enabling and masking systemd units. This
simplifies what would otherwise be a bunch of `cmds.ln()` calls. In particular,
it will handle aliases, `WantedBy`, etc. correctly.

## Enabling units from packages

The basic form is:

```rune
systemd::Unit::from_pkg("gpm",
    "gpm.service",
    package_managers.files())
    .enable(cmds)?;
```

This will load the unit file from the package manager and figure out which
symlinks need to be created to enable the unit.

Some units are parameterised; this can be handled by using the `name` method:

```rune
systemd::Unit::from_pkg("systemd",
    "getty@.service",
    package_managers.files())
    .name("getty@tty1.service")
    .enable(cmds)?;
```

User units can also be enabled. This enables user units globally (`/etc/systemd/user`),
not per-user:

```rune
systemd::Unit::from_pkg("xdg-user-dirs",
    "xdg-user-dirs-update.service",
    package_managers.files())
    .user()
    .enable(cmds)?;
```

You can skip automatically installing `WantedBy` symlinks by using:

```rune
systemd::Unit::from_pkg("avahi",
    "avahi-daemon.service",
    package_managers.files())
    .skip_wanted_by()
    .enable(cmds)?;
```

A similar option is also available for `Alias`.

## Enabling custom units

If you have a unit you install yourself that doesn't come from a package, you
can do this:

```rune
cmds.copy("/etc/systemd/system/kdump.service")?;
systemd::Unit::from_file("/etc/systemd/system/kdump.service", cmds)?
    .enable(cmds)?;
```

All the other options described in the previous section are also available for
these types of units.

## Caveats

While `WantedBy` and `Alias` are handled correctly, `Also` is not processed;
if you want such units you have to add them manually. The reason is that they
could come from a different package, and we don't know which one.

We could find out for installed packages, but what if the unit comes from a
package that isn't yet installed? This can happen since we build the
configuration first, then install packages.
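For example, if a service's `[Install]` section contains an `Also=` entry for a
socket unit, you can enable that unit explicitly next to the service. Avahi's
socket unit is used purely as an illustration here; adjust the package and unit
names to your case:

```rune
// `Also=` units are not pulled in automatically, so enable them explicitly.
// avahi-daemon.socket is only an illustrative example.
systemd::Unit::from_pkg("avahi",
    "avahi-daemon.socket",
    package_managers.files())
    .enable(cmds)?;
```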
diff --git a/doc/src/konfigkoll/limitations.md b/doc/src/konfigkoll/limitations.md new file mode 100644 index 00000000..03ad357f --- /dev/null +++ b/doc/src/konfigkoll/limitations.md @@ -0,0 +1,59 @@

# Limitations

This chapter documents some known limitations of Konfigkoll.

Also consider checking the
[issue tracker on GitHub](https://github.com/VorpalBlade/paketkoll/issues)
for more potential limitations.

## Limitations due to underlying distro

### Debian

On Debian, `apt`/`dpkg` doesn't provide a lot of information about the files
installed by a package. In fact, it only provides the MD5 sum of regular files
and the list of non-regular files (without info about what *type* of non-regular
file they are). This means that unlike Arch Linux:

* We won't be able to tell if the mode/owner/group is wrong on a file.
* `--trust-mtime` doesn't work (we have to checksum every file).

I have plans for how to work around some of these limitations in the future.

Debian is, unlike Arch Linux, not yet fully systemd-ified. This means that some
of the integrations (like enabling systemd services) are less useful. Debian
support is *currently a work in progress* and a solution for this will be
designed at a later point in time.

## Limitations due to not yet being implemented

* Certain errors can be delayed from when they happen to when they are reported.
  This happens because of the async runtime in use (tokio) and how it handles
  (or rather doesn't handle) cancelling synchronous background tasks.
* Some of the exposed API is a work in progress:
  * Sysinfo PCI devices is the most notable example.
  * The process API is also not fully fleshed out (no way to provide stdin
    to child processes).
  * The regex API is rather limited, and will have to be fully redesigned using
    a lower level Rust crate at some point.
* There are plans to do privilege separation like aconfmgr does. This is not yet
  implemented.
* There is not yet support for creating FIFOs, device nodes etc. Or rather, there is,
  it just isn't hooked up to the scripting language yet (nor tested).

## Things that won't get implemented (probably)

This is primarily a comparison with aconfmgr, as that is the closest thing to
Konfigkoll that exists.

* Aconfmgr has special support for *some* AUR helpers. Konfigkoll doesn't.
  * For a start, I use [aurutils], which works differently from the helpers
    aconfmgr supports in that it uses a custom repository. The main purpose
    of the aconfmgr integration is to work around the lack of such custom
    repositories.
  * It would be very Arch Linux specific, and it would be hard to abstract
    over this in a way that would be useful for other distros. The reason
    Konfigkoll exists is to let me manage my Debian systems in the same way
    as my Arch Linux systems, so this is not a priority. That Konfigkoll is
    also much faster is a nice bonus.

[aurutils]: https://github.com/aurutils/aurutils

diff --git a/doc/src/konfigkoll/packages.md b/doc/src/konfigkoll/packages.md new file mode 100644 index 00000000..0bc7765b --- /dev/null +++ b/doc/src/konfigkoll/packages.md @@ -0,0 +1,69 @@

# Managing packages

> This assumes you have read [Getting started](./getting_started.md) before.
> This chapter builds directly on that, specifically the
> [section about the main phase](./getting_started.md#the-main-phase) (which in
> turn builds on earlier sections of that chapter).

## Commands: Installing packages

As noted in the previous chapter, the key type for describing the system configuration
is `Commands`. This includes installing packages. Let's look at a short example:

```rune
pub async fn phase_main(props, cmds, package_managers) {
    cmds.add_pkg("pacman", "linux")?;
    cmds.add_pkg("pacman", "linux-firmware")?;

    Ok(())
}
```

This says that the packages `linux` and `linux-firmware` should be installed
*if* the package manager `pacman` is enabled.

There are two things of note here:

* Konfigkoll ignores instructions to install packages for non-enabled package
  managers. This allows sharing a config across distros more easily.
* The above example actually says that *only* `linux` and `linux-firmware`
  should be installed. Any package that isn't explicitly mentioned (or a
  dependency of an explicitly mentioned package) will be removed. As such, you
  need to list all packages you want to keep.

There is also a `cmds.remove_pkg`. You probably don't want to use it (since
all unmentioned packages are removed); its main purpose is as a marker in
`unsorted.rn` to tell you that a package has been removed on the system compared
to your configuration.

## Optional dependencies

Since Konfigkoll wants you to list all packages you want to keep (except for
their dependencies, which are automatically included), what about optional
dependencies?

The answer is that you need to list them too; Konfigkoll (like aconfmgr) doesn't
consider optional dependencies for the purpose of keeping packages installed.

> **Note**: This is true for Arch Linux. For Debian the situation is *currently*
> different, but likely to change in the future to match that of Arch Linux.
> Debian support is currently highly experimental.
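For example, keeping an optional dependency around is just a normal package
entry like any other (`cups` is only an illustrative choice here):

```rune
// Optional dependencies are not kept automatically: list them explicitly.
// cups stands in for any optional dependency you want to keep installed.
cmds.add_pkg("pacman", "cups")?;
```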
## Note about early packages

As mentioned in the [previous chapter](./getting_started.md#script-dependencies)
you can use `phase_script_dependencies` to install packages that are needed by
the script itself during the main phase. The syntax (`cmds.add_pkg`) is identical
to that of the main phase.

## Package manager specific notes

Not all package managers are created equal, and konfigkoll tries to abstract
over them. Sometimes details leak through, though. Here are some notes on
those leaks.

### Flatpak

Flatpak doesn't really have the notion of manually installed packages vs. dependencies.
Instead, it has the notion of "applications" and "runtimes". That means you cannot
set a package as explicitly or implicitly installed yourself. Konfigkoll maps
"runtimes" to dependencies and "applications" to explicit packages.

diff --git a/release-plz.toml b/release-plz.toml index d05eb2c6..d1092e9c 100644 --- a/release-plz.toml +++ b/release-plz.toml

@@ -2,20 +2,23 @@
 git_release_enable = false
 
 [[package]]
-name = "mtree2"
+name = "xtask"
+release = false
 
 [[package]]
 name = "paketkoll_core"
 publish_features = ["arch_linux", "debian", "serde"]
 
+# We only want GH releases for the binaries
 [[package]]
-name = "paketkoll"
-# We only want GH releases for the binary
+name = "konfigkoll"
 git_release_enable = true
 git_release_type = "auto"
 
 [[package]]
-name = "systemd_tmpfiles"
+name = "paketkoll"
+git_release_enable = true
+git_release_type = "auto"
 
 [changelog]
 body = """
@@ -37,7 +40,8 @@ header = """
 # Changelog\n
 All notable changes to this project will be documented in this file.
 Keep in mind that this is only updated when releases are made and the file
-is generated automatically from commit messages.\n
+is generated automatically from commit messages (and may or may not be lightly
+edited).\n
 For a possibly more edited message focused on the binary please see the github
 releases.\n
 """

diff --git a/site/index.html b/site/index.html new file mode 100644 index 00000000..5358ea53 --- /dev/null +++ b/site/index.html @@ -0,0 +1,14 @@ + + + + + Paketkoll and konfigkoll + + + + + +

Please refer to the documentation for paketkoll and konfigkoll

+ + + \ No newline at end of file