diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..02a37a46
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,3 @@
+target/
+.env
+*.md
\ No newline at end of file
diff --git a/.github/workflows/main-cache.yaml b/.github/workflows/main-cache.yaml
new file mode 100644
index 00000000..70a2beae
--- /dev/null
+++ b/.github/workflows/main-cache.yaml
@@ -0,0 +1,77 @@
+# See https://stackoverflow.com/questions/75951150/why-cant-my-cache-action-triggered-inside-a-pull-request-not-access-the-cache-t
+
+name: Post Merge Cache Building on Master Branch
+on:
+  push:
+    branches: [master]
+  workflow_dispatch:
+jobs:
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        run: cargo fmt --check
+
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        run: cargo check
+
+  clippy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          components: clippy
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        run: cargo clippy
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Remove cached stuff
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf "/usr/local/share/boost"
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        name: Install latest nextest release
+        uses: taiki-e/install-action@nextest
+      - if: ${{ steps.rust-cache.outputs.cache-hit == 'false' }}
+        run: cargo nextest run --all-features
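A note on the `cache-hit` guards above: GitHub Actions step outputs are plain strings, and cache actions do not all emit a literal 'false' on a miss; some leave the output empty. If Swatinem/rust-cache ever returns '' here (an assumption worth checking against the action's docs, not a confirmed bug in this workflow), every `== 'false'` condition would be skipped and the cache would never be rebuilt. The defensive form is to require an exact 'true' instead, e.g.:

      # hypothetical variant of the guard, not part of the patch
      - if: ${{ steps.rust-cache.outputs.cache-hit != 'true' }}
        run: cargo check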
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
new file mode 100644
index 00000000..8bf6e8f7
--- /dev/null
+++ b/.github/workflows/tests.yaml
@@ -0,0 +1,64 @@
+on: pull_request
+
+jobs:
+  fmt:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          components: rustfmt
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - run: cargo fmt --check
+
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - run: cargo check
+
+  clippy:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - uses: dtolnay/rust-toolchain@stable
+        with:
+          components: clippy
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - run: cargo clippy
+
+  test:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Remove cached stuff
+        run: |
+          sudo rm -rf /usr/share/dotnet
+          sudo rm -rf /opt/ghc
+          sudo rm -rf "/usr/local/share/boost"
+          sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+      - uses: actions/checkout@v3
+      - name: Install protoc
+        run: sudo apt-get update && sudo apt-get install -y protobuf-compiler
+      - uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+        id: rust-cache
+      - name: Check for cache hit
+        run: echo "cache-hit=${{ steps.rust-cache.outputs.cache-hit }}"
+      - name: Install latest nextest release
+        uses: taiki-e/install-action@nextest
+      - run: cargo nextest run --all-features
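The jobs in tests.yaml mirror main-cache.yaml almost line for line; only the cache-hit guards differ. If the duplication becomes a maintenance burden, GitHub Actions lets both triggers call one reusable workflow via `on: workflow_call`. A minimal sketch, assuming a hypothetical file name `ci-jobs.yaml` and input `skip-on-cache-hit` (neither is part of this patch):

    # .github/workflows/ci-jobs.yaml (hypothetical)
    on:
      workflow_call:
        inputs:
          skip-on-cache-hit:   # true for the post-merge cache builder
            type: boolean
            default: false
    jobs:
      check:
        runs-on: ubuntu-latest
        steps:
          - uses: actions/checkout@v3
          - uses: dtolnay/rust-toolchain@stable
          - uses: Swatinem/rust-cache@v2
            id: rust-cache
          # Run unconditionally on PRs; skip on a warm cache post-merge.
          - if: ${{ !inputs.skip-on-cache-hit || steps.rust-cache.outputs.cache-hit != 'true' }}
            run: cargo check

Each caller then shrinks to a job with `uses: ./.github/workflows/ci-jobs.yaml` plus the input set for its trigger.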
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..74a89848
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,205 @@
+package-lock.json
+
+# Json files
+connection_oauth_definition.json
+
+# Editor : JetBrains
+.idea/
+
+# Editor : VS Code
+.vscode/
+
+# Created by https://www.toptal.com/developers/gitignore/api/node
+# Edit at https://www.toptal.com/developers/gitignore?templates=node
+
+### Node ###
+# Logs
+logs
+*.log
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+lerna-debug.log*
+
+# Diagnostic reports (https://nodejs.org/api/report.html)
+report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
+
+# Runtime data
+pids
+*.pid
+*.seed
+*.pid.lock
+
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+*.lcov
+
+# nyc test coverage
+.nyc_output
+
+# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# Bower dependency directory (https://bower.io/)
+bower_components
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (https://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directories
+node_modules/
+jspm_packages/
+
+# TypeScript v1 declaration files
+typings/
+
+# TypeScript cache
+*.tsbuildinfo
+
+# Optional npm cache directory
+.npm
+
+# Optional eslint cache
+.eslintcache
+
+# Microbundle cache
+.rpt2_cache/
+.rts2_cache_cjs/
+.rts2_cache_es/
+.rts2_cache_umd/
+
+# Optional REPL history
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variables file
+.env
+.env.test
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+
+# Next.js build output
+.next
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# End of https://www.toptal.com/developers/gitignore/api/node
+
+# Created by https://www.toptal.com/developers/gitignore/api/macos
+# Edit at https://www.toptal.com/developers/gitignore?templates=macos
+
+### macOS ###
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+### macOS Patch ###
+# iCloud generated files
+*.icloud
+
+# End of https://www.toptal.com/developers/gitignore/api/macos
+
+# Created by https://www.toptal.com/developers/gitignore/api/windows
+# Edit at https://www.toptal.com/developers/gitignore?templates=windows
+
+### Windows ###
+# Windows thumbnail cache files
+Thumbs.db
+Thumbs.db:encryptable
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+# End of https://www.toptal.com/developers/gitignore/api/windows
+
+#Added by cargo
+
+target/
+
+.pnp.*
+.yarn/
+
+*.node
+
+*/generated_test_output/*
diff --git a/Cargo.lock b/Cargo.lock
new file mode 100644
index 00000000..219c6e84
--- /dev/null
+++ b/Cargo.lock
@@ -0,0 +1,4023 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.21.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aes"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
+[[package]]
+name = "ahash"
+version = "0.8.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011"
+dependencies = [
+ "cfg-if",
+ "getrandom",
+ "once_cell",
+ "version_check",
+ "zerocopy",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anes"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
+
+[[package]]
+name = "anstyle"
+version = "1.0.6"
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" + +[[package]] +name = "anyhow" +version = "1.0.80" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" + +[[package]] +name = "api" +version = "0.1.0" +dependencies = [ + "anyhow", + "axum", + "bson", + "chrono", + "convert_case 0.6.0", + "dotenvy", + "envconfig", + "event-core", + "fake", + "futures", + "futures-util", + "gateway", + "handlebars", + "http", + "http-serde-ext", + "hyper", + "indexmap 2.2.5", + "integrationos-domain", + "jsonwebtoken", + "mockito", + "moka", + "mongodb", + "num_cpus", + "openapiv3", + "rand", + "redis-retry", + "reqwest", + "segment", + "semver 1.0.22", + "serde", + "serde_json", + "strum", + "testcontainers-modules", + "tokio", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", + "uuid", + "validator", +] + +[[package]] +name = "arc-swap" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener", +] + +[[package]] +name = "async-recursion" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "async-trait" +version = "0.1.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "atomic" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "axum" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" +dependencies = [ + "async-trait", + "axum-core", + "bitflags 1.3.2", + "bytes", + "futures-util", + "http", + "http-body", + "hyper", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", 
+] + +[[package]] +name = "axum-macros" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdca6a10ecad987bda04e95606ef85a5417dcaac1a78455242d72e031e2b6b62" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "axum-prometheus" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97def327c5481791abb57ac295bfc70f2e1a0727675b7dbf74bd1b27a72b6fd8" +dependencies = [ + "axum", + "axum-core", + "bytes", + "futures", + "futures-core", + "http", + "http-body", + "matchit", + "metrics", + "metrics-exporter-prometheus", + "once_cell", + "pin-project", + "tokio", + "tower", + "tower-http", +] + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "tap", + "wyz", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bollard-stubs" +version = "1.42.0-rc.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed59b5c00048f48d7af971b71f800fdf23e858844a6f9e4d32ca72e9399e7864" +dependencies = [ + "serde", + "serde_with", +] + +[[package]] +name = "bson" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce21468c1c9c154a85696bb25c20582511438edb6ad67f846ba1378ffdd80222" +dependencies = [ + "ahash", + "base64 0.13.1", + "bitvec", + "hex", + "indexmap 2.2.5", + "js-sys", + "once_cell", + "rand", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + +[[package]] +name = "bumpalo" +version = "3.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" + +[[package]] +name = "bytecount" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e1e5f035d16fc623ae5f74981db80a439803888314e3a555fd6f04acd51a3205" + +[[package]] +name = "bytes" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "camino" +version = "1.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59e92b5a388f549b863a7bea62612c09f24c8393560709a54558a9abdfb3b9c" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "694c8807f2ae16faecc43dc17d74b3eb042482789fd0eb64b39a2e04e087053f" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.22", + "serde", + "serde_json", +] + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cc" +version = "1.0.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf5903dcbc0a39312feb77df2ff4c76387d591b9fc7b04a238dcf8bb62639a" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-targets 0.52.4", +] + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "clap" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +dependencies = [ + "anstyle", + "clap_lex", +] + +[[package]] +name = "clap_lex" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" + +[[package]] +name = "colored" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +dependencies = [ + "lazy_static", + "windows-sys 0.48.0", +] + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "cooked-waker" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147be55d677052dabc6b22252d5dd0fd4c29c8c27aa4f2fbef0f94aa003b406f" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpufeatures" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +dependencies = [ + "libc", +] + +[[package]] +name = "criterion" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "criterion-plot", + "futures", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "tokio", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = 
"crossbeam-utils" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "darling" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" +dependencies = [ + "darling_core 0.13.4", + "darling_macro 0.13.4", +] + +[[package]] +name = "darling" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" +dependencies = [ + "darling_core 0.20.8", + "darling_macro 0.20.8", +] + +[[package]] +name = "darling_core" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 1.0.109", +] + +[[package]] +name = "darling_core" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.52", +] + +[[package]] +name = "darling_macro" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" +dependencies = [ + "darling_core 0.13.4", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.20.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" +dependencies = [ + "darling_core 0.20.8", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "data-encoding" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" + +[[package]] +name = "debugid" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" +dependencies = [ + "serde", + "uuid", +] + +[[package]] +name = "deno_core" +version = "0.238.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ded8b759e4435aa0929913909dd6c482ed6042dae19c53260e1caf9d55b37a9" +dependencies = [ + "anyhow", + "bytes", + "cooked-waker", + "deno_ops", + "deno_unsync", + "futures", + "libc", + "log", + "parking_lot", + "pin-project", + "serde", + "serde_json", + "serde_v8", + "smallvec", + "sourcemap", + "static_assertions", + "tokio", + "url", + "v8", +] + +[[package]] +name = "deno_ops" +version = "0.114.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "168a929496191fdd8e91f898c8454429df4d5489597777d89f47897f6a37da6b" +dependencies = [ + "proc-macro-rules", + "proc-macro2", + "quote", + "strum", + "strum_macros", + "syn 2.0.52", + "thiserror", +] + +[[package]] +name = "deno_unsync" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30dff7e03584dbae188dae96a0f1876740054809b2ad0cf7c9fc5d361f20e739" +dependencies = [ + "tokio", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case 0.4.0", + "proc-macro2", + "quote", + "rustc_version 0.4.0", + "syn 1.0.109", +] + +[[package]] +name = "deunicode" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6e854126756c496b8c81dec88f9a706b15b875c5849d4097a3854476b9fdf94" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "downcast-rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ea835d29036a4087793836fa931b08837ad5e957da9e23886b29586fb9b6650" + +[[package]] +name = "dummy" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e57e12b69e57fad516e01e2b3960f122696fdb13420e1a88ed8e210316f2876" +dependencies = [ + "darling 0.20.8", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "either" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "enum-as-inner" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "envconfig" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea81cc7e21f55a9d9b1efb6816904978d0bfbe31a50347cb24b2e75564bcac9b" +dependencies = [ + "envconfig_derive", +] + +[[package]] +name = "envconfig_derive" +version = "0.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dfca278e5f84b45519acaaff758ebfa01f18e96998bc24b8f1b722dd804b9bf" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "error-chain" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d2f06b9cac1506ece98fe3231e3cc9c4410ec3d5b1f24ae1c8946f0742cdefc" +dependencies = [ + "version_check", +] + +[[package]] +name = "event-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "bson", + "chrono", + "dotenvy", + "envconfig", + "fake", + "futures", + "google-token-fetcher", + "handlebars", + "http", + "integrationos-domain", + "js-sandbox-ios", + "metrics", + "metrics-exporter-prometheus", + "mockito", + "moka", + "mongodb", + "redis-retry", + "reqwest", + "serde", + "serde_json", + "testcontainers-modules", + "tokio", + "tokio-condvar", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "fake" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26221445034074d46b276e13eb97a265ebdb8ed8da705c4dddd3dd20b66b45d2" +dependencies = [ + "chrono", + "deunicode", + "dummy", + "http", + "rand", + "rand_core", + "semver 1.0.22", + "serde_json", + "url-escape", + "uuid", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fslock" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57eafdd0c16f57161105ae1b98a1238f97645f2f588438b2949c99a2af9616bf" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + 
"futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-executor" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" + +[[package]] +name = "futures-macro" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "gateway" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "axum", + "axum-macros", + "axum-prometheus", + "criterion", + "dotenvy", + "envconfig", + "http", + "http-serde-ext", + "integrationos-domain", + "moka", + "mongodb", + "redis-retry", + "serde", + "serde_json", + "tokio", + "tower", + "tower-http", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" + +[[package]] +name = "google-token-fetcher" +version = "0.1.0" +dependencies = [ + "anyhow", + "reqwest", + "serde", +] + +[[package]] +name = "h2" +version = "0.3.24" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 2.2.5", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5eceaaeec696539ddaf7b333340f1af35a5aa87ae3e4f3ead0532f72affab2e" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "handlebars" +version = "4.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faa67bab9ff362228eb3d00bd024a4965d8231bbb7921167f0cfa66c6626b225" +dependencies = [ + "log", + "pest", + "pest_derive", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ff8ae62cd3a9102e5637afc8452c55acf3844001bd5374e0b0bd7b6616c038" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "hostname" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867" +dependencies = [ + "libc", + "match_cfg", + "winapi", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" + +[[package]] +name = "http-serde-ext" 
+version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c94a985b23074de11e6d99712873131c3a3d12cf482406de7e0a2ec8a4cd1943" +dependencies = [ + "http", + "serde", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "0.14.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2 0.5.6", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.60" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "if_chain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +dependencies = [ + "equivalent", + "hashbrown 0.14.3", + "serde", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array", +] + +[[package]] +name = "integrationos-domain" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddff1b83ab196f76666b13730747a8e9fc6d1953eb0f3fc70b8afa00ba246f0c" +dependencies = [ + "aes", + "anyhow", + "async-recursion", + "async-trait", + "axum", + "base64 0.21.7", + "base64ct", + "bson", + "chrono", + "ctr", + "downcast-rs", + "envconfig", + "fake", + "futures", + "handlebars", + "http", + "http-serde-ext", + "indexmap 2.2.5", + "js-sandbox-ios", + "jsonpath_lib", + "moka", + "mongodb", + "openapiv3", + "pin-project", + "prost", + "rand", + "reqwest", + "semver 1.0.22", + "serde", + "serde_json", + "sha2", + "sha3", + "strum", + "thiserror", + "tokio", + "tracing", + "uuid", +] + +[[package]] +name = "ipconfig" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" +dependencies = [ + "socket2 0.5.6", + "widestring", + "windows-sys 0.48.0", + "winreg", +] + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "is-terminal" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "js-sandbox-ios" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e3dd0f703d6d50aef7889fe55933fa3110e37d97ceeecab2c1c0a9de2f1eeac" +dependencies = [ + "deno_core", + "js-sandbox-macros", + "serde", + "serde_json", +] + +[[package]] +name = "js-sandbox-macros" +version = "0.2.0-rc.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0445474069bad4c2b01856976d83c796521ae9298ba718ecf26041767eb68b2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "js-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "jsonpath_lib" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa63191d68230cccb81c5aa23abd53ed64d83337cacbb25a7b8c7979523774f" +dependencies = [ + 
"log", + "serde", + "serde_json", +] + +[[package]] +name = "jsonwebtoken" +version = "8.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +dependencies = [ + "base64 0.21.7", + "pem", + "ring 0.16.20", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.153" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" + +[[package]] +name = "lock_api" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" + +[[package]] +name = "lru-cache" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "mach2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" +dependencies = [ + "libc", +] + +[[package]] +name = "match_cfg" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffbee8634e0d45d258acb448e7eaab3fce7a0a467395d4d9f228e3c1f01fb2e4" + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "metrics" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde3af1a009ed76a778cb84fdef9e7dbbdf5775ae3e4cc1f434a6a307f6f76c5" +dependencies = [ + "ahash", + "metrics-macros", + "portable-atomic", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d4fa7ce7c4862db464a37b0b31d89bca874562f034bd7993895572783d02950" +dependencies = [ + "base64 0.21.7", + "hyper", + "indexmap 1.9.3", + "ipnet", + "metrics", + "metrics-util", + "quanta 0.11.1", + "thiserror", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-macros" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b4faf00617defe497754acde3024865bc143d44a86799b24e191ecff91354f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "metrics-util" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4de2ed6e491ed114b40b732e4d1659a9d53992ebd87490c44a6ffe23739d973e" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.13.1", + "metrics", + "num_cpus", + "quanta 0.11.1", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.48.0", +] + +[[package]] +name = "mockito" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2f6e023aa5bdf392aa06c78e4a4e6d498baab5138d0c993503350ebbc37bf1e" +dependencies = [ + "assert-json-diff", + "colored", + "futures-core", + "hyper", + "log", + "rand", + "regex", + "serde_json", + "serde_urlencoded", + "similar", + "tokio", +] + +[[package]] +name = "moka" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1911e88d5831f748a4097a43862d129e3c6fca831eecac9b8db6d01d93c9de2" +dependencies = [ + "async-lock", + "async-trait", + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "futures-util", + "once_cell", + "parking_lot", + "quanta 0.12.2", + "rustc_version 0.4.0", + "skeptic", + "smallvec", + "tagptr", + "thiserror", + "triomphe", + "uuid", +] + +[[package]] +name = "mongodb" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de59562e5c71656c098d8e966641b31da87b89dc3dcb6e761d3b37dcdfa0cb72" +dependencies = [ + "async-trait", + "base64 0.13.1", + "bitflags 1.3.2", + "bson", + "chrono", + "derivative", + "derive_more", + "futures-core", + "futures-executor", + "futures-io", + "futures-util", + "hex", + "hmac", + "lazy_static", + "md-5", + "pbkdf2", + "percent-encoding", + "rand", + "rustc_version_runtime", + "rustls", + "rustls-pemfile", + "serde", + "serde_bytes", + "serde_with", + "sha-1", + "sha2", + "socket2 0.4.10", + "stringprep", + 
"strsim", + "take_mut", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util", + "trust-dns-proto", + "trust-dns-resolver", + "typed-builder", + "uuid", + "webpki-roots", +] + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", + "rand", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "oorandom" +version = "11.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" + +[[package]] +name = "openapiv3" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc02deea53ffe807708244e5914f6b099ad7015a207ee24317c22112e17d9c5c" +dependencies = [ + "indexmap 2.2.5", + "serde", + "serde_json", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.5", +] + +[[package]] +name = "pbkdf2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" +dependencies = [ + "digest", +] + +[[package]] +name = "pem" +version 
= "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +dependencies = [ + "base64 0.13.1", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f8023d0fb78c8e03784ea1c7f3fa36e68a723138990b8d5a47d916b651e7a8" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0d24f72393fd16ab6ac5738bc33cdb6a9aa73f8b902e8fe29cf4e67d7dd1026" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc17e2a6c7d0a492f0158d7a4bd66cc17280308bbaff78d5bef566dca35ab80" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pest_meta" +version = "2.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "934cd7631c050f4674352a6e835d5f6711ffbfb9345c2fc0107155ac495ae293" +dependencies = [ + "once_cell", + "pest", + "sha2", +] + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "plotters" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" + +[[package]] +name = "plotters-svg" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "portable-atomic" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + 
+[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro-rules" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07c277e4e643ef00c1233393c673f655e3672cf7eb3ba08a00bdd0ea59139b5f" +dependencies = [ + "proc-macro-rules-macros", + "proc-macro2", + "syn 2.0.52", +] + +[[package]] +name = "proc-macro-rules-macros" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "207fffb0fe655d1d47f6af98cc2793405e85929bdbc420d685554ff07be27ac7" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "proc-macro2" +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "prost" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" +dependencies = [ + "anyhow", + "itertools 0.11.0", + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "pulldown-cmark" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57206b407293d2bcd3af849ce869d52068623f19e1b5ff8e8778e3309439682b" +dependencies = [ + "bitflags 2.4.2", + "memchr", + "unicase", +] + +[[package]] +name = "quanta" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17e662a7a8291a865152364c20c7abc5e60486ab2001e8ec10b24862de0b9ab" +dependencies = [ + "crossbeam-utils", + "libc", + "mach2", + "once_cell", + "raw-cpuid 10.7.0", + "wasi", + "web-sys", + "winapi", +] + +[[package]] +name = "quanta" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca0b7bac0b97248c40bb77288fc52029cf1459c0461ea1b05ee32ccf011de2c" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid 11.0.1", + "wasi", + "web-sys", + "winapi", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = 
"radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "raw-cpuid" +version = "10.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c297679cb867470fa8c9f67dbba74a78d78e3e98d7cf2b08d6d71540f797332" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "raw-cpuid" +version = "11.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d86a7c4638d42c44551f4791a20e687dbb4c3de1f33c43dd71e355cd429def1" +dependencies = [ + "bitflags 2.4.2", +] + +[[package]] +name = "rayon" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4963ed1bc86e4f3ee217022bd855b297cef07fb9eac5dfa1f788b220b49b3bd" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "redis" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f49cdc0bb3f412bf8e7d1bd90fe1d9eb10bc5c399ba90973c14662a27b3f8ba" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine", + "futures", + "futures-util", + "itoa", + "percent-encoding", + "pin-project-lite", + "ryu", + "sha1_smol", + "socket2 0.4.10", + "tokio", + "tokio-retry", + "tokio-util", + "url", +] + +[[package]] +name = "redis-retry" +version = "0.1.0" +dependencies = [ + "anyhow", + "envconfig", + "futures-util", + "redis", + "tracing", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.4.6", + "regex-syntax 0.8.2", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +dependencies = [ + "aho-corasick", + "memchr", + 
"regex-syntax 0.8.2", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "reqwest" +version = "0.11.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" +dependencies = [ + "base64 0.21.7", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "resolv-conf" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" +dependencies = [ + "hostname", + "quick-error", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver 1.0.22", +] + +[[package]] +name = "rustc_version_runtime" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d31b7153270ebf48bf91c65ae5b0c00e749c4cfad505f66530ac74950249582f" +dependencies = [ + "rustc_version 0.2.3", + "semver 0.9.0", +] + +[[package]] +name = "rustix" +version = "0.38.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" +dependencies = [ + "bitflags 2.4.2", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.21.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +dependencies = [ + "log", + "ring 
0.17.8", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + +[[package]] +name = "segment" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12485833e00457a6bbba60397d3f19362751a0caefe27f6755fff1a2be4fd601" +dependencies = [ + "async-trait", + "reqwest", + "serde", + "serde_json", + "thiserror", + "time", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser", +] + +[[package]] +name = "semver" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +dependencies = [ + "serde", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "serde" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_bytes" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.197" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "serde_json" +version = "1.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" 
+dependencies = [ + "indexmap 2.2.5", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_path_to_error" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebd154a240de39fdebcf5775d2675c204d7c13cf39a4c697be6493c8e734337c" +dependencies = [ + "itoa", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_v8" +version = "0.147.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af950d83e1c70b762d48fa7a869d6db9a4f191548dfd666fa4e62f2229e1dce" +dependencies = [ + "bytes", + "derive_more", + "num-bigint", + "serde", + "smallvec", + "thiserror", + "v8", +] + +[[package]] +name = "serde_with" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678b5a069e50bf00ecd22d0cd8ddf7c236f68581b03db652061ed5eb13a312ff" +dependencies = [ + "serde", + "serde_with_macros", +] + +[[package]] +name = "serde_with_macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" +dependencies = [ + "darling 0.13.4", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest", + "keccak", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "similar" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32fea41aca09ee824cc9724996433064c89f7777e60762749a4170a14abbfa21" + +[[package]] +name = "simple_asn1" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror", + "time", +] + +[[package]] +name = "skeptic" +version = "0.13.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"16d23b015676c90a0f01c197bfdc786c20342c73a0afdda9025adb0bc42940a8" +dependencies = [ + "bytecount", + "cargo_metadata", + "error-chain", + "glob", + "pulldown-cmark", + "tempfile", + "walkdir", +] + +[[package]] +name = "sketches-ddsketch" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" + +[[package]] +name = "socket2" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "socket2" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "sourcemap" +version = "7.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10da010a590ed2fa9ca8467b00ce7e9c5a8017742c0c09c45450efc172208c4b" +dependencies = [ + "data-encoding", + "debugid", + "if_chain", + "rustc_version 0.2.3", + "serde", + "serde_json", + "unicode-id", + "url", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stringprep" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +dependencies = [ + "finl_unicode", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.52", +] + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "1.0.109" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "take_mut" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +dependencies = [ + "cfg-if", + "fastrand", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "testcontainers" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d2931d7f521af5bae989f716c3fa43a6af9af7ec7a5e21b59ae40878cec00" +dependencies = [ + "bollard-stubs", + "futures", + "hex", + "hmac", + "log", + "rand", + "serde", + "serde_json", + "sha2", +] + +[[package]] +name = "testcontainers-modules" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d0334776e1e8ee7c504a922c5236daf865ffe413aa630d84ae91dcce0b10bc3" +dependencies = [ + "testcontainers", +] + +[[package]] +name = "thiserror" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.34" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.36.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.5.6", + "tokio-macros", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-condvar" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7233b09174540ef9bf9fc8326bcad6ccebc631e7c9a1e2e48d956a133056f9d" +dependencies = [ + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.4.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +dependencies = [ + "bitflags 2.4.2", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "triomphe" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "859eb650cfee7434994602c3a68b25d77ad9e68c8a6cd491616ef86661382eb3" + +[[package]] +name = "trust-dns-proto" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c31f240f59877c3d4bb3b3ea0ec5a6a0cff07323580ff8c7a605cd7d08b255d" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna 0.2.3", + "ipnet", + "lazy_static", + "log", + "rand", + "smallvec", + "thiserror", + "tinyvec", + "tokio", + "url", +] + +[[package]] +name = "trust-dns-resolver" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4ba72c2ea84515690c9fcef4c6c660bb9df3036ed1051686de84605b74fd558" +dependencies = [ + "cfg-if", + "futures-util", + "ipconfig", + "lazy_static", + "log", + "lru-cache", + "parking_lot", + "resolv-conf", + "smallvec", + "thiserror", + "tokio", + "trust-dns-proto", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typed-builder" +version = "0.10.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" + +[[package]] +name = "unicode-id" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1b6def86329695390197b82c1e244a54a131ceb66c996f2088a3876e2ae083f" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna 0.5.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "url-escape" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44e0ce4d1246d075ca5abec4b41d33e87a6054d08e2366b63205665e950db218" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "uuid" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +dependencies = [ + "atomic", + "getrandom", + "md-5", + "serde", + "sha1_smol", +] + +[[package]] +name = "v8" +version = "0.82.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f53dfb242f4c0c39ed3fc7064378a342e57b5c9bd774636ad34ffe405b808121" +dependencies = [ + "bitflags 1.3.2", + "fslock", + "once_cell", + "which", +] + +[[package]] +name = "validator" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b92f40481c04ff1f4f61f304d61793c7b56ff76ac1469f1beb199b1445b253bd" +dependencies = [ + "idna 0.4.0", + "lazy_static", + "regex", + "serde", + "serde_derive", + "serde_json", + "url", + "validator_derive", +] + +[[package]] +name = "validator_derive" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc44ca3088bb3ba384d9aecf40c6a23a676ce23e09bdaca2073d99c207f864af" +dependencies = [ + "if_chain", + "lazy_static", + "proc-macro-error", + "proc-macro2", + "quote", + "regex", + "syn 1.0.109", + "validator_types", +] + +[[package]] +name = "validator_types" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "111abfe30072511849c5910134e8baf8dc05de4c0e5903d681cbd5c9c4d611e3" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" + +[[package]] +name = "watchdog" +version = "0.1.0" +dependencies = [ + "anyhow", + "chrono", + "dotenvy", + "envconfig", + "futures", + "integrationos-domain", + "mongodb", + "redis-retry", + "serde_json", + "tokio", + "tracing", + "tracing-subscriber", +] + +[[package]] +name = "web-sys" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + +[[package]] +name = "widestring" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "653f141f39ec16bba3c5abe400a0c60da7468261cc2cbf36805022876bc721a8" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.4", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +dependencies = [ + "windows_aarch64_gnullvm 0.52.4", + "windows_aarch64_msvc 0.52.4", + "windows_i686_gnu 0.52.4", + "windows_i686_msvc 0.52.4", + "windows_x86_64_gnu 0.52.4", + "windows_x86_64_gnullvm 0.52.4", + "windows_x86_64_msvc 0.52.4", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wyz" +version = 
"0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.52", +] diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 00000000..625f6147 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,80 @@ +[workspace] + +resolver = "2" + +members = [ + "api", + "event-core", + "gateway", + "google-token-fetcher", + "redis-retry", + "watchdog", +] + +[workspace.dependencies] +anyhow = "1.0.75" +async-trait = "0.1.74" +axum = "0.6.20" +axum-macros = "0.3.8" +base64 = "0.21.5" +base64ct = { version = "1.6.0", features = ["alloc"] } +bson = "2.7.0" +chrono = { version = "0.4.31", features = ["serde"] } +convert_case = "0.6.0" +dotenvy = "0.15.7" +envconfig = "0.10.0" +fake = { version = "=2.9.1", features = [ + "uuid", + "derive", + "dummy", + "chrono", + "rand_core", + "http", + "serde_json", + "semver", +] } +futures = "0.3.28" +futures-util = "0.3.28" +handlebars = "4.4.0" +http = "0.2.9" +http-serde-ext = "0.1.8" +integrationos-domain = "0.1.5" +js-sandbox-ios = "0.1.0" +jsonpath_lib = "0.3.0" +jsonwebtoken = "8.3.0" +mockito = "1.2.0" +moka = { version = "0.12.1", features = ["future"] } +mongodb = "2.7.0" +once_cell = "1.18.0" +openapiv3 = { version = "2.0.0", features = ["skip_serializing_defaults"] } +rand = "0.8.5" +regex = "1.10.2" +reqwest = { version = "0.11.22", features = [ + "json", + "rustls-tls", +], default-features = false } +semver = { version = "1.0.20", features = ["serde"] } +serde = { version = "1.0.189", features = ["derive", "rc"] } +serde_json = "1.0.107" +sha2 = { version = "0.10.6", features = ["asm"] } +strum = { version = "0.25", features = ["derive"] } +testcontainers-modules = "0.3" +tokio = { version = "1.33.0", features = [ + "macros", + "rt-multi-thread", + "time", + "sync", +] } +tower-http = { version = "0.4.4", features = [ + "trace", + "cors", + "sensitive-headers", +] } +tracing = "0.1.40" +tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } +url = { version = "2.4.1", features = ["serde"] } +uuid = { version = "1.5.0", features = ["v4", "serde"] } +validator = { version = "0.16.1", features = ["derive"] } + +[profile.release] +lto = true diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..0f60103d --- /dev/null +++ b/Dockerfile @@ -0,0 +1,24 @@ +FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef +ARG EXECUTABLE +RUN : "${EXECUTABLE:?Build argument needs to be set and non-empty.}" +WORKDIR /app/${EXECUTABLE} + +FROM chef AS planner +COPY . . +RUN cargo chef prepare --recipe-path recipe.json + +FROM chef AS builder +COPY --from=planner /app/${EXECUTABLE}/recipe.json recipe.json +# Build dependencies - this is the caching Docker layer! +RUN cargo chef cook --release --recipe-path recipe.json --bin ${EXECUTABLE} +# Build application +COPY . . 
+RUN cargo build --release --bin ${EXECUTABLE} + +FROM debian:bookworm-slim AS runtime +ARG EXECUTABLE +ENV EXECUTABLE=$EXECUTABLE +RUN apt-get update && apt-get install -y ca-certificates && rm -rf /var/lib/apt/lists/* +WORKDIR /app +COPY --from=builder /app/${EXECUTABLE}/target/release/${EXECUTABLE} /usr/local/bin +ENTRYPOINT /usr/local/bin/${EXECUTABLE} diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..f288702d --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..315a3db2
--- /dev/null
+++ b/README.md
@@ -0,0 +1,10 @@
+# core
+
+Projects related to the IntegrationOS platform:
+
+- [API](./api)
+- [Event Core](./event-core)
+- [Gateway](./gateway)
+- [Redis Retry](./redis-retry)
+- [Google Token Fetcher](./google-token-fetcher)
+- [Watchdog](./watchdog)
diff --git a/api/Cargo.toml b/api/Cargo.toml
new file mode 100644
index 00000000..1897ac08
--- /dev/null
+++ b/api/Cargo.toml
@@ -0,0 +1,59 @@
+[package]
+name = "api"
+version = "0.1.0"
+edition = "2021"
+
+[features]
+dummy = ["fake", "integrationos-domain/dummy"]
+
+[dependencies]
+anyhow.workspace = true
+axum.workspace = true
+bson.workspace = true
+chrono.workspace = true
+convert_case.workspace = true
+dotenvy.workspace = true
+envconfig.workspace = true
+fake = { workspace = true, optional = true }
+futures.workspace = true
+futures-util.workspace = true
+handlebars.workspace = true
+http-serde-ext.workspace = true
+http.workspace = true
+hyper = "0.14.27"
+indexmap = "2.1.0"
+integrationos-domain = { workspace = true, features = [
+    "unified",
+    "metrics",
+    "axum-error",
+] }
+jsonwebtoken.workspace = true
+moka.workspace = true
+mongodb.workspace = true
+num_cpus = "1"
+openapiv3.workspace = true
+rand.workspace = true
+redis-retry = { path = "../redis-retry" }
+reqwest.workspace = true
+segment = "0.2.3"
+semver.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+strum.workspace = true
+tokio.workspace = true
+tower = { version = "0.4.13", features = ["filter"] }
+tower-http.workspace = true
+tracing-subscriber.workspace = true
+tracing.workspace = true
+validator.workspace = true
+
+[dev-dependencies]
+testcontainers-modules = { workspace = true, features = ["mongo", "redis"] }
+event-core = { path = "../event-core" }
+gateway = { path = "../gateway" }
+mockito.workspace = true
+uuid.workspace = true
+
+[[test]]
+name = "api_tests"
+required-features = ["dummy"]
diff --git a/api/README.md b/api/README.md
new file mode 100644
index 00000000..55273660
--- /dev/null
+++ b/api/README.md
@@ -0,0 +1,757 @@
+## Common CRUD Endpoints
+
+The following CRUD endpoints are implemented for:
+
+- [`connection-definitions`](#v1connection-definitions-connection-definitions)
+- [`connection-model-definitions`](#v1connection-model-definitions-connection-model-definitions)
+- [`connection-model-schemas`](#v1connection-model-schemas-connection-model-schemas)
+- [`connection-oauth-definitions`](#v1connection-oauth-definitions-connection-oauth-definitions)
+- [`common-models`](#v1common-models-common-models)
+
+### `GET` Requests
+
+All `GET` requests return a list of models filtered by the query parameters. A query parameter may be any field of the model, but its value must match the stored field value exactly. There are also two special query parameters, `limit` and `skip`, which can be used for pagination: `limit` caps the number of records returned, and `skip` skips that many records before returning.
+
+All `GET` responses will be in the following format:
+
+```
+{
+  "rows": [
+    {
+
+    },
+    {
+
+    }
+  ],
+  "total": 2,
+  "skip": 0,
+  "limit": 20
+}
+```
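+For example, a hypothetical paginated listing (the filter value and counts here are illustrative only; the row format for each model is shown in the sections below):
+
+```
+GET /v1/connection-definitions?platform=shopify&limit=1&skip=0
+
+{
+  "rows": [{ "...": "one connection definition" }],
+  "total": 42,
+  "skip": 0,
+  "limit": 1
+}
+```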
+### `POST` Requests
+
+All `POST` requests create a record of the given model type. The `body` of the `POST` request must be in the shape of the model type, except without the `_id` field and the metadata fields. The exact `body` payloads for each model are listed below. On success, the model will be returned along with the newly created `_id` and metadata fields.
+
+### `PATCH` Requests
+
+All `PATCH` requests update a record based on the `:id` passed in the endpoint path, which is `/v1/<endpoint>/:id`. The `body` of the request is identical to the `body` of the corresponding `POST` request. On success, the response will be:
+
+```json
+{
+  "success": true
+}
+```
+
+### `DELETE` Requests
+
+All `DELETE` requests delete a record based on the `:id` passed in the endpoint path, which is `/v1/<endpoint>/:id`. On success, the newly deleted model will be returned.
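+For instance, using the id from the `common-models` examples further below (illustrative only):
+
+```
+PATCH  /v1/common-models/gm::F4-WcODfqMA::fCdamKVUTpi4k5uZRwpJbg   -> { "success": true }
+DELETE /v1/common-models/gm::F4-WcODfqMA::fCdamKVUTpi4k5uZRwpJbg   -> the deleted model
+```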
+### `/v1/connection-oauth-definitions` Connection OAuth Definitions
+
+### `POST` Requests
+
+The `POST` request has the format:
+
+```json
+{
+  "connectionPlatform": "xero",
+  "platformRedirectUri": "https://login.xero.com/identity/connect/authorize?response_type=code",
+  "iosRedirectUri": "/connection-oauth/callback",
+  "scopes": "",
+  "init": {
+    "configuration": {
+      "baseUrl": "https://identity.xero.com/",
+      "path": "connect/token",
+      "authMethod": {
+        "type": "None"
+      },
+      "headers": {
+        "connection": [
+          "keep-alive"
+        ],
+        "accept": [
+          "application/json;charset=utf-8"
+        ],
+        "authorization": [
+          "{{ authorization }}"
+        ]
+      },
+      "schemas": {},
+      "samples": {},
+      "responses": [],
+      "content": "form"
+    },
+    "compute": "function btoa(str) { \n const base64Chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; \n\n let result = ''; \n let i = 0; \n\n while (i < str.length) { \n const byte1 = str.charCodeAt(i++); \n const byte2 = i < str.length ? str.charCodeAt(i++) : 0; \n const byte3 = i < str.length ? str.charCodeAt(i++) : 0; \n\n const triplet = (byte1 << 16) | (byte2 << 8) | byte3; \n\n const char1 = (triplet >> 18) & 0x3F; \n const char2 = (triplet >> 12) & 0x3F; \n const char3 = (triplet >> 6) & 0x3F; \n const char4 = triplet & 0x3F; \n\n result += base64Chars.charAt(char1) + base64Chars.charAt(char2) +\n(i < str.length + 2 ? base64Chars.charAt(char3) : '=') +\n(i < str.length + 1 ? base64Chars.charAt(char4) : '='); \n } \n\n return result; \n } \n\nfunction compute(payload) { \n const credentials = payload.clientId + \":\" + payload.clientSecret;\n const encodedCredentials = btoa(credentials);\n return \"Basic \" + encodedCredentials;\n}; function headers(payload) { const credentials = payload.clientId + \":\" + payload.clientSecret; const encodedCredentials = btoa(credentials); return { authorization: \"Basic \" + encodedCredentials }; }; function body(payload) { const body = {grant_type: \"authorization_code\", code: payload.metadata.code, redirect_uri: payload.metadata.redirectUri}; return body; }; function compute(payload) { return { headers: headers(payload), body: body(payload) }; };",
+    "responseCompute": "function compute(payload) { return { accessToken: payload.access_token, refreshToken: payload.refresh_token, expiresIn: payload.expires_in, tokenType: payload.token_type }; }"
+  },
+  "refresh": {
+    "configuration": {
+      "baseUrl": "https://identity.xero.com/",
+      "path": "connect/token",
+      "authMethod": {
+        "type": "None"
+      },
+      "headers": {
+        "connection": [
+          "keep-alive"
+        ],
+        "accept": [
+          "application/json;charset=utf-8"
+        ],
+        "authorization": [
+          "{{ authorization }}"
+        ]
+      },
+      "schemas": {},
+      "samples": {},
+      "responses": [],
+      "content": "form"
+    },
+    "compute": "function btoa(str) { \n const base64Chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; \n\n let result = ''; \n let i = 0; \n\n while (i < str.length) { \n const byte1 = str.charCodeAt(i++); \n const byte2 = i < str.length ? str.charCodeAt(i++) : 0; \n const byte3 = i < str.length ? str.charCodeAt(i++) : 0; \n\n const triplet = (byte1 << 16) | (byte2 << 8) | byte3; \n\n const char1 = (triplet >> 18) & 0x3F; \n const char2 = (triplet >> 12) & 0x3F; \n const char3 = (triplet >> 6) & 0x3F; \n const char4 = triplet & 0x3F; \n\n result += base64Chars.charAt(char1) + base64Chars.charAt(char2) +\n(i < str.length + 2 ? base64Chars.charAt(char3) : '=') +\n(i < str.length + 1 ? base64Chars.charAt(char4) : '='); \n } \n\n return result; \n } \n\nfunction compute(payload) { \n const credentials = payload.clientId + \":\" + payload.clientSecret;\n const encodedCredentials = btoa(credentials);\n return \"Basic \" + encodedCredentials;\n}; function headers(payload) { const credentials = payload.clientId + \":\" + payload.clientSecret; const encodedCredentials = btoa(credentials); return { authorization: \"Basic \" + encodedCredentials }; }; function body(payload) { const body = {grant_type: \"authorization_code\", code: payload.metadata.code, redirect_uri: payload.metadata.redirectUri}; return body; }; function compute(payload) { return { headers: headers(payload), body: body(payload) }; };",
+    "responseCompute": "function compute(payload) { return { accessToken: payload.access_token, refreshToken: payload.refresh_token, expiresIn: payload.expires_in, tokenType: payload.token_type }; }"
+  }
+}
+```
+
+### `/v1/oauth/:platform` Connection OAuth Definitions
+
+### `POST` Requests
+
+The `POST` request has the format:
+
+```json
+{
+  "connectionDefinitionId": "conn_def::F6MxYHq3G2k::8ZIUdCAXTr-dX_CCBXeQDQ",
+  "payload": {
+    "code": "bteTDmQWsmEdKJAwt_AoCESx5GKWO-L6ysZ_6szzdIM",
+    "redirectUri": "http://localhost:34676/callback"
+  },
+  "clientId": "D2E9E9AFED384A248654176E2D0FBA63",
+  "group": "{{$randomAbbreviation}}",
+  "label": "{{$randomAbbreviation}}"
+}
+```
"showSecret": false, + "allowCustomEvents": false, + "oauth": { + "type": "disabled" + } + }, + "paths": { + "id": null, + "event": null, + "payload": null, + "timestamp": "GoCqjy7WcwiDlgs", + "secret": "0JxyiTAUDNW", + "signature": null, + "cursor": null + }, + "testConnection": "conn_mod_def::F5zMkdbRJdc::vhALlwvZR6aI2U8Ub7xCHg", + "active": true +} +``` + +### `/v1/connection-model-definitions` Connection Model Definitions + +The `GET`, `DELETE`, and `POST` responses have the format: + +```json +{ + "_id": "conn_mod_def::F4-WYWQcQjA::DsGW0UWlQe2sUQNe6Pp12g", + "connectionPlatform": "ZJmUuHVr2AN", + "connectionDefinitionId": "tx::Tfow-4FtI9c::QrH-T0eCwk8ql2jSXZr4PA", + "platformVersion": "k3YXF8YWytA4qFeN", + "title": "n0eIi", + "name": "OohEHl1eU6a69", + "action": "GET", + "baseUrl": "kEPVIiax", + "path": "LQ37My6", + "authMethod": { + "type": "BasicAuth", + "username": "pr7kp7Yw2L", + "password": "bNsRtgvoZdpp3s1" + }, + "headers": null, + "queryParams": null, + "samples": {}, + "schemas": { + "type": "object", + "properties": {} + }, + "paths": { + "request": { + "object": "$.body.order" + }, + "response": { + "object": "$.body.order", + "id": "$.body.order.id", + "cursor": null + } + }, + "testConnectionStatus": { + "lastTestedAt": 0, + "state": "untested" + }, + "createdAt": 1697740929577, + "updatedAt": 1697740929577, + "updated": false, + "version": "6.8.14", + "lastModifiedBy": "system", + "deleted": false, + "changeLog": {}, + "tags": [], + "active": true, + "deprecated": false +} +``` + +The `POST` and `PATCH` payloads have the format: + +```json +{ + "connectionPlatform": "ZJmUuHVr2AN", + "connectionDefinitionId": "tx::Tfow-4FtI9c::QrH-T0eCwk8ql2jSXZr4PA", + "platformVersion": "k3YXF8YWytA4qFeN", + "title": "n0eIi", + "name": "OohEHl1eU6a69", + "baseUrl": "kEPVIiax", + "path": "LQ37My6", + "authMethod": { + "type": "BasicAuth", + "username": "pr7kp7Yw2L", + "password": "bNsRtgvoZdpp3s1" + }, + "paths": { + "request": { + "object": "$.body.order" + }, + "response": { + "object": "$.body.order", + "id": "$.body.order.id", + "cursor": null + } + }, + "action": "GET", + "headers": null, + "queryParams": null, + "samples": {}, + "schemas": { + "type": "object", + "properties": {} + }, + "version": "6.8.14" +} +``` + +### `/v1/connection-model-definitions/test/:id` Connection Model Definitions + +The `POST` request has the format: + +```json +{ + "connectionKey": "shopify::testing-connection", + "request": { + "headers": { + "Content-Type": "application/json" + }, + "queryParams": {}, + "pathParams": { + "api_version": "2023-10" + }, + "body": { + "customer": { + "first_name": "Steve2", + "last_name": "Lastnameson2", + "email": "steve.lastnameson3@example.com", + "phone": "+15142543211", + "verified_email": true, + "addresses": [ + { + "address1": "123 Oak St", + "city": "Ottawa", + "province": "ON", + "phone": "555-1212", + "zip": "123 ABC", + "last_name": "Lastnameson", + "first_name": "Mother", + "country": "CA" + } + ], + "password": "newpass", + "password_confirmation": "newpass", + "send_email_welcome": false + } + } + } +} +``` + +Success Response: + +```json +{ + "code": 201, + "status": { + "lastTestedAt": 1701269093942, + "state": "success" + }, + "meta": { + "timestamp": 1701269093944, + "platform": "shopify", + "platformVersion": "2023-10", + "connectionDefinitionId": "conn_def::F5mzNk_Tt9A::aXpKo-F3SAaiQVD16Q__nA", + "connectionKey": "shopify::testing-connection", + "modelName": "create_customer", + "action": "POST" + }, + "response": "{\"customer\":{...}}" +} +``` + 
+Failure Response:
+
+```json
+{
+  "code": 400,
+  "status": {
+    "lastTestedAt": 1701269121866,
+    "state": {
+      "failure": {
+        "message": "Bad Request"
+      }
+    }
+  },
+  "meta": {
+    "timestamp": 1701269121869,
+    "platform": "shopify",
+    "platformVersion": "2023-10",
+    "connectionDefinitionId": "conn_def::F5mzNk_Tt9A::aXpKo-F3SAaiQVD16Q__nA",
+    "connectionKey": "shopify::testing-connection",
+    "modelName": "customers",
+    "action": "GET"
+  },
+  "response": "Bad Request"
+}
+```
+
+### `/v1/connection-model-schemas` Connection Model Schemas
+
+The `GET`, `DELETE`, and `POST` responses have the format:
+
+```json
+{
+  "_id": "conn_mod_sch::F4-WaNQisZg::AEAMd-pwR7KgCOXErebXaQ",
+  "platformId": "job::KhzJ3k6uT5g::YDe89fFipCILugI93iUvEQ",
+  "platformPageId": "pipe::hyTA__88knM::LfKo5A30Q26Sd793-W1Tvg",
+  "connectionPlatform": "AQI8AlWVJpH3KWCbJ",
+  "connectionDefinitionId": "conn_def::9vWLYStbVPk::CfjmdRFAixElTxEQ0GLtVA",
+  "platformVersion": "Q4e9PAKrcat",
+  "modelName": "M6he0O",
+  "schema": {
+    "type": "YOJLofxSfaK",
+    "properties": {
+      "U79rxjh9yu0Pwt": {
+        "type": "EgMEKvSd",
+        "path": "oUWgRmSxZXswD",
+        "description": "QDPEN4sC"
+      },
+      "DnnXOSX5Mbg": {
+        "type": "99jq4t11EleVV",
+        "path": "vJQVV6woZ",
+        "description": null
+      },
+      "FI5lEp": {
+        "type": "kQn8HgiSEM5",
+        "path": null,
+        "description": null
+      },
+      "7wN0c": {
+        "type": "uW5y5z9",
+        "path": "Gj8aqaVpMnXKR",
+        "description": null
+      },
+      "i5KHq7jdoTyDwsAiGs": {
+        "type": "hjcgwrNf",
+        "path": "9yZhifJsNb",
+        "description": null
+      },
+      "0bbcW": {
+        "type": "sPDF1HnnuNASVy",
+        "path": null,
+        "description": null
+      }
+    },
+    "required": null
+  },
+  "paths": {
+    "id": "$.id",
+    "createdAt": "$.created_at",
+    "updatedAt": null
+  },
+  "mapping": null,
+  "createdAt": 1697740961521,
+  "updatedAt": 1697740961521,
+  "updated": false,
+  "version": "1.0.0",
+  "lastModifiedBy": "system",
+  "deleted": false,
+  "changeLog": {},
+  "tags": [],
+  "active": true,
+  "deprecated": false
+}
+```
+
+The `POST` and `PATCH` payloads have the format:
+
+```json
+{
+  "platformId": "job::KhzJ3k6uT5g::YDe89fFipCILugI93iUvEQ",
+  "platformPageId": "pipe::hyTA__88knM::LfKo5A30Q26Sd793-W1Tvg",
+  "connectionPlatform": "AQI8AlWVJpH3KWCbJ",
+  "connectionDefinitionId": "conn_def::9vWLYStbVPk::CfjmdRFAixElTxEQ0GLtVA",
+  "platformVersion": "Q4e9PAKrcat",
+  "modelName": "M6he0O",
+  "schema": {
+    "type": "YOJLofxSfaK",
+    "properties": {
+      "DnnXOSX5Mbg": {
+        "type": "99jq4t11EleVV",
+        "path": "vJQVV6woZ",
+        "description": null
+      },
+      "U79rxjh9yu0Pwt": {
+        "type": "EgMEKvSd",
+        "path": "oUWgRmSxZXswD",
+        "description": "QDPEN4sC"
+      },
+      "FI5lEp": {
+        "type": "kQn8HgiSEM5",
+        "path": null,
+        "description": null
+      },
+      "7wN0c": {
+        "type": "uW5y5z9",
+        "path": "Gj8aqaVpMnXKR",
+        "description": null
+      },
+      "0bbcW": {
+        "type": "sPDF1HnnuNASVy",
+        "path": null,
+        "description": null
+      },
+      "i5KHq7jdoTyDwsAiGs": {
+        "type": "hjcgwrNf",
+        "path": "9yZhifJsNb",
+        "description": null
+      }
+    },
+    "required": null
+  },
+  "paths": {
+    "id": "$.id",
+    "createdAt": "$.created_at",
+    "updatedAt": null
+  },
+  "mapping": null
+}
+```
+
+`GET` `/` (Common CRUD Endpoints) with a query filter such as:
+
+```json
+{
+  "connectionDefinitionId": "conn_def::F5wy_4FXeoA::r1ZvYSOASBivcMw1Triu1Q"
+}
+```
+
+returns rows of the form:
+
+```json
+[
+  {
+    "_id": "conn_mod_sch::F5wzdAvvBM8::_xSJQALySR-bQ_aFFnfyow",
+    "connectionPlatform": "shopify",
+    "connectionDefinitionId": "conn_def::F5wy_4FXeoA::r1ZvYSODSBivcMw1Triu1Q",
+    "platformVersion": "2023-10",
+    "modelName": "DiscountCode",
+    "mapping": {
+      "commonModelName": "GiftCards"
+    },
+    "createdAt": 1701291332748,
+    "updatedAt": 1701291332748,
+    "updated": false,
+    "version": "1.0.0",
+    "lastModifiedBy": "system",
+    "deleted": false,
+    "changeLog": {},
+    "tags": [],
+    "active": true,
+    "deprecated": false
+  }
+]
+```
+
+### `/v1/common-models` Common Models
+
+The `GET`, `DELETE`, and `POST` responses have the format:
+
+```json
+{
+  "_id": "gm::F4-WcODfqMA::fCdamKVUTpi4k5uZRwpJbg",
+  "name": "caohE4",
+  "fields": [
+    {
+      "name": "0LAwsHhUY",
+      "datatype": "Date"
+    },
+    {
+      "name": "CDyzz6pBp",
+      "datatype": "Date"
+    },
+    {
+      "name": "Iz7ofBo9oxAGOsv",
+      "datatype": "String"
+    },
+    {
+      "name": "tYfojTeHq2Oby",
+      "datatype": "Enum",
+      "options": ["3qCLm1Ifg"]
+    },
+    {
+      "name": "ij2C6Ytfhqhnmdz522",
+      "datatype": "Array",
+      "elementType": {
+        "datatype": "Array",
+        "elementType": {
+          "datatype": "Date"
+        }
+      }
+    },
+    {
+      "name": "Ywe0UZlRIhQqfePZZo",
+      "datatype": "Boolean"
+    },
+    {
+      "name": "IXLErlwISvzKxRw5",
+      "datatype": "Expandable",
+      "reference": "YosqAgwYC"
+    },
+    {
+      "name": "5xH2RzL3WAwWGJ",
+      "datatype": "Number"
+    }
+  ],
+  "category": "gcTGn0lM7F7R8xe",
+  "createdAt": 1697740996095,
+  "updatedAt": 1697740996095,
+  "updated": false,
+  "version": "7.6.13",
+  "lastModifiedBy": "system",
+  "deleted": false,
+  "changeLog": {},
+  "tags": [],
+  "active": true,
+  "deprecated": false
+}
+```
+
+The `POST` and `PATCH` payloads have the format:
+
+```json
+{
+  "name": "caohE4",
+  "version": "7.6.13",
+  "fields": [
+    {
+      "name": "0LAwsHhUY",
+      "datatype": "Date"
+    },
+    {
+      "name": "CDyzz6pBp",
+      "datatype": "Date"
+    },
+    {
+      "name": "Iz7ofBo9oxAGOsv",
+      "datatype": "String"
+    },
+    {
+      "name": "tYfojTeHq2Oby",
+      "datatype": "Enum",
+      "options": ["3qCLm1Ifg"]
+    },
+    {
+      "name": "ij2C6Ytfhqhnmdz522",
+      "datatype": "Array",
+      "elementType": {
+        "datatype": "Array",
+        "elementType": {
+          "datatype": "Date"
+        }
+      }
+    },
+    {
+      "name": "Ywe0UZlRIhQqfePZZo",
+      "datatype": "Boolean"
+    },
+    {
+      "name": "IXLErlwISvzKxRw5",
+      "datatype": "Expandable",
+      "reference": "YosqAgwYC"
+    },
+    {
+      "name": "5xH2RzL3WAwWGJ",
+      "datatype": "Number"
+    }
+  ],
+  "category": "gcTGn0lM7F7R8xe"
+}
+```
+
+### `GET /v1/common-models/:id/expand`
+
+Returns the common model referenced by `:id` with all `expandable` fields expanded recursively.
+
+The response will have a format similar to the following:
+
+```json
+{
+  "_id": "cm::F5II13XsG6g::hr2Usj-TSbS7MISxCzrD_A",
+  "name": "8WuZHvoVXO",
+  "fields": [
+    {
+      "name": "870hOUP19sN3Ldp",
+      "datatype": "Expandable",
+      "reference": "S9UN7lXUwfdrttJe",
+      "model": {
+        "_id": "cm::F5II13VDwgA::QFjFJN4gS0y0STwmYYVXjQ",
+        "name": "S9UN7lXUwfdrttJe",
+        "fields": [],
+        "category": "9c26GTSTglf8n",
+        "createdAt": 1698429730950,
+        "updatedAt": 1698429730950,
+        "updated": false,
+        "version": "7.5.7",
+        "lastModifiedBy": "system",
+        "deleted": false,
+        "changeLog": {},
+        "tags": [],
+        "active": true,
+        "deprecated": false
+      }
+    },
+    {
+      "name": "Ze3qMkMRC",
+      "datatype": "Array",
+      "elementType": {
+        "datatype": "Expandable",
+        "reference": "S9UN7lXUwfdrttJe",
+        "model": {
+          "_id": "cm::F5II13VDwgA::QFjFJN4gS0y0STwmYYVXjQ",
+          "name": "S9UN7lXUwfdrttJe",
+          "fields": [],
+          "category": "9c26GTSTglf8n",
+          "createdAt": 1698429730950,
+          "updatedAt": 1698429730950,
+          "updated": false,
+          "version": "7.5.7",
+          "lastModifiedBy": "system",
+          "deleted": false,
+          "changeLog": {},
+          "tags": [],
+          "active": true,
+          "deprecated": false
+        }
+      }
+    }
+  ],
+  "category": "2UBilcFRq4ymekG",
+  "createdAt": 1698429730961,
+  "updatedAt": 1698429730961,
+  "updated": false,
+  "version": "8.8.10",
+  "lastModifiedBy": "system",
+  "deleted": false,
+  "changeLog": {},
+  "tags": [],
+  "active": true,
+  "deprecated": false
+}
+```
diff --git a/api/src/api_payloads.rs b/api/src/api_payloads.rs
new file mode 100644
index 00000000..66fdbd9f
--- /dev/null
+++ b/api/src/api_payloads.rs
@@ -0,0 +1,39 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Deserialize, Serialize, Clone)]
+pub struct RootResponse {
+    pub success: bool,
+}
+
+#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)]
+pub struct ErrorResponse {
+    pub error: String,
+}
+
+impl ErrorResponse {
+    pub fn new<T: ToString>(error: T) -> Self {
+        Self {
+            error: error.to_string(),
+        }
+    }
+}
+
+#[derive(Deserialize, Serialize, Clone)]
+pub struct CreateResponse {
+    pub id: String,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct CreatePayload<T> {
+    pub payload: T,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct UpdateResponse {
+    pub id: String,
+}
+
+#[derive(Deserialize, Serialize, Clone, Debug)]
+pub struct DeleteResponse {
+    pub id: String,
+}
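For orientation, these payload types serialize to the small JSON envelopes returned throughout the REST API. A minimal sketch, not part of the diff, assuming the `api_payloads` types above are in scope and only `serde_json` (already a dependency of this crate):

```rust
use serde_json::json;

// Hypothetical usage: the wire shapes the API hands back to clients.
fn main() {
    let created = CreateResponse {
        id: "conn_def::F4-WTUpbMag::SmHUiaVsQLGsyW5VPcj_bw".to_string(),
    };
    assert_eq!(
        serde_json::to_value(&created).unwrap(),
        json!({ "id": "conn_def::F4-WTUpbMag::SmHUiaVsQLGsyW5VPcj_bw" })
    );

    // ErrorResponse::new accepts anything ToString-able.
    let err = ErrorResponse::new("Bad Request");
    assert_eq!(
        serde_json::to_value(&err).unwrap(),
        json!({ "error": "Bad Request" })
    );
}
```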
"ENGINEERING_ACCOUNT_ID", default = "engineering_account")] + pub engineering_account_id: String, + #[envconfig( + from = "EVENT_ACCESS_PASSWORD", + default = "32KFFT_i4UpkJmyPwY2TGzgHpxfXs7zS" + )] + pub event_access_password: String, + #[envconfig(from = "EVENT_ACCESS_THROUGHPUT", default = "500")] + pub event_access_throughput: u64, + #[envconfig(from = "EVENT_SAVE_BUFFER_SIZE", default = "2048")] + pub event_save_buffer_size: usize, + #[envconfig(from = "EVENT_SAVE_TIMEOUT_SECS", default = "30")] + pub event_save_timeout_secs: u64, + #[envconfig(from = "METRIC_SAVE_CHANNEL_SIZE", default = "2048")] + pub metric_save_channel_size: usize, + #[envconfig(from = "METRIC_SYSTEM_ID", default = "IntegrationOS-Internal-System")] + pub metric_system_id: String, + #[envconfig(from = "SEGMENT_WRITE_KEY")] + pub segment_write_key: Option, + // In the future, we will want to emit events for internal API actions + #[envconfig(from = "EMIT_URL", default = "http://127.0.0.1:3000/emit/")] + pub emit_url: String, + #[envconfig(nested = true)] + pub secrets_config: SecretsConfig, + #[envconfig( + from = "JWT_SECRET", + default = "2thZ2UiOnsibmFtZSI6IlN0YXJ0dXBsa3NoamRma3NqZGhma3NqZGhma3NqZG5jhYtggfaP9ubmVjdGlvbnMiOjUwMDAwMCwibW9kdWxlcyI6NSwiZW5kcG9pbnRzIjo3b4e05e2-f050-401f-9822-44f43f71753c" + )] + pub jwt_secret: String, + #[envconfig(from = "API_VERSION", default = "v1")] + pub api_version: String, + #[envconfig(from = "MOCK_LLM", default = "false")] + pub mock_llm: bool, + #[envconfig(from = "HTTP_CLIENT_TIMEOUT_SECS", default = "30")] + pub http_client_timeout_secs: u64, + #[envconfig(nested = true)] + pub headers: Headers, + #[envconfig(nested = true)] + pub db_config: DatabaseConfig, + #[envconfig(nested = true)] + pub claude_config: ClaudeConfig, + #[envconfig(nested = true)] + pub openai_config: OpenAiConfig, + #[envconfig(nested = true)] + pub redis_config: RedisConfig, +} + +impl Display for Config { + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + writeln!(f, "WORKER_THREADS: {:?}", self.worker_threads)?; + writeln!(f, "DEBUG_MODE: {:?}", self.debug_mode)?; + writeln!(f, "INTERNAL_SERVER_ADDRESS: {}", self.address)?; + writeln!(f, "CACHE_SIZE: {}", self.cache_size)?; + writeln!( + f, + "ACCESS_KEY_CACHE_TTL_SECS: {}", + self.access_key_cache_ttl_secs + )?; + writeln!( + f, + "ACCESS_KEY_WHITELIST_REFRESH_INTERVAL_SECS: {}", + self.access_key_whitelist_refresh_interval_secs + )?; + writeln!(f, "IS_ADMIN: {}", self.is_admin)?; + writeln!(f, "EVENT_ACCESS_PASSWORD: ***")?; + writeln!( + f, + "EVENT_ACCESS_THROUGHPUT: {}", + self.event_access_throughput + )?; + writeln!(f, "EVENT_SAVE_BUFFER_SIZE: {}", self.event_save_buffer_size)?; + writeln!( + f, + "EVENT_SAVE_TIMEOUT_SECS: {}", + self.event_save_timeout_secs + )?; + writeln!( + f, + "METRIC_SAVE_CHANNEL_SIZE: {}", + self.metric_save_channel_size + )?; + writeln!(f, "METRIC_SYSTEM_ID: {}", self.metric_system_id)?; + writeln!(f, "SEGMENT_WRITE_KEY: ***")?; + writeln!(f, "EMIT_URL: {}", self.emit_url)?; + writeln!(f, "JWT_SECRET: ***")?; + write!(f, "{}", self.secrets_config)?; + writeln!(f, "API_VERSION: {}", self.api_version)?; + writeln!(f, "MOCK_LLM: {}", self.mock_llm)?; + writeln!(f, "{}", self.headers)?; + writeln!(f, "{}", self.db_config)?; + writeln!(f, "{}", self.claude_config)?; + writeln!(f, "{}", self.openai_config)?; + writeln!(f, "{}", self.redis_config) + } +} + +#[derive(Envconfig, Default, Clone)] +pub struct Headers { + #[envconfig(from = "HEADER_AUTH", default = "x-integrationos-secret")] + pub auth_header: String, + 
#[envconfig(from = "HEADER_CONNECTION", default = "x-integrationos-connection-key")] + pub connection_header: String, + #[envconfig(from = "HEADER_CUSTOM_MAP", default = "x-integrationos-custom-map")] + pub custom_map_header: String, + #[envconfig( + from = "HEADER_ENABLE_PASSTHROUGH", + default = "x-integrationos-enable-passthrough" + )] + pub enable_passthrough_header: String, + #[envconfig( + from = "HEADER_INCLUDE_OVERFLOW", + default = "x-integrationos-include-overflow" + )] + pub include_overflow_header: String, + #[envconfig( + from = "HEADER_DYNAMIC_PLATFORM", + default = "x-integrationos-dynamic-platform" + )] + pub dynamic_platform_header: String, + #[envconfig( + from = "HEADER_RATE_LIMIT_LIMIT", + default = "x-integrationos-rate-limit-limit" + )] + pub rate_limit_limit: String, + #[envconfig( + from = "HEADER_RATE_LIMIT_REMAINING", + default = "x-integrationos-rate-limit-remainings" + )] + pub rate_limit_remaining: String, + #[envconfig( + from = "HEADER_RATE_LIMIT_REST", + default = "x-integrationos-rate-limit-reset" + )] + pub rate_limit_reset: String, +} + +impl Headers { + pub fn new() -> Self { + Self::default() + } +} + +impl Display for Headers { + fn fmt(&self, f: &mut Formatter<'_>) -> Result { + writeln!(f, "HEADER_AUTH: {}", self.auth_header)?; + writeln!(f, "HEADER_CONNECTION: {}", self.connection_header)?; + writeln!(f, "HEADER_CUSTOM_MAP: {}", self.custom_map_header)?; + writeln!( + f, + "HEADER_INCLUDE_PASSTHROUGH: {}", + self.enable_passthrough_header + )?; + writeln!( + f, + "HEADER_INCLUDE_OVERFLOW: {}", + self.include_overflow_header + )?; + writeln!( + f, + "HEADER_DYNAMIC_PLATFORM: {}", + self.dynamic_platform_header + )?; + writeln!(f, "HEADER_RATE_LIMIT_LIMIT: {}", self.rate_limit_limit)?; + writeln!( + f, + "HEADER_RATE_LIMIT_REMAINING: {}", + self.rate_limit_remaining + )?; + writeln!(f, "HEADER_RATE_LIMIT_RESET: {}", self.rate_limit_reset) + } +} diff --git a/api/src/endpoints/common_enum.rs b/api/src/endpoints/common_enum.rs new file mode 100644 index 00000000..3eb6dd2f --- /dev/null +++ b/api/src/endpoints/common_enum.rs @@ -0,0 +1,46 @@ +use super::{ApiError, ReadResponse}; +use crate::{internal_server_error, server::AppState, util::shape_mongo_filter}; + +use axum::{ + extract::{Query, State}, + Json, +}; +use integrationos_domain::{algebra::adapter::StoreAdapter, common_model::CommonEnum}; + +use shape_mongo_filter::DELETED_STR; +use std::{collections::BTreeMap, sync::Arc}; +use tokio::try_join; +use tracing::error; + +pub async fn read( + query: Option>>, + State(state): State>, +) -> Result>, ApiError> { + let mut query = shape_mongo_filter(query, None, None); + query.filter.remove(DELETED_STR); + + let store = &state.app_stores.common_enum; + let count = store.count(query.filter.clone(), None); + let find = store.get_many( + Some(query.filter), + None, + None, + Some(query.limit), + Some(query.skip), + ); + + let res = match try_join!(count, find) { + Ok((total, rows)) => ReadResponse { + rows, + skip: query.skip, + limit: query.limit, + total, + }, + Err(e) => { + error!("Error reading from store: {e}"); + return Err(internal_server_error!()); + } + }; + + Ok(Json(res)) +} diff --git a/api/src/endpoints/common_model.rs b/api/src/endpoints/common_model.rs new file mode 100644 index 00000000..a5490c0b --- /dev/null +++ b/api/src/endpoints/common_model.rs @@ -0,0 +1,183 @@ +use super::{create, delete, read, update, ApiResult, CrudHook, CrudRequest}; +use crate::{ + internal_server_error, not_found, + server::{AppState, AppStores}, +}; +use 
axum::{ + async_trait, + extract::{Json, Path, State}, + routing::{get, patch, post}, + Router, +}; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + api_model_config::Lang, + common::{ + common_model::{CommonModel, Field}, + event_access::EventAccess, + json_schema::JsonSchema, + mongo::MongoDbStore, + }, + id::{prefix::IdPrefix, Id}, + IntegrationOSError, +}; +use mongodb::bson::doc; +use semver::Version; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::{collections::HashMap, sync::Arc}; +use tracing::error; + +pub fn get_router() -> Router> { + Router::new() + .route( + "/", + post(create::).get(read::), + ) + .route( + "/:id", + patch(update::) + .delete(delete::), + ) + .route("/:id/schema", get(as_json_schema)) + .route("/:id/expand", get(expand)) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + pub name: String, + pub version: Version, + pub fields: Vec, + pub category: String, + pub sample: Value, + pub primary: bool, +} + +#[async_trait] +impl CrudHook for CreateRequest { + async fn after_create_hook( + record: &CommonModel, + stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + let rust = record.generate_as(&Lang::Rust); + let typescript = record.generate_as(&Lang::TypeScript); + let interface = + HashMap::from_iter(vec![(Lang::Rust, rust), (Lang::TypeScript, typescript)]); + + update_interface(interface, record, &stores.common_model).await + } + + async fn after_update_hook( + record: &CommonModel, + stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + let typescript = record.generate_as(&Lang::TypeScript); + let rust = record.generate_as(&Lang::Rust); + let interface = + HashMap::from_iter(vec![(Lang::Rust, rust), (Lang::TypeScript, typescript)]); + + update_interface(interface, record, &stores.common_model).await + } +} + +impl CrudRequest for CreateRequest { + type Output = CommonModel; + type Error = (); + + fn into_public(self) -> Result { + let mut record = Self::Output { + id: Id::now(IdPrefix::CommonModel), + name: self.name, + fields: self.fields, + sample: self.sample, + category: self.category, + primary: self.primary, + interface: Default::default(), + record_metadata: Default::default(), + }; + record.record_metadata.version = self.version; + Ok(record) + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, record: &mut Self::Output) { + record.name = self.name; + record.record_metadata.version = self.version; + record.fields = self.fields; + record.category = self.category; + record.sample = self.sample; + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.common_model.clone() + } +} + +async fn expand(Path(id): Path, State(state): State>) -> ApiResult { + let Some(cm) = state + .app_stores + .common_model + .get_one_by_id(&id.to_string()) + .await + .map_err(|e| { + error!("Could not fetch common model: {e}"); + internal_server_error!() + })? 
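// The hooks above regenerate the model's language bindings after every create and
// update. A sketch of the map they build, assuming (as the calls above suggest)
// that `generate_as` returns the rendered source for one language as a String:
//
//     use std::collections::HashMap;
//
//     fn interfaces(record: &CommonModel) -> HashMap<Lang, String> {
//         HashMap::from_iter([
//             (Lang::Rust, record.generate_as(&Lang::Rust)),
//             (Lang::TypeScript, record.generate_as(&Lang::TypeScript)),
//         ])
//     }
//
// `update_interface` below then persists the map onto the document's `interface`
// field with `$set`, intentionally swallowing errors so a failed render never
// fails the request itself.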
+ else { + return Err(not_found!("Common model")); + }; + + let expanded = cm + .expand_all( + state.app_stores.common_model.clone(), + state.app_stores.common_enum.clone(), + ) + .await + .map_err(|e| { + error!("Could not expand all: {e}"); + internal_server_error!() + })?; + + Ok(Json(expanded)) +} + +async fn as_json_schema(path: Path, state: State>) -> ApiResult { + let Json(cm) = expand(path, state).await?; + + match CommonModel::try_into(cm) { + Ok(schema) => Ok(Json(schema)), + Err(e) => { + error!("Could not convert to json schema: {e}"); + Err(internal_server_error!()) + } + } +} + +async fn update_interface( + interface: HashMap, + record: &CommonModel, + cm_store: &MongoDbStore, +) -> Result<(), IntegrationOSError> { + match bson::to_bson(&interface) { + Ok(interface) => { + cm_store + .update_one( + &record.id.to_string(), + doc! {"$set": {"interface": interface}}, + ) + .await + .ok(); + + Ok(()) + } + Err(e) => { + error!("Could not convert interface to bson: {e}"); + Ok(()) + } + } +} diff --git a/api/src/endpoints/connection.rs b/api/src/endpoints/connection.rs new file mode 100644 index 00000000..00763fe2 --- /dev/null +++ b/api/src/endpoints/connection.rs @@ -0,0 +1,429 @@ +use std::{collections::HashMap, sync::Arc}; + +use anyhow::{bail, Result}; +use axum::{ + extract::{Path, State}, + http::StatusCode, + routing::{delete as axum_delete, get, patch, post}, + Extension, Json, Router, +}; +use chrono::Utc; +use convert_case::{Case, Casing}; +use http::HeaderMap; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + connection_definition::ConnectionDefinition, event_access::EventAccess, + mongo::MongoDbStore, record_metadata::RecordMetadata, settings::Settings, Connection, + Throughput, + }, + id::{prefix::IdPrefix, Id}, +}; +use mongodb::bson::doc; +use mongodb::bson::Regex; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use tracing::error; +use validator::Validate; + +use crate::{ + api_payloads::{DeleteResponse, ErrorResponse, UpdateResponse}, + bad_request, + endpoints::event_access::{generate_event_access, CreateEventAccessPayloadWithOwnership}, + internal_server_error, not_found, + server::{AppState, AppStores}, +}; + +use super::{delete, read, CrudRequest}; + +pub fn get_router() -> Router> { + Router::new() + .route("/", post(create_connection)) + .route("/", get(read::)) + .route("/:id", patch(update_connection)) + .route("/:id", axum_delete(delete_connection)) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Validate)] +#[serde(rename_all = "camelCase")] +pub struct CreateConnectionPayload { + pub connection_definition_id: Id, + pub name: String, + pub group: String, + pub auth_form_data: HashMap, + pub active: bool, +} + +async fn test_connection( + state: &AppState, + connection_config: &ConnectionDefinition, + auth_form_data_value: &Value, +) -> Result<()> { + if let Some(ref test_connection_model_config_id) = connection_config.test_connection { + let test_connection_model_config = state + .app_stores + .model_config + .get_one_by_id(&test_connection_model_config_id.to_string()) + .await?; + + let test_connection_model_config = match test_connection_model_config { + Some(config) => Arc::new(config), + None => { + return Err(anyhow::anyhow!( + "Test connection model config {} not found", + test_connection_model_config_id + )) + } + }; + + let res = state + .extractor_caller + .execute_model_definition( + &test_connection_model_config, + HeaderMap::new(), + &HashMap::new(), + 
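// `create_connection` below derives the connection key as
// `{environment}::{platform}::{kebab-cased group}`. A quick illustration of the
// kebab-casing with the `convert_case` crate imported above (values made up):
//
//     use convert_case::{Case, Casing};
//
//     let key = format!("{}::{}::{}", "test", "stripe", "Acme Corp".to_case(Case::Kebab));
//     assert_eq!(key, "test::stripe::acme-corp");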
&Arc::new(auth_form_data_value.clone()), + None, + ) + .await?; + + if !res.status().is_success() { + bail!("Test connections failed: {}", res.status()); + } + } + + Ok(()) +} + +impl CrudRequest for CreateConnectionPayload { + type Output = Connection; + type Error = (); + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _record: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.connection + } + + fn into_public(self) -> Result { + unimplemented!() + } +} + +pub async fn create_connection( + Extension(user_event_access): Extension>, + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + if let Err(validation_errors) = req.validate() { + return Err(bad_request!(format!( + "Invalid payload: {:?}", + validation_errors + ))); + } + + let connection_config = match state + .app_stores + .connection_config + .get_one(doc! { + "_id": req.connection_definition_id.to_string(), + "deleted": false + }) + .await + { + Ok(Some(data)) => data, + Ok(None) => { + return Err(not_found!("Connection definition")); + } + Err(e) => { + error!( + "Error fetching connection definition in connection create: {:?}", + e + ); + + return Err(internal_server_error!()); + } + }; + + let key = format!( + "{}::{}::{}", + user_event_access.environment, + connection_config.platform, + req.group.to_case(Case::Kebab) + ); + + let event_access = generate_event_access( + state.config.clone(), + CreateEventAccessPayloadWithOwnership { + name: req.name.clone(), + group: Some(req.group.clone()), + platform: connection_config.platform.clone(), + namespace: None, + connection_type: connection_config.r#type.clone(), + environment: user_event_access.environment, + paths: connection_config.paths.clone(), + ownership: user_event_access.ownership.clone(), + }, + ) + .map_err(|e| { + error!("Error creating event access for connection: {:?}", e); + + internal_server_error!() + })?; + + let auth_form_data_value = serde_json::to_value(req.auth_form_data.clone()).map_err(|e| { + error!("Error serializing auth form data for connection: {:?}", e); + + internal_server_error!() + })?; + + test_connection(&state, &connection_config, &auth_form_data_value) + .await + .map_err(|e| { + error!( + "Error executing model definition in connections create for connection testing: {:?}", + e + ); + + bad_request!("Invalid connection credentials") + })?; + + let secret_result = state + .secrets_client + .encrypt( + user_event_access.ownership.id.to_string(), + &auth_form_data_value, + ) + .await + .map_err(|e| { + error!("Error creating secret for connection: {:?}", e); + + internal_server_error!() + })?; + + let connection = Connection { + id: Id::new(IdPrefix::Connection, Utc::now()), + platform_version: connection_config.clone().platform_version, + connection_definition_id: req.connection_definition_id, + r#type: connection_config.to_connection_type(), + name: req.name, + key: key.clone().into(), + group: req.group, + platform: connection_config.platform.into(), + environment: event_access.environment, + secrets_service_id: secret_result.id, + event_access_id: event_access.id, + access_key: event_access.access_key, + settings: connection_config.settings, + throughput: Throughput { key, limit: 100 }, + ownership: event_access.ownership, + oauth: None, + record_metadata: RecordMetadata::default(), + }; + + state + .app_stores + .connection + .create_one(&connection) + .await + .map_err(|e| { + error!("Error 
creating connection: {:?}", e);
+
+            internal_server_error!()
+        })?;
+
+    Ok(Json(connection))
+}
+
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Validate)]
+#[serde(rename_all = "camelCase")]
+pub struct UpdateConnectionPayload {
+    pub name: Option<String>,
+    pub group: Option<String>,
+    pub settings: Option<Settings>,
+    pub throughput: Option<Throughput>,
+    pub auth_form_data: Option<HashMap<String, String>>,
+    pub active: Option<bool>,
+}
+
+pub async fn update_connection(
+    Extension(event_access): Extension<Arc<EventAccess>>,
+    Path(id): Path<String>,
+    State(state): State<Arc<AppState>>,
+    Json(req): Json<UpdateConnectionPayload>,
+) -> Result<Json<UpdateResponse>, (StatusCode, Json<ErrorResponse>)> {
+    let Some(mut connection) = (match state.app_stores.connection.get_one_by_id(&id).await {
+        Ok(connection) => connection,
+        Err(e) => {
+            error!("Error fetching connection for update: {:?}", e);
+
+            return Err(internal_server_error!());
+        }
+    }) else {
+        return Err(not_found!("Connection"));
+    };
+
+    if connection.ownership != event_access.ownership
+        || connection.environment != event_access.environment
+    {
+        return Err(not_found!("Connection"));
+    }
+
+    if let Some(name) = req.name {
+        connection.name = name;
+    }
+
+    if let Some(group) = req.group {
+        connection.group = group.clone();
+        connection.key = format!("{}::{}", connection.platform, group).into();
+    }
+
+    if let Some(settings) = req.settings {
+        connection.settings = settings;
+    }
+
+    if let Some(throughput) = req.throughput {
+        connection.throughput = throughput;
+    }
+
+    if let Some(auth_form_data) = req.auth_form_data {
+        let auth_form_data_value = serde_json::to_value(auth_form_data).map_err(|e| {
+            error!(
+                "Error serializing auth form data for connection update: {:?}",
+                e
+            );
+
+            internal_server_error!()
+        })?;
+
+        let connection_config = match state
+            .app_stores
+            .connection_config
+            .get_one(doc! {
+                "_id": connection.connection_definition_id.to_string(),
+                "deleted": false
+            })
+            .await
+        {
+            Ok(Some(data)) => data,
+            Ok(None) => {
+                return Err(not_found!("Connection definition"));
+            }
+            Err(e) => {
+                error!(
+                    "Error fetching connection definition in connection update: {:?}",
+                    e
+                );
+
+                return Err(internal_server_error!());
+            }
+        };
+
+        test_connection(&state, &connection_config, &auth_form_data_value)
+            .await
+            .map_err(|e| {
+                error!("Error executing model definition in connections update for connection testing: {:?}", e);
+
+                bad_request!("Invalid connection credentials")
+            })?;
+
+        let secret_result = state
+            .secrets_client
+            .encrypt(event_access.ownership.id.to_string(), &auth_form_data_value)
+            .await
+            .map_err(|e| {
+                error!("Error creating secret for connection update: {:?}", e);
+
+                internal_server_error!()
+            })?;
+
+        connection.secrets_service_id = secret_result.id;
+    }
+
+    if let Some(active) = req.active {
+        connection.record_metadata.active = active;
+    }
+
+    // Mark the metadata as updated before serializing, so the refreshed audit
+    // fields are included in the persisted document.
+    connection
+        .record_metadata
+        .mark_updated(&event_access.ownership.id);
+
+    let Ok(document) = bson::to_document(&connection) else {
+        error!("Could not serialize connection into document");
+
+        return Err(internal_server_error!());
+    };
+
+    match state
+        .app_stores
+        .connection
+        .update_one(
+            &id,
+            doc! 
{ + "$set": document + }, + ) + .await + { + Ok(_) => Ok(Json(UpdateResponse { id })), + Err(e) => { + error!("Error updating connection: {:?}", e); + + Err(internal_server_error!()) + } + } +} + +pub async fn delete_connection( + Extension(user_event_access): Extension>, + Path(id): Path, + State(state): State>, +) -> Result, (StatusCode, Json)> { + let Json(found_connection) = delete::( + Some(Extension(user_event_access.clone())), + Path(id.clone()), + State(state.clone()), + ) + .await?; + + let partial_cursor_key = format!( + "{}::{}::{}", + user_event_access.ownership.id, id, found_connection.key + ); + + let mongo_regex = Regex { + pattern: format!("^{}::", partial_cursor_key.replace('.', "\\.")), + options: "i".to_string(), + }; + + // Delete cursors by adding "deleted/" to the key + state + .app_stores + .cursors + .update_many_with_aggregation_pipeline( + doc! { + "key": mongo_regex + }, + &Vec::from([doc! { + "$set": { + "key": { + "$concat": ["deleted/", "$key"] + } + } + }]), + ) + .await + .map_err(|e| { + error!("Error deleting cursors for connection {:?}: {:?}", id, e); + + internal_server_error!() + })?; + + Ok(Json(DeleteResponse { id })) +} diff --git a/api/src/endpoints/connection_definition.rs b/api/src/endpoints/connection_definition.rs new file mode 100644 index 00000000..87866f84 --- /dev/null +++ b/api/src/endpoints/connection_definition.rs @@ -0,0 +1,349 @@ +use super::{ + create, delete, update, ApiResult, CachedRequest, CrudHook, CrudRequest, ReadResponse, +}; +use crate::{ + internal_server_error, not_found, + server::{AppState, AppStores}, +}; +use axum::{ + extract::{Path, State}, + routing::{patch, post}, + Json, Router, +}; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + api_model_config::AuthMethod, + connection_definition::{ + AuthSecret, ConnectionDefinition, ConnectionDefinitionType, ConnectionForm, + FormDataItem, Frontend, Paths, Spec, + }, + event_access::EventAccess, + mongo::MongoDbStore, + record_metadata::RecordMetadata, + settings::Settings, + }, + connection_definition::ConnectionStatus, + connection_model_definition::{ConnectionModelDefinition, CrudAction}, + id::{prefix::IdPrefix, Id}, +}; +use moka::future::Cache; +use mongodb::bson::doc; +use serde::{Deserialize, Serialize}; +use std::{collections::BTreeMap, sync::Arc}; +use tracing::error; + +pub fn get_router() -> Router> { + Router::new() + .route("/", post(create::)) + .route( + "/:id", + patch(update::) + .delete(delete::), + ) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + pub platform: String, + pub platform_version: String, + #[serde(default)] + pub status: ConnectionStatus, + pub r#type: ConnectionDefinitionType, + pub name: String, + pub description: String, + pub category: String, + pub image: String, + pub tags: Vec, + pub authentication: Vec, + pub auth_method: Option, + pub settings: Settings, + pub paths: Paths, + pub test_connection: Option, + pub active: bool, +} + +impl CrudHook for CreateRequest {} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +pub struct AuthenticationItem { + pub name: String, + pub label: String, + pub r#type: String, + pub placeholder: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct UpdateFields { + pub active: bool, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = 
"camelCase")] +pub struct PublicGetConnectionDetailsResponse { + pub platform: String, + pub status: ConnectionStatus, + pub supported_actions: Vec, + pub oauth: PublicConnectionDataOauth, + pub pagination: bool, + pub filtration: bool, + pub sorting: bool, + pub caveats: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct PublicConnectionDataCaveat { + pub name: String, + pub action: Option, + pub comments: Vec, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct PublicConnectionDataOauth { + pub enabled: bool, + pub scopes: String, +} + +pub async fn public_get_connection_details( + Path((common_model, platform_name)): Path<(String, String)>, + State(state): State>, +) -> ApiResult { + let Some(connection_definition) = state + .app_stores + .connection_config + .get_one(doc! { + "platform": &platform_name, + }) + .await + .map_err(|e| { + error!("Error reading from connection definitions: {e}"); + internal_server_error!() + })? + else { + return Err(not_found!("Connection definition")); + }; + + let connection_model_definitions = state + .app_stores + .model_config + .get_many( + Some(doc! { + "connectionPlatform": { + "$regex": format!("^{}$", &platform_name), + "$options": "i" + }, + "mapping.commonModelName": { + "$regex": format!("^{}$", &common_model), + "$options": "i" + }, + "actionName": { + "$in": [ + "create", + "update", + "getMany", + "getOne", + "getCount", + "delete" + ] + } + }), + None, + None, + None, + None, + ) + .await + .map_err(|e| { + error!("Error reading from connection model definitions: {e}"); + internal_server_error!() + })?; + + let supported_actions = connection_model_definitions + .clone() + .into_iter() + .map(|definition| definition.action_name) + .collect::>(); + + let oauth_enabled = matches!(connection_definition.auth_method, Some(AuthMethod::OAuth)); + + let scopes = if oauth_enabled { + let connection_oauth_definition = state + .app_stores + .oauth_config + .get_one(doc! { + "connectionPlatform": &platform_name, + }) + .await + .map_err(|e| { + error!("Error reading from connection definitions: {e}"); + internal_server_error!() + })? + .ok_or_else(|| not_found!("Connection OAuth definition"))?; + + connection_oauth_definition.frontend.scopes + } else { + String::new() + }; + + let public_connection_details_record = state + .app_stores + .public_connection_details + .get_one(doc! { + "platform": &platform_name, + }) + .await + .map_err(|e| { + error!("Error reading from public connection details: {e}"); + internal_server_error!() + })? 
+ .ok_or_else(|| not_found!("Public Connection Details"))?; + + let model_features = public_connection_details_record + .models + .iter() + .find(|model| model.name.to_lowercase() == common_model.to_lowercase()) + .ok_or_else(|| not_found!("Model Features"))?; + + let caveats = + public_connection_details_record + .caveats + .into_iter() + .fold(vec![], |mut v, caveat| { + match caveat.connection_model_definition_id { + Some(cmd_id) => { + let connection_model_definition = connection_model_definitions.iter().find( + |definition: &&ConnectionModelDefinition| { + definition.id.to_string() == cmd_id + }, + ); + + if let Some(definition) = connection_model_definition { + v.push(PublicConnectionDataCaveat { + name: definition.title.clone(), + action: Some(definition.action_name.clone()), + comments: caveat.comments, + }); + } + } + None => { + v.push(PublicConnectionDataCaveat { + name: "General".to_string(), + action: None, + comments: caveat.comments, + }); + } + } + v + }); + + Ok(Json(PublicGetConnectionDetailsResponse { + platform: connection_definition.platform, + status: connection_definition.status, + oauth: PublicConnectionDataOauth { + enabled: oauth_enabled, + scopes, + }, + supported_actions, + pagination: model_features.pagination, + filtration: model_features.filtration, + sorting: model_features.sorting, + caveats, + })) +} + +impl CrudRequest for CreateRequest { + type Output = ConnectionDefinition; + type Error = (); + + fn into_public(self) -> Result { + let auth_secrets: Vec = self + .authentication + .iter() + .map(|item| AuthSecret { + name: item.name.to_string(), + }) + .collect(); + + let connection_form_items: Vec = self + .authentication + .iter() + .map(|item| FormDataItem { + name: item.name.clone(), + r#type: item.r#type.clone(), + label: item.label.clone(), + placeholder: item.placeholder.clone(), + }) + .collect(); + + let connection_form = ConnectionForm { + name: "Connect".to_string(), + description: "Securely connect your account".to_string(), + form_data: connection_form_items, + }; + + let key = format!("api::{}::{}", self.platform, self.platform_version); + + let mut record = Self::Output { + id: Id::now(IdPrefix::ConnectionDefinition), + platform_version: self.platform_version, + platform: self.platform.clone(), + status: self.status, + r#type: self.r#type, + name: self.name.clone(), + key, + frontend: Frontend { + spec: Spec { + title: self.name, + description: self.description, + platform: self.platform, + category: self.category, + image: self.image, + tags: self.tags, + }, + connection_form, + }, + test_connection: self.test_connection, + auth_secrets, + auth_method: self.auth_method, + paths: self.paths, + settings: self.settings, + hidden: false, + record_metadata: RecordMetadata::default(), + }; + + record.record_metadata.active = self.active; + Ok(record) + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, record: &mut Self::Output) { + record.name = self.name; + record.frontend.spec.description = self.description; + record.frontend.spec.category = self.category; + record.frontend.spec.image = self.image; + record.frontend.spec.tags = self.tags; + record.test_connection = self.test_connection; + record.platform = self.platform; + record.record_metadata.active = self.active; + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.connection_config + } +} + +impl CachedRequest for CreateRequest { + type Output = ConnectionDefinition; + + fn get_cache( + state: Arc, + ) -> 
Arc>, Arc>>> { + state.connection_definitions_cache.clone() + } +} diff --git a/api/src/endpoints/connection_model_definition.rs b/api/src/endpoints/connection_model_definition.rs new file mode 100644 index 00000000..2dc5a73d --- /dev/null +++ b/api/src/endpoints/connection_model_definition.rs @@ -0,0 +1,399 @@ +use super::{create, delete, read, update, CrudHook, CrudRequest}; +use crate::{ + api_payloads::ErrorResponse, + internal_server_error, not_found, + server::{AppState, AppStores}, + service_unavailable, +}; +use axum::{ + extract::{Path, State}, + http::StatusCode, + routing::{patch, post}, + Extension, Json, Router, +}; +use bson::SerializerOptions; +use chrono::Utc; +use http::HeaderMap; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + api_model_config::{ + ApiModelConfig, AuthMethod, ModelPaths, ResponseBody, SamplesInput, SchemasInput, + }, + connection_model_definition::{ + ConnectionModelDefinition, CrudAction, CrudMapping, ExtractorConfig, PlatformInfo, + TestConnection, TestConnectionState, + }, + event_access::EventAccess, + mongo::MongoDbStore, + }, + get_secret_request::GetSecretRequest, + id::{prefix::IdPrefix, Id}, +}; +use mongodb::bson::doc; +use semver::Version; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::{ + collections::{BTreeMap, HashMap}, + sync::Arc, +}; +use tracing::error; + +pub fn get_router() -> Router> { + Router::new() + .route( + "/", + post(create::) + .get(read::), + ) + .route( + "/:id", + patch(update::) + .delete(delete::), + ) +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct TestConnectionPayload { + pub connection_key: String, + pub request: TestConnectionRequest, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct TestConnectionRequest { + #[serde( + with = "http_serde_ext::header_map::option", + skip_serializing_if = "Option::is_none", + default + )] + pub headers: Option, + pub query_params: Option>, + pub path_params: Option>, + pub body: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct TestConnectionResponse { + #[serde(with = "http_serde_ext::status_code")] + pub code: StatusCode, + pub status: TestConnection, + pub meta: Meta, + pub response: String, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct Meta { + pub timestamp: u64, + pub platform: String, + pub platform_version: String, + pub connection_definition_id: Id, + pub connection_key: String, + pub model_name: String, + #[serde(with = "http_serde_ext::method")] + pub action: http::Method, +} + +pub async fn test_connection_model_definition( + Extension(event_access): Extension>, + Path(id): Path, + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + let connection = match state + .app_stores + .connection + .get_one(doc! { + "key": req.connection_key, + "ownership.buildableId": event_access.ownership.id.as_ref(), + "deleted": false + }) + .await + { + Ok(Some(data)) => data, + Ok(None) => { + return Err(not_found!("Connection Record")); + } + Err(e) => { + error!("Error fetching connection in testing endpoint: {:?}", e); + + return Err(internal_server_error!()); + } + }; + + let connection_model_definition = match state + .app_stores + .model_config + .get_one(doc! 
{
+            "_id": id,
+            "active": false, // Cannot test an active model definition
+            "deleted": false
+        })
+        .await
+    {
+        Ok(Some(data)) => data,
+        Ok(None) => {
+            return Err(not_found!("Inactive Connection Model Definition Record"));
+        }
+        Err(e) => {
+            error!(
+                "Error fetching inactive connection model definition in testing endpoint: {:?}",
+                e
+            );
+
+            return Err(internal_server_error!());
+        }
+    };
+
+    let mut secret_result = state
+        .secrets_client
+        .decrypt(&GetSecretRequest {
+            buildable_id: connection.ownership.id.to_string(),
+            id: connection.secrets_service_id.clone(),
+        })
+        .await
+        .map_err(|e| {
+            error!("Error decrypting secret for connection: {:?}", e);
+
+            internal_server_error!()
+        })?;
+
+    let request_string: String = serde_json::to_string(&req.request.clone()).map_err(|e| {
+        error!(
+            "Error converting request to json string in testing endpoint: {:?}",
+            e
+        );
+
+        internal_server_error!()
+    })?;
+
+    // Add path params to template context
+    if let Some(path_params) = req.request.path_params {
+        for (key, val) in path_params {
+            secret_result[key] = Value::String(val);
+        }
+    }
+
+    let request_body_vec = req.request.body.map(|body| body.to_string().into_bytes());
+    let model_execution_result = state
+        .extractor_caller
+        .execute_model_definition(
+            &Arc::new(connection_model_definition.clone()),
+            req.request.headers.unwrap_or_default(),
+            &req.request.query_params.unwrap_or_default(),
+            &Arc::new(secret_result),
+            request_body_vec,
+        )
+        .await
+        .map_err(|e| {
+            error!("Error executing connection model definition: {:?}", e);
+
+            service_unavailable!()
+        })?;
+
+    let status_code = model_execution_result.status();
+
+    let response_body = model_execution_result.text().await.map_err(|e| {
+        error!("Could not get response text from test connection: {e}");
+        service_unavailable!()
+    })?;
+
+    let status = match status_code {
+        status if status.is_success() => TestConnection {
+            last_tested_at: Utc::now().timestamp_millis(),
+            state: TestConnectionState::Success {
+                response: response_body.clone(),
+                request_payload: request_string,
+            },
+        },
+        _ => TestConnection {
+            last_tested_at: Utc::now().timestamp_millis(),
+            state: TestConnectionState::Failure {
+                message: response_body.clone(),
+                request_payload: request_string,
+            },
+        },
+    };
+
+    let status_bson = bson::to_bson_with_options(
+        &status,
+        SerializerOptions::builder().human_readable(false).build(),
+    )
+    .map_err(|e| {
+        error!("Error serializing status to BSON: {:?}", e);
+        internal_server_error!()
+    })?;
+
+    state
+        .app_stores
+        .model_config
+        .update_one(
+            &connection_model_definition.id.to_string(),
+            doc! 
{ + "$set": { + "testConnectionStatus": status_bson + } + }, + ) + .await + .map_err(|e| { + error!( + "Error updating connection model definition in testing endpoint: {:?}", + e + ); + + internal_server_error!() + })?; + + let response = TestConnectionResponse { + code: status_code, + status, + response: response_body, + meta: Meta { + timestamp: Utc::now().timestamp_millis() as u64, + platform: connection.platform.to_string(), + platform_version: connection.platform_version.clone(), + connection_definition_id: connection_model_definition.connection_definition_id, + connection_key: connection.key.to_string(), + model_name: connection_model_definition.model_name.clone(), + action: connection_model_definition.action.clone(), + }, + }; + + Ok(Json(response)) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + pub connection_platform: String, + pub connection_definition_id: Id, + pub platform_version: String, + pub title: String, + pub name: String, + pub model_name: String, + pub base_url: String, + pub path: String, + pub auth_method: AuthMethod, + pub action_name: CrudAction, + #[serde(with = "http_serde_ext::method", rename = "action")] + pub http_method: http::Method, + #[serde( + with = "http_serde_ext::header_map::option", + skip_serializing_if = "Option::is_none", + default + )] + pub headers: Option, + pub query_params: Option>, + #[serde(flatten, skip_serializing_if = "Option::is_none")] + pub extractor_config: Option, + pub schemas: SchemasInput, + pub samples: SamplesInput, + pub responses: Vec, + pub version: Version, // the event-inc-version + pub is_default_crud_mapping: Option, + pub mapping: Option, + pub paths: Option, +} + +impl CrudHook for CreateRequest {} + +impl CrudRequest for CreateRequest { + type Output = ConnectionModelDefinition; + type Error = (); + + fn into_public(self) -> Result { + let key = format!( + "api::{}::{}::{}::{}::{}::{}", + self.connection_platform, + self.platform_version, + self.model_name, + self.action_name, + self.path, + self.name + ) + .to_lowercase(); + + let mut record = Self::Output { + id: Id::new(IdPrefix::ConnectionModelDefinition, Utc::now()), + connection_platform: self.connection_platform, + connection_definition_id: self.connection_definition_id, + platform_version: self.platform_version, + key, + title: self.title, + name: self.name, + model_name: self.model_name, + platform_info: PlatformInfo::Api(ApiModelConfig { + base_url: self.base_url, + path: self.path, + content: Default::default(), + auth_method: self.auth_method, + headers: self.headers, + query_params: self.query_params, + schemas: self.schemas, + samples: self.samples, + responses: self.responses, + paths: self.paths, + }), + action: self.http_method, + action_name: self.action_name, + extractor_config: self.extractor_config, + test_connection_status: TestConnection::default(), + is_default_crud_mapping: self.is_default_crud_mapping, + mapping: self.mapping, + record_metadata: Default::default(), + }; + record.record_metadata.version = self.version; + Ok(record) + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, record: &mut Self::Output) { + let key = format!( + "api::{}::{}::{}::{}::{}::{}", + self.connection_platform, + self.platform_version, + self.model_name, + self.action_name, + self.path, + self.name + ) + .to_lowercase(); + + record.key = key; + 
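// The key computed above uniquely identifies a model definition and is fully
// lowercased. With illustrative values:
//
//     let key = format!(
//         "api::{}::{}::{}::{}::{}::{}",
//         "Stripe", "v1", "Customer", "getMany", "/customers", "list"
//     )
//     .to_lowercase();
//     assert_eq!(key, "api::stripe::v1::customer::getmany::/customers::list");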
record.connection_platform = self.connection_platform; + record.connection_definition_id = self.connection_definition_id; + record.platform_version = self.platform_version; + record.title = self.title; + record.name = self.name; + record.action = self.http_method; + record.action_name = self.action_name; + record.platform_info = PlatformInfo::Api(ApiModelConfig { + base_url: self.base_url, + path: self.path, + content: Default::default(), + auth_method: self.auth_method, + headers: self.headers, + query_params: self.query_params, + schemas: self.schemas, + samples: self.samples, + responses: self.responses, + paths: self.paths, + }); + record.mapping = self.mapping; + record.extractor_config = self.extractor_config; + record.record_metadata.version = self.version; + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.model_config.clone() + } +} diff --git a/api/src/endpoints/connection_model_schema.rs b/api/src/endpoints/connection_model_schema.rs new file mode 100644 index 00000000..ad2c174d --- /dev/null +++ b/api/src/endpoints/connection_model_schema.rs @@ -0,0 +1,236 @@ +use super::{ + create, delete, read, update, ApiError, ApiResult, CrudHook, CrudRequest, ReadResponse, +}; +use crate::{ + api_payloads::ErrorResponse, + internal_server_error, + server::{AppState, AppStores}, + util::shape_mongo_filter, +}; +use axum::{ + extract::{Path, Query, State}, + routing::{patch, post}, + Extension, Json, Router, +}; +use http::StatusCode; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + connection_model_schema::{ + ConnectionModelSchema, Mappings, PublicConnectionModelSchema, SchemaPaths, + }, + event_access::EventAccess, + json_schema::JsonSchema, + mongo::MongoDbStore, + }, + id::{prefix::IdPrefix, Id}, +}; +use mongodb::bson::doc; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::Value; +use std::{collections::BTreeMap, sync::Arc}; +use tokio::try_join; +use tracing::error; + +pub fn get_router() -> Router> { + Router::new() + .route( + "/", + post(create::) + .get(read::), + ) + .route( + "/:id", + patch(update::) + .delete(delete::), + ) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct PublicGetConnectionModelSchema { + pub connection_definition_id: Id, +} + +pub async fn public_get_connection_model_schema( + event_access: Option>>, + query: Option>>, + State(state): State>, +) -> Result>, ApiError> +where + T: CrudRequest + 'static, + U: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + match query.as_ref().and_then(|q| q.get("connectionDefinitionId")) { + Some(id) => id.to_string(), + None => { + return Err(( + StatusCode::BAD_REQUEST, + Json(ErrorResponse { + error: "connectionDefinitionId is required".to_string(), + }), + )) + } + }; + + let mut query = shape_mongo_filter( + query, + event_access.map(|e| { + let Extension(e) = e; + e + }), + None, + ); + + query.filter.remove("ownership.buildableId"); + query.filter.remove("environment"); + query.filter.insert("mapping", doc! 
{ "$ne": null }); + + let store = T::get_store(state.app_stores.clone()); + let count = store.count(query.filter.clone(), None); + let find = store.get_many( + Some(query.filter), + None, + None, + Some(query.limit), + Some(query.skip), + ); + + let res = match try_join!(count, find) { + Ok((total, rows)) => ReadResponse { + rows, + skip: query.skip, + limit: query.limit, + total, + }, + Err(e) => { + error!("Error reading from store: {e}"); + return Err(internal_server_error!()); + } + }; + + Ok(Json(res)) +} + +pub async fn public_get_platform_models( + Path(platform_name): Path, + State(state): State>, +) -> ApiResult> { + let store = state.app_stores.public_model_schema.clone(); + + let res = store + .get_many( + Some(doc! { + "connectionPlatform": &platform_name, + "mapping": { "$ne": null } + }), + None, + None, + Some(100), + None, + ) + .await + .map_err(|e| { + error!("Error reading from connection model schema store: {e}"); + internal_server_error!() + })?; + + let common_model_names = res + .into_iter() + .map(|r| r.mapping) + .map(|m| m.common_model_name) + .collect::>(); + + Ok(Json(common_model_names)) +} + +impl CrudRequest for PublicGetConnectionModelSchema { + type Output = PublicConnectionModelSchema; + type Error = (); + + fn into_public(self) -> Result { + unimplemented!() + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _record: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.public_model_schema.clone() + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + pub platform_id: Id, + pub platform_page_id: Id, + pub connection_platform: String, + pub connection_definition_id: Id, + pub platform_version: String, + pub model_name: String, + pub schema: JsonSchema, + pub sample: Value, + pub paths: Option, + #[cfg_attr(feature = "dummy", dummy(default))] + pub mapping: Option, +} + +impl CrudHook for CreateRequest {} + +impl CrudRequest for CreateRequest { + type Output = ConnectionModelSchema; + type Error = (); + + fn into_public(self) -> Result { + let key = format!( + "api::{}::{}::{}", + self.connection_platform, self.platform_version, self.model_name + ) + .to_lowercase(); + + Ok(Self::Output { + id: Id::now(IdPrefix::ConnectionModelSchema), + platform_id: self.platform_id, + platform_page_id: self.platform_page_id, + connection_platform: self.connection_platform, + connection_definition_id: self.connection_definition_id, + platform_version: self.platform_version, + key, + model_name: self.model_name, + schema: self.schema, + mapping: self.mapping, + sample: self.sample, + paths: self.paths, + record_metadata: Default::default(), + }) + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, record: &mut Self::Output) { + record.platform_id = self.platform_id; + record.platform_page_id = self.platform_page_id; + record.connection_platform = self.connection_platform; + record.connection_definition_id = self.connection_definition_id; + record.platform_version = self.platform_version; + record.model_name = self.model_name; + record.schema = self.schema; + record.sample = self.sample; + record.paths = self.paths; + record.mapping = self.mapping; + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.model_schema.clone() + } +} diff --git 
a/api/src/endpoints/connection_oauth_definition.rs b/api/src/endpoints/connection_oauth_definition.rs new file mode 100644 index 00000000..fce4a4e0 --- /dev/null +++ b/api/src/endpoints/connection_oauth_definition.rs @@ -0,0 +1,212 @@ +use super::{create, delete, read, update, CachedRequest, CrudHook, CrudRequest, GetCache}; +use crate::server::{AppState, AppStores}; +use axum::{ + routing::{patch, post}, + Router, +}; +use chrono::Utc; +use integrationos_domain::{ + common::{ + api_model_config::{ApiModelConfig, Compute, Function, Lang}, + connection_oauth_definition::{ + ComputeRequest, ConnectionOAuthDefinition, Frontend, OAuthApiConfig, OAuthCompute, + }, + event_access::EventAccess, + mongo::MongoDbStore, + }, + id::{prefix::IdPrefix, Id}, + record_metadata::RecordMetadata, +}; +use mongodb::bson::doc; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +pub fn get_router() -> Router> { + Router::new() + .route( + "/", + post(create::) + .get(read::), + ) + .route( + "/:id", + patch(update::) + .delete(delete::), + ) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct CreateRequest { + pub connection_platform: String, + pub platform_redirect_uri: String, + pub ios_redirect_uri: String, + pub scopes: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub separator: Option, + pub init: RequestParams, + pub refresh: RequestParams, +} + +impl CrudHook for CreateRequest {} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct RequestParams { + pub configuration: ApiModelConfig, + #[serde(skip_serializing_if = "Option::is_none", default = "default_separator")] + pub compute: Option, + pub response_compute: String, +} + +fn default_separator() -> Option { + Some(" ".to_string()) +} + +impl CrudRequest for CreateRequest { + type Output = ConnectionOAuthDefinition; + type Error = (); + + fn into_public(self) -> Result { + Ok(Self::Output { + id: Id::new(IdPrefix::ConnectionOAuthDefinition, Utc::now()), + connection_platform: self.connection_platform, + configuration: OAuthApiConfig { + init: self.init.configuration, + refresh: self.refresh.configuration, + }, + compute: OAuthCompute { + init: ComputeRequest { + response: Function(Compute { + entry: "compute".to_string(), + function: self.init.response_compute, + language: Lang::JavaScript, + }), + computation: self.init.compute.map(|compute| { + Function(Compute { + entry: "compute".to_string(), + function: compute, + language: Lang::JavaScript, + }) + }), + }, + refresh: ComputeRequest { + computation: self.refresh.compute.map(|compute| { + Function(Compute { + entry: "compute".to_string(), + function: compute, + language: Lang::JavaScript, + }) + }), + response: Function(Compute { + entry: "compute".to_string(), + function: self.refresh.response_compute, + language: Lang::JavaScript, + }), + }, + }, + frontend: Frontend { + platform_redirect_uri: self.platform_redirect_uri, + ios_redirect_uri: self.ios_redirect_uri, + scopes: self.scopes, + separator: self.separator, + }, + record_metadata: Default::default(), + hooks: Default::default(), + }) + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, record: &mut Self::Output) { + record.connection_platform = self.connection_platform; + record.configuration = OAuthApiConfig { + init: self.init.configuration, + refresh: self.refresh.configuration, + }; + record.compute = OAuthCompute { + init: 
ComputeRequest { + computation: self.init.compute.map(|compute| { + Function(Compute { + entry: "compute".to_string(), + function: compute, + language: Lang::JavaScript, + }) + }), + response: Function(Compute { + entry: "compute".to_string(), + function: self.init.response_compute, + language: Lang::JavaScript, + }), + }, + refresh: ComputeRequest { + response: Function(Compute { + entry: "compute".to_string(), + function: self.refresh.response_compute, + language: Lang::JavaScript, + }), + computation: self.refresh.compute.map(|compute| { + Function(Compute { + entry: "compute".to_string(), + function: compute, + language: Lang::JavaScript, + }) + }), + }, + }; + record.frontend = Frontend { + platform_redirect_uri: self.platform_redirect_uri, + ios_redirect_uri: self.ios_redirect_uri, + scopes: self.scopes, + separator: self.separator, + }; + record.record_metadata.updated_at = Utc::now().timestamp_millis(); + record.record_metadata.updated = true; + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.oauth_config.clone() + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct FrontendOauthConnectionDefinition { + #[serde(rename = "_id")] + pub id: String, + pub connection_platform: String, + pub frontend: Frontend, + #[serde(flatten)] + pub record_metadata: RecordMetadata, +} + +impl CrudRequest for FrontendOauthConnectionDefinition { + type Output = FrontendOauthConnectionDefinition; + type Error = (); + + fn into_public(self) -> Result { + unimplemented!() + } + + fn into_with_event_access(self, _: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.frontend_oauth_config.clone() + } +} + +impl CachedRequest for FrontendOauthConnectionDefinition { + type Output = FrontendOauthConnectionDefinition; + + fn get_cache(state: Arc) -> GetCache { + state.connection_oauth_definitions_cache.clone() + } +} diff --git a/api/src/endpoints/event_access.rs b/api/src/endpoints/event_access.rs new file mode 100644 index 00000000..906f0a50 --- /dev/null +++ b/api/src/endpoints/event_access.rs @@ -0,0 +1,246 @@ +use std::sync::Arc; + +use anyhow::Result; +use axum::{ + extract::State, + http::StatusCode, + routing::{delete as axum_delete, get, post}, + Extension, Json, Router, +}; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + access_key_data::AccessKeyData, + access_key_prefix::AccessKeyPrefix, + connection_definition::{ConnectionDefinitionType, Paths}, + environment::Environment, + event_access::EventAccess, + event_type::EventType, + mongo::MongoDbStore, + ownership::Ownership, + record_metadata::RecordMetadata, + AccessKey, + }, + id::{prefix::IdPrefix, Id}, +}; +use mongodb::bson::doc; +use rand::Rng; +use serde::{Deserialize, Serialize}; +use tracing::{error, warn}; + +use validator::Validate; + +use crate::{ + api_payloads::ErrorResponse, + bad_request, + config::Config, + internal_server_error, + server::{AppState, AppStores}, +}; + +use super::{delete, read, CrudRequest}; + +const DEFAULT_GROUP: &str = "event-inc-internal"; +const DEFAULT_NAMESPACE: &str = "default"; + +pub fn get_router() -> Router> { + Router::new() + .route("/", post(create_event_access)) + .route("/", get(read::)) + .route( + "/:id", + axum_delete(delete::), + ) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Validate)] +#[serde(rename_all = "camelCase")] +pub struct 
CreateEventAccessRequest { + pub name: String, + pub group: Option, + pub platform: String, + pub namespace: Option, + pub connection_type: ConnectionDefinitionType, + pub paths: Paths, +} + +impl CrudRequest for CreateEventAccessRequest { + type Output = EventAccess; + type Error = (); + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _record: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.event_access + } + + fn into_public(self) -> Result { + unimplemented!() + } +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize, Validate)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +pub struct CreateEventAccessPayloadWithOwnership { + pub name: String, + pub group: Option, + pub platform: String, + pub namespace: Option, + pub connection_type: ConnectionDefinitionType, + pub environment: Environment, + pub paths: Paths, + pub ownership: Ownership, +} + +impl CreateEventAccessPayloadWithOwnership { + pub fn as_event_access(&self, config: &Config) -> Result { + generate_event_access(config.clone(), self.clone()) + } +} + +pub fn generate_event_access( + config: Config, + payload: CreateEventAccessPayloadWithOwnership, +) -> Result { + let namespace = payload + .namespace + .unwrap_or_else(|| DEFAULT_NAMESPACE.to_string()); + let group = payload.group.unwrap_or_else(|| DEFAULT_GROUP.to_string()); + + let access_key = AccessKey { + prefix: AccessKeyPrefix { + environment: payload.environment, + event_type: EventType::SecretKey, + version: 1, + }, + data: AccessKeyData { + id: payload.ownership.id.to_owned().to_string(), + namespace: namespace.clone(), + event_type: "custom".to_owned(), + group: group.clone(), + event_path: payload + .paths + .event + .clone() + .unwrap_or("$.body.event".to_string()), + event_object_id_path: Some(payload.paths.id.clone().unwrap_or("$.body.id".to_string())), + timestamp_path: Some( + payload + .paths + .timestamp + .clone() + .unwrap_or("$.body.id".to_string()), + ), + parent_access_key: None, + }, + }; + + let iv = rand::thread_rng().gen::<[u8; 16]>(); + let password = config.event_access_password.as_bytes().try_into()?; + + let encoded_access_key = access_key.encode(password, &iv)?; + + let key = format!( + "event_access::{}::{}::{}::{}::{}", + payload.connection_type, payload.environment, namespace, payload.platform, group + ); + + Ok(EventAccess { + id: Id::now(IdPrefix::EventAccess), + name: payload.name, + namespace, + r#type: payload.connection_type, + group, + platform: payload.platform, + ownership: payload.ownership, + key, + paths: payload.paths, + access_key: encoded_access_key.to_string(), + environment: payload.environment, + record_metadata: RecordMetadata::default(), + throughput: config.event_access_throughput, + }) +} + +pub async fn create_event_access_for_new_user( + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + if let Err(validation_errors) = req.validate() { + warn!("Invalid payload: {:?}", validation_errors); + return Err(bad_request!(format!( + "Invalid payload: {:?}", + validation_errors + ))); + } + + let event_access = generate_event_access(state.config.clone(), req).map_err(|e| { + error!("Error generating event access for new user: {:?}", e); + + internal_server_error!() + })?; + + state + .app_stores + .event_access + .create_one(&event_access) + .await + .map_err(|e| { + error!("Error creating event access for new user: {:?}", 
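// `generate_event_access` above falls back to JSON-path style defaults when the
// caller omits extraction paths, and composes a descriptive lookup key. An
// illustration with hypothetical values:
//
//     // Incoming event: { "body": { "event": "customer.created", "id": "cus_123" } }
//     //   event path     "$.body.event" => "customer.created"
//     //   object id path "$.body.id"    => "cus_123"
//
//     let key = format!(
//         "event_access::{}::{}::{}::{}::{}",
//         "api", "test", "default", "stripe", "event-inc-internal"
//     );
//     assert_eq!(key, "event_access::api::test::default::stripe::event-inc-internal");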
e); + + internal_server_error!() + })?; + + Ok(Json(event_access)) +} + +pub async fn create_event_access( + Extension(user_event_access): Extension>, + State(state): State>, + Json(req): Json, +) -> Result, (StatusCode, Json)> { + if let Err(validation_errors) = req.validate() { + return Err(bad_request!(format!( + "Invalid payload: {:?}", + validation_errors + ))); + } + + let event_access_payload = CreateEventAccessPayloadWithOwnership { + name: req.name.clone(), + group: req.group.clone(), + namespace: req.namespace.clone(), + platform: req.platform.clone(), + connection_type: req.connection_type.clone(), + environment: user_event_access.environment, + paths: req.paths.clone(), + ownership: user_event_access.ownership.clone(), + }; + + let event_access = + generate_event_access(state.config.clone(), event_access_payload).map_err(|e| { + error!("Error generating event access for existing user: {:?}", e); + + internal_server_error!() + })?; + + state + .app_stores + .event_access + .create_one(&event_access) + .await + .map_err(|e| { + error!("Error creating event access for existing user: {:?}", e); + + internal_server_error!() + })?; + + Ok(Json(event_access)) +} diff --git a/api/src/endpoints/events.rs b/api/src/endpoints/events.rs new file mode 100644 index 00000000..59077b03 --- /dev/null +++ b/api/src/endpoints/events.rs @@ -0,0 +1,38 @@ +use std::sync::Arc; + +use axum::{routing::get, Router}; +use bson::doc; +use integrationos_domain::common::{event_access::EventAccess, mongo::MongoDbStore, Event}; +use serde::{Deserialize, Serialize}; + +use crate::server::{AppState, AppStores}; + +use super::{read, CrudRequest}; + +pub fn get_router() -> Router> { + Router::new().route("/", get(read::)) +} + +#[derive(Serialize, Deserialize)] +pub struct CreateEventRequest; + +impl CrudRequest for CreateEventRequest { + type Output = Event; + type Error = (); + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _record: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.event + } + + fn into_public(self) -> anyhow::Result { + unimplemented!() + } +} diff --git a/api/src/endpoints/metrics.rs b/api/src/endpoints/metrics.rs new file mode 100644 index 00000000..35401005 --- /dev/null +++ b/api/src/endpoints/metrics.rs @@ -0,0 +1,125 @@ +use std::sync::Arc; + +use axum::{ + extract::{Path, Query, State}, + routing::get, + Json, Router, +}; +use bson::Document; +use integrationos_domain::Store; +use serde::{Deserialize, Serialize}; +use tracing::error; + +use crate::{ + internal_server_error, + metrics::{DAILY_KEY, MONTHLY_KEY, PLATFORMS_KEY, TOTAL_KEY}, + not_found, + server::AppState, +}; + +use super::ApiResult; + +pub fn get_router() -> Router> { + Router::new() + .route("/", get(get_metrics)) + .route("/:client_id", get(get_metrics)) +} + +#[derive(Debug, Default, Deserialize)] +#[serde(rename_all = "camelCase")] +pub enum Granularity { + Day(String), + Month(String), + #[default] + Total, +} + +#[derive(Debug, Clone, Copy, strum::Display, Deserialize)] +#[serde(rename_all = "lowercase")] +#[strum(serialize_all = "lowercase")] +pub enum MetricType { + Passthrough, + Unified, + RateLimited, +} + +#[derive(Debug, Deserialize, Default)] +#[serde(rename_all = "camelCase")] +pub struct QueryParams { + #[serde(default, rename = "apiType")] + metric_type: Option, + #[serde(default)] + platform: Option, + #[serde(flatten)] + granularity: Option, +} + +#[derive(Debug, Serialize, 
Deserialize)]
+pub struct MetricResponse {
+    pub count: i32,
+}
+
+pub async fn get_metrics(
+    state: State<Arc<AppState>>,
+    path: Option<Path<String>>,
+    query_params: Option<Query<QueryParams>>,
+) -> ApiResult<MetricResponse> {
+    let coll = state
+        .app_stores
+        .db
+        .collection::<Document>(&Store::Metrics.to_string());
+
+    let client_id = path
+        .and_then(|p| if p.0.is_empty() { None } else { Some(p) })
+        .map(|p| p.0)
+        .unwrap_or(state.config.metric_system_id.clone());
+
+    let doc = match coll
+        .find_one(bson::doc! { "clientId": client_id }, None)
+        .await
+    {
+        Ok(Some(doc)) => doc,
+        Ok(None) => return Err(not_found!("Client")),
+        Err(e) => {
+            error!("Could not fetch metric: {e}");
+            return Err(internal_server_error!());
+        }
+    };
+
+    let query_params = query_params.unwrap_or_default();
+
+    let metric_type = query_params.metric_type.unwrap_or(MetricType::Unified);
+    tracing::debug!(
+        "metric type: {:?} - {}",
+        query_params.metric_type, metric_type
+    );
+    let Ok(doc) = doc.get_document(metric_type.to_string()) else {
+        return Ok(Json(MetricResponse { count: 0 }));
+    };
+
+    let doc = if let Some(platform) = &query_params.platform {
+        let Ok(doc) = doc
+            .get_document(PLATFORMS_KEY)
+            .and_then(|d| d.get_document(platform))
+        else {
+            return Ok(Json(MetricResponse { count: 0 }));
+        };
+        doc
+    } else {
+        doc
+    };
+
+    let result = match query_params
+        .granularity
+        .as_ref()
+        .unwrap_or(&Granularity::Total)
+    {
+        Granularity::Day(day) => doc.get_document(DAILY_KEY).and_then(|d| d.get_i32(day)),
+        Granularity::Month(month) => doc.get_document(MONTHLY_KEY).and_then(|d| d.get_i32(month)),
+        Granularity::Total => doc.get_i32(TOTAL_KEY),
+    };
+
+    Ok(Json(MetricResponse {
+        count: result.unwrap_or_default(),
+    }))
+}
diff --git a/api/src/endpoints/mod.rs b/api/src/endpoints/mod.rs
new file mode 100644
index 00000000..090fe216
--- /dev/null
+++ b/api/src/endpoints/mod.rs
@@ -0,0 +1,434 @@
+pub mod common_enum;
+pub mod common_model;
+pub mod connection;
+pub mod connection_definition;
+pub mod connection_model_definition;
+pub mod connection_model_schema;
+pub mod connection_oauth_definition;
+pub mod event_access;
+pub mod events;
+pub mod metrics;
+pub mod oauth;
+pub mod openapi;
+pub mod passthrough;
+pub mod pipeline;
+pub mod transactions;
+pub mod unified;
+
+use crate::{
+    api_payloads::ErrorResponse,
+    bad_request, internal_server_error, not_found,
+    server::{AppState, AppStores},
+    util::shape_mongo_filter,
+};
+use anyhow::Result;
+use axum::{
+    async_trait,
+    extract::{Path, Query, State},
+    Extension, Json,
+};
+use bson::{doc, SerializerOptions};
+use http::{HeaderMap, HeaderValue, StatusCode};
+use integrationos_domain::{
+    algebra::adapter::StoreAdapter,
+    common::{event_access::EventAccess, mongo::MongoDbStore, Connection},
+    IntegrationOSError, OAuth, Store,
+};
+use moka::future::Cache;
+use mongodb::options::FindOneOptions;
+use serde::{de::DeserializeOwned, Deserialize, Serialize};
+use std::{collections::BTreeMap, sync::Arc};
+use tokio::try_join;
+use tracing::error;
+
+const INTEGRATION_OS_PASSTHROUGH_HEADER: &str = "x-integrationos-passthrough";
+
+pub type GetCache<T> = Arc<Cache<Option<BTreeMap<String, String>>, Arc<ReadResponse<T>>>>;
+pub type ApiError = (StatusCode, Json<ErrorResponse>);
+pub type ApiResult<T> = Result<Json<T>, ApiError>;
+
+pub trait CrudRequest: Sized {
+    type Output: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static;
+    type Error: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static + std::fmt::Debug;
+
+    fn into_with_event_access(self, event_access: Arc<EventAccess>) -> Self::Output;
+    fn into_public(self) -> Result<Self::Output>;
+    fn update(self, record: &mut Self::Output);
+    fn get_store(stores: 
AppStores) -> MongoDbStore; +} + +pub trait CachedRequest: Sized { + type Output: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static; + + fn get_cache(state: Arc) -> GetCache; +} + +#[async_trait] +pub trait CrudHook +where + Input: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + async fn after_create_hook( + _record: &Input, + _stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + Ok(()) + } + + async fn after_update_hook( + _record: &Input, + _stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + Ok(()) + } + + async fn after_delete_hook( + _record: &Input, + _stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + Ok(()) + } + + async fn after_read_hook( + _record: &Input, + _stores: &AppStores, + ) -> Result<(), IntegrationOSError> { + Ok(()) + } +} + +pub async fn create( + event_access: Option>>, + State(state): State>, + Json(req): Json, +) -> ApiResult +where + T: CrudRequest + CrudHook + 'static, + U: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + let output = if let Some(Extension(event_access)) = event_access { + req.into_with_event_access(event_access) + } else { + req.into_public().map_err(|e| { + error!("Error creating object: {:?}", e); + internal_server_error!() + })? + }; + + match T::get_store(state.app_stores.clone()) + .create_one(&output) + .await + { + Ok(_) => { + T::after_create_hook(&output, &state.app_stores) + .await + .map_err(|e| { + error!("Error running after create hook: {:?}", e); + }) + .ok(); + + Ok(Json(output)) + } + Err(e) => { + error!("Error creating object: {e}"); + Err(internal_server_error!()) + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] +pub struct ReadResponse { + pub rows: Vec, + pub total: u64, + pub skip: u64, + pub limit: u64, +} + +pub async fn read( + headers: HeaderMap, + event_access: Option>>, + query: Option>>, + State(state): State>, +) -> Result>, ApiError> +where + T: CrudRequest + 'static, + U: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + let query = shape_mongo_filter( + query, + event_access.map(|e| { + let Extension(e) = e; + e + }), + Some(headers), + ); + + let store = T::get_store(state.app_stores.clone()); + let count = store.count(query.filter.clone(), None); + let find = store.get_many( + Some(query.filter), + None, + None, + Some(query.limit), + Some(query.skip), + ); + + let res = match try_join!(count, find) { + Ok((total, rows)) => ReadResponse { + rows, + skip: query.skip, + limit: query.limit, + total, + }, + Err(e) => { + error!("Error reading from store: {e}"); + return Err(internal_server_error!()); + } + }; + + Ok(Json(res)) +} + +pub async fn read_cached( + query: Option>>, + State(state): State>, +) -> Result>>, ApiError> +where + T: CrudRequest + CachedRequest + 'static, + U: Clone + Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + let cache = T::get_cache(state.clone()); + + let res = cache + .try_get_with(query.as_ref().map(|q| q.0.clone()), async { + let query = shape_mongo_filter(query, None, None); + + let store = T::get_store(state.app_stores.clone()); + let count = store.count(query.filter.clone(), None); + let find = store.get_many( + Some(query.filter), + None, + None, + Some(query.limit), + Some(query.skip), + ); + + let res = match try_join!(count, find) { + Ok((total, rows)) => Arc::new(ReadResponse { + rows, + skip: query.skip, + limit: query.limit, + total, + }), + Err(e) => { + error!("Error reading from store: {e}"); + return 
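The create/read/read_cached handlers above (and the update/delete that follow) are written once and shared by every collection; each resource opts in by implementing CrudRequest. A minimal, self-contained sketch of that pattern, with a simplified trait and hypothetical Note/CreateNoteRequest types rather than the crate's exact API:

```rust
use serde::{Deserialize, Serialize};

// Simplified stand-in for the CrudRequest pattern: one request type per
// collection tells the generic handlers how to build and store records.
trait CrudRequestSketch: Sized {
    type Output: Serialize;
    fn into_output(self) -> Self::Output; // cf. into_with_event_access
    fn collection_name() -> &'static str; // cf. get_store
}

#[derive(Deserialize)]
struct CreateNoteRequest { text: String } // hypothetical model

#[derive(Serialize)]
struct Note { text: String, deleted: bool }

impl CrudRequestSketch for CreateNoteRequest {
    type Output = Note;
    fn into_output(self) -> Note { Note { text: self.text, deleted: false } }
    fn collection_name() -> &'static str { "notes" }
}

// A generic "create" can then be written once for every T, as above.
fn create<T: CrudRequestSketch>(req: T) -> (String, T::Output) {
    (T::collection_name().to_string(), req.into_output())
}

fn main() {
    let (coll, note) = create(CreateNoteRequest { text: "hi".into() });
    assert_eq!(coll, "notes");
    assert!(!note.deleted);
}
```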
Err(internal_server_error!()); + } + }; + + Ok(res) + }) + .await + .map_err(Arc::unwrap_or_clone)?; + + Ok(Json(res)) +} + +#[derive(Serialize, Deserialize)] +pub struct SuccessResponse { + success: bool, +} + +pub async fn update( + event_access: Option>>, + Path(id): Path, + State(state): State>, + Json(req): Json, +) -> Result, ApiError> +where + T: CrudRequest + CrudHook + 'static, + U: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + let mut query = shape_mongo_filter( + None, + event_access.map(|e| { + let Extension(e) = e; + e + }), + None, + ); + query.filter.insert("_id", id.clone()); + + let store = T::get_store(state.app_stores.clone()); + + let Some(mut record) = (match store.get_one(query.filter).await { + Ok(ret) => ret, + Err(e) => { + error!("Error getting record in store: {e}"); + return Err(internal_server_error!()); + } + }) else { + return Err(not_found!("Record")); + }; + + req.update(&mut record); + + let bson = bson::to_bson_with_options( + &record, + SerializerOptions::builder().human_readable(false).build(), + ) + .map_err(|e| { + error!("Could not serialize record into document: {e}"); + internal_server_error!() + })?; + + let document = doc! { + "$set": bson + }; + + match store.update_one(&id, document).await { + Ok(_) => { + T::after_update_hook(&record, &state.app_stores) + .await + .map_err(|e| { + error!("Error running after update hook: {:?}", e); + }) + .ok(); + Ok(Json(SuccessResponse { success: true })) + } + Err(e) => { + error!("Error updating in store: {e}"); + Err(internal_server_error!()) + } + } +} + +pub async fn delete( + event_access: Option>>, + Path(id): Path, + State(state): State>, +) -> ApiResult +where + T: CrudRequest + 'static, + U: Serialize + DeserializeOwned + Unpin + Sync + Send + 'static, +{ + let store = T::get_store(state.app_stores.clone()); + + let mut query = shape_mongo_filter( + None, + event_access.map(|e| { + let Extension(e) = e; + e + }), + None, + ); + query.filter.insert("_id", id.clone()); + + let Some(res) = (match store.get_one(query.filter).await { + Ok(ret) => ret, + Err(e) => { + error!("Could not get record from store: {e}"); + return Err(internal_server_error!()); + } + }) else { + return Err(not_found!("Record")); + }; + + match store + .update_one( + &id, + doc! { + "$set": { + "deleted": true, + } + }, + ) + .await + { + Ok(_) => Ok(Json(res)), + Err(e) => { + error!("Could not update record in store: {e}"); + Err(internal_server_error!()) + } + } +} + +#[derive(Deserialize)] +struct SparseConnection { + oauth: OAuth, +} + +async fn get_connection( + access: &EventAccess, + connection_id: &HeaderValue, + stores: &AppStores, + cache: &Cache<(Arc, HeaderValue), Arc>, +) -> Result, ApiError> { + let connection = cache + .try_get_with( + (access.ownership.id.clone(), connection_id.clone()), + async { + let Ok(connection_id_str) = connection_id.to_str() else { + return Err(bad_request!("Invalid connection key header")); + }; + + let connection = match stores + .connection + .get_one(doc! { + "key": connection_id_str, + "ownership.buildableId": access.ownership.id.as_ref(), + "deleted": false + }) + .await + { + Ok(Some(data)) => Arc::new(data), + Ok(None) => { + return Err(not_found!("Connection")); + } + Err(e) => { + error!("Error fetching connection: {:?}", e); + + return Err(internal_server_error!()); + } + }; + + Ok(connection) + }, + ) + .await + .map_err(Arc::unwrap_or_clone)?; + + if let Some(OAuth::Enabled { .. 
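Note that the delete handler above never removes the document: it flips `deleted` to true, and the read paths filter on `deleted: false`, so flagged records drop out of every query while staying in the collection. A small sketch of the two documents involved, assuming the bson crate and a hypothetical record id:

```rust
use bson::doc;

fn main() {
    // What the delete handler writes: a soft-delete flag, not a removal.
    let update = doc! { "$set": { "deleted": true } };

    // What reads and updates filter on, so flagged records vanish from
    // views while remaining available for audit.
    let filter = doc! { "_id": "conn_123", "deleted": false }; // hypothetical id
    println!("{update}\n{filter}");
}
```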
}) = connection.oauth { + let sparse_connection = match stores + .db + .collection::(&Store::Connections.to_string()) + .find_one( + doc! { + "_id": &connection.id.to_string(), + "ownership.buildableId": access.ownership.id.as_ref(), + "deleted": false + }, + FindOneOptions::builder() + .projection(doc! { + "oauth": 1 + }) + .build(), + ) + .await + { + Ok(Some(data)) => data, + Ok(None) => { + return Err(not_found!("Connection")); + } + Err(e) => { + error!("Error fetching connection: {:?}", e); + + return Err(internal_server_error!()); + } + }; + let mut connection = (*connection).clone(); + connection.oauth = Some(sparse_connection.oauth); + return Ok(Arc::new(connection)); + } + Ok(connection) +} diff --git a/api/src/endpoints/oauth.rs b/api/src/endpoints/oauth.rs new file mode 100644 index 00000000..66415d38 --- /dev/null +++ b/api/src/endpoints/oauth.rs @@ -0,0 +1,493 @@ +use super::event_access::CreateEventAccessPayloadWithOwnership; +use crate::{endpoints::ApiError, internal_server_error, not_found, server::AppState}; +use anyhow::anyhow; +use axum::{ + extract::{Path, State}, + routing::post, + Extension, Json, Router, +}; +use chrono::{Duration, Utc}; +use http::{HeaderMap, HeaderName, HeaderValue}; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + api_model_config::ContentType, + connection_definition::ConnectionDefinition, + connection_oauth_definition::{ + Computation, ConnectionOAuthDefinition, OAuthResponse, PlatformSecret, Settings, + }, + event_access::EventAccess, + mongo::MongoDbStore, + ownership::Ownership, + Connection, OAuth, Throughput, + }, + get_secret_request::GetSecretRequest, + id::{prefix::IdPrefix, Id}, + oauth_secret::OAuthSecret, +}; +use mongodb::bson::doc; +use reqwest::Request; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde_json::{to_string_pretty, Value}; +use std::{ + collections::{BTreeMap, HashMap}, + str::FromStr, + sync::Arc, +}; +use tracing::{debug, error}; + +pub fn get_router() -> Router> { + Router::new().route("/:platform", post(oauth_handler)) +} + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +struct OAuthRequest { + #[serde(rename = "__isEngineeringAccount__", default)] + is_engineering_account: bool, + connection_definition_id: Id, + client_id: String, + group: String, + label: String, + #[serde(skip_serializing_if = "Option::is_none")] + payload: Option, +} + +type ApiResult = std::result::Result; + +#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +#[serde(rename_all = "camelCase")] +struct OAuthPayload { + client_id: String, + client_secret: String, + metadata: Value, +} + +async fn oauth_handler( + state: State>, + Extension(user_event_access): Extension>, + Path(platform): Path, + Json(payload): Json, +) -> ApiResult> { + let oauth_definition = find_oauth_definition(&state, &platform).await?; + let setting = find_settings( + &state, + &user_event_access.ownership, + payload.is_engineering_account, + ) + .await?; + + let secret = get_secret::( + &state, + GetSecretRequest { + id: setting.platform_secret(&payload.connection_definition_id).ok_or_else(|| { + error!("Settings does not have a secret service id for the connection platform"); + not_found!( + "Settings does not have a secret service id for the connection platform provided, settings" + ) + })?, + buildable_id: if payload.is_engineering_account { + 
state.config.engineering_account_id.clone() + } else { + user_event_access.clone().ownership.id.to_string() + }, + }, + ) + .await?; + + let oauth_payload = OAuthPayload { + metadata: payload.payload.clone().unwrap_or(Value::Null), + client_id: payload.client_id, + client_secret: secret.client_secret, + }; + + let request = request(&oauth_definition, &oauth_payload)?; + let response = state + .http_client + .execute(request) + .await + .map(|response| response.json::()) + .map_err(|e| { + error!("Failed to execute oauth request: {}", e); + not_found!("Failed to execute oauth request {e}") + })? + .await + .map_err(|e| { + error!("Failed to decode third party oauth response: {}", e); + internal_server_error!() + })?; + + debug!("oauth response: {:?}", response); + + let decoded: OAuthResponse = oauth_definition + .compute + .init + .response + .compute(&response) + .map_err(|e| { + error!("Failed to decode oauth response: {}", e); + internal_server_error!() + })?; + + let oauth_secret = OAuthSecret::from_init( + decoded, + oauth_payload.client_id, + oauth_payload.client_secret, + response, + payload.payload, + ); + + let secret = state + .secrets_client + .encrypt( + user_event_access.clone().ownership.id.to_string(), + &oauth_secret.as_json(), + ) + .await + .map_err(|e| { + error!("Failed to create oauth secret: {}", e); + internal_server_error!() + })?; + + let connection_definition = + find_connection_definition(&state, &payload.connection_definition_id).await?; + + let key = format!( + "{}::{}::{}", + user_event_access.environment, connection_definition.platform, payload.group + ); + + let event_access = CreateEventAccessPayloadWithOwnership { + name: payload.label.clone(), + group: Some(payload.group.clone()), + platform: connection_definition.platform.clone(), + namespace: None, + connection_type: connection_definition.r#type.clone(), + environment: user_event_access.environment, + paths: connection_definition.paths.clone(), + ownership: user_event_access.ownership.clone(), + } + .as_event_access(&state.config) + .map_err(|e| { + error!("Error creating event access for connection: {:?}", e); + + internal_server_error!() + })?; + + let connection = Connection { + id: Id::new(IdPrefix::Connection, Utc::now()), + platform_version: connection_definition.clone().platform_version, + connection_definition_id: connection_definition.id, + r#type: connection_definition.to_connection_type(), + name: payload.label, + key: key.clone().into(), + group: payload.group, + environment: user_event_access.environment, + platform: platform.into(), + secrets_service_id: secret.id, + event_access_id: event_access.id, + access_key: event_access.access_key, + settings: connection_definition.settings, + throughput: Throughput { key, limit: 100 }, + ownership: user_event_access.ownership.clone(), + oauth: Some(OAuth::Enabled { + connection_oauth_definition_id: oauth_definition.id, + expires_in: Some(oauth_secret.expires_in), + expires_at: Some( + (chrono::Utc::now() + + Duration::try_seconds(oauth_secret.expires_in as i64) + .ok_or(anyhow!("Invalid expires_in timestamp")) + .map_err(|e| { + error!("Failed to decode oauth response: {}", e); + internal_server_error!() + })?) 
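The `expires_at` stored on the connection above is an absolute unix timestamp derived from the provider's relative `expires_in`. A worked example of the same arithmetic, with a hypothetical one-hour token:

```rust
use chrono::{Duration, Utc};

fn main() {
    // e.g. an OAuth token with a one-hour lifetime (hypothetical value)
    let expires_in: i64 = 3600;
    let expires_at = (Utc::now()
        + Duration::try_seconds(expires_in).expect("valid seconds"))
    .timestamp();
    println!("token expires at unix timestamp {expires_at}");
}
```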
+ .timestamp(), + ), + }), + record_metadata: Default::default(), + }; + + state + .app_stores + .connection + .create_one(&connection) + .await + .map_err(|e| { + error!("Failed to create connection: {}", e); + internal_server_error!() + })?; + + Ok(Json(connection)) +} + +fn request( + oauth_definition: &ConnectionOAuthDefinition, + payload: &OAuthPayload, +) -> ApiResult { + let payload = serde_json::to_value(payload).map_err(|e| { + error!("Failed to serialize oauth payload: {}", e); + internal_server_error!() + })?; + let computation = oauth_definition + .compute + .init + .computation + .clone() + .map(|computation| computation.compute::(&payload)) + .transpose() + .map_err(|e| { + error!("Failed to compute oauth payload: {}", e); + internal_server_error!() + })?; + + let headers = header(oauth_definition, computation.as_ref())?; + let query = query(oauth_definition, computation.as_ref())?; + let body = body(&payload, computation.as_ref())?; + + let request = reqwest::Client::new() + .post(oauth_definition.configuration.init.uri()) + .headers(headers); + + let request = match oauth_definition.configuration.init.content { + Some(ContentType::Json) => request.json(&body).query(&query), + Some(ContentType::Form) => request.form(&body).query(&query), + _ => request.query(&query), + }; + + request.build().map_err(|e| { + error!("Failed to build static request: {}", e); + internal_server_error!() + }) +} + +fn query( + oauth_definition: &ConnectionOAuthDefinition, + computation: Option<&Computation>, +) -> ApiResult> { + let query_params = oauth_definition + .configuration + .init + .query_params + .as_ref() + .map(|query_params| { + let mut map = HashMap::new(); + for (key, value) in query_params { + let key = key.to_string(); + let value = value.as_str(); + + map.insert(key, value.to_string()); + } + map + }); + + match query_params { + Some(query_params) => { + let payload = computation.and_then(|computation| computation.clone().query_params); + + let handlebars = handlebars::Handlebars::new(); + + let query_params_str = to_string_pretty(&query_params).map_err(|e| { + error!("Failed to serialize query params: {}", e); + internal_server_error!() + })?; + + let query_params = handlebars + .render_template(&query_params_str, &payload) + .map_err(|e| { + error!("Failed to render query params: {}", e); + internal_server_error!() + })?; + + let query_params: BTreeMap = serde_json::from_str(&query_params) + .map_err(|e| { + error!("Failed to deserialize query params: {}", e); + internal_server_error!() + })?; + + Ok(Some(serde_json::to_value(query_params).map_err(|e| { + error!("Failed to serialize query params: {}", e); + internal_server_error!() + })?)) + } + None => Ok(None), + } +} + +fn body(payload: &Value, computation: Option<&Computation>) -> ApiResult> { + let body = computation.and_then(|computation| computation.clone().body); + + match body { + Some(body) => { + let handlebars = handlebars::Handlebars::new(); + + let body_str = to_string_pretty(&body).map_err(|e| { + error!("Failed to serialize body: {}", e); + internal_server_error!() + })?; + + let body = handlebars + .render_template(&body_str, &payload) + .map_err(|e| { + error!("Failed to render body: {}", e); + internal_server_error!() + })?; + + Ok(Some(serde_json::from_str(&body).map_err(|e| { + error!("Failed to deserialize body: {}", e); + internal_server_error!() + })?)) + } + None => Ok(None), + } +} + +fn header( + oauth_definition: &ConnectionOAuthDefinition, + computation: Option<&Computation>, +) -> ApiResult { + let 
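The query, body, and header builders here all follow the same recipe: serialize the oauth definition's template, render it with handlebars against the computed payload, then deserialize the result back into a map. A minimal sketch of that render step (template and values are hypothetical):

```rust
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let handlebars = handlebars::Handlebars::new();
    // Hypothetical template in the same shape the oauth definition stores:
    // serialized params rendered against the computed payload.
    let template = r#"{"grant_type": "client_credentials", "client_id": "{{client_id}}"}"#;
    let rendered = handlebars.render_template(template, &json!({ "client_id": "abc123" }))?;
    assert!(rendered.contains("abc123"));
    Ok(())
}
```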
headers = oauth_definition + .configuration + .init + .headers + .as_ref() + .and_then(|headers| { + let mut map = HashMap::new(); + for (key, value) in headers { + let key = key.to_string(); + let value = value.to_str().ok()?; + + map.insert(key, value.to_string()); + } + Some(map) + }); + + match headers { + Some(headers) => { + let payload = computation.and_then(|computation| computation.clone().headers); + + let handlebars = handlebars::Handlebars::new(); + + let headers_str = to_string_pretty(&headers).map_err(|e| { + error!("Failed to serialize headers: {}", e); + internal_server_error!() + })?; + + let headers = handlebars + .render_template(&headers_str, &payload) + .map_err(|e| { + error!("Failed to render headers: {}", e); + internal_server_error!() + })?; + + let headers: BTreeMap = + serde_json::from_str(&headers).map_err(|e| { + error!("Failed to deserialize headers: {}", e); + internal_server_error!() + })?; + + headers + .iter() + .try_fold(HeaderMap::new(), |mut header_map, (key, value)| { + let key = HeaderName::from_str(key).map_err(|e| { + error!("Failed to parse header name: {}", e); + internal_server_error!() + })?; + + let value = HeaderValue::from_str(value).map_err(|e| { + error!("Failed to parse header value: {}", e); + internal_server_error!() + })?; + + header_map.insert(key, value); + + Ok(header_map) + }) + } + None => Ok(HeaderMap::new()), + } +} + +async fn find_connection_definition( + state: &State>, + connection_definition_id: &Id, +) -> ApiResult { + let connection_definition_store: &MongoDbStore = + &state.app_stores.connection_config; + + let connection_definition: ConnectionDefinition = connection_definition_store + .get_one(doc! {"_id": &connection_definition_id.to_string()}) + .await + .map_err(|e| { + error!("Failed to retrieve connection definition: {}", e); + internal_server_error!() + })? + .ok_or_else(|| not_found!("Connection definition not found"))?; + + Ok(connection_definition) +} + +async fn find_oauth_definition( + state: &State>, + platform: &str, +) -> ApiResult { + let oauth_definition_store: &MongoDbStore = + &state.app_stores.oauth_config; + + let oauth_definition: ConnectionOAuthDefinition = oauth_definition_store + .get_one(doc! {"connectionPlatform": &platform}) + .await + .map_err(|e| { + error!("Failed to find oauth definition: {}", e); + internal_server_error!() + })? + .ok_or_else(|| not_found!("Oauth definition"))?; + + Ok(oauth_definition) +} + +async fn find_settings( + state: &State>, + ownership: &Ownership, + is_admin: bool, +) -> ApiResult { + let settings_store: &MongoDbStore = &state.app_stores.settings; + + let ownership_id = if !is_admin { + ownership.id.to_string() + } else { + state.config.engineering_account_id.clone() + }; + + let setting: Settings = settings_store + .get_one(doc! {"ownership.buildableId": &ownership_id}) + .await + .map_err(|e| { + error!("Failed to retrieve from settings store: {}", e); + internal_server_error!() + })? 
+ .ok_or_else(|| not_found!("Settings"))?; + + Ok(setting) +} + +async fn get_secret( + state: &State>, + get_secret_request: GetSecretRequest, +) -> ApiResult { + let secrets_client = &state.secrets_client; + + let encoded_secret = secrets_client + .decrypt(&get_secret_request) + .await + .map_err(|e| { + error!("Failed to retrieve oauth secret: {}", e); + internal_server_error!() + })?; + + serde_json::from_value::(encoded_secret).map_err(|e| { + error!("Failed to deserialize owner secret: {}", e); + internal_server_error!() + }) +} diff --git a/api/src/endpoints/openapi.rs b/api/src/endpoints/openapi.rs new file mode 100644 index 00000000..c54f8b7f --- /dev/null +++ b/api/src/endpoints/openapi.rs @@ -0,0 +1,383 @@ +use super::ApiError; +use crate::{ + debug_error, internal_server_error, + server::AppState, + util::{generate_openapi_schema, generate_path_item}, +}; +use axum::extract::{Json, State}; +use bson::doc; +use convert_case::{Case, Casing}; +use futures::{Stream, StreamExt, TryStreamExt}; +use http::StatusCode; +use indexmap::IndexMap; +use integrationos_domain::{ + algebra::{adapter::StoreAdapter, measured::TimedExt}, + common_model::{CommonEnum, CommonModel}, + mongo::MongoDbStore, +}; +use mongodb::error::Error as MongoError; +use openapiv3::*; +use serde::{Deserialize, Serialize}; +use std::{ + collections::HashSet, + pin::Pin, + sync::{Arc, RwLock}, +}; +use tokio::task::JoinHandle; +use tracing::{debug, error, info}; + +#[derive(Clone, Default, Debug)] +pub struct OpenAPIData { + state: Arc>, +} + +impl OpenAPIData { + pub fn get(&self) -> Result { + self.state.read().map(|state| state.clone()).map_err(|e| { + anyhow::Error::msg(format!("Could not get openapi schema from cache: {e}")) + }) + } + + pub fn set(&self, value: CachedSchema) -> Result<(), anyhow::Error> { + self.state + .write() + .map(|mut state| *state = value) + .map_err(|e| anyhow::Error::msg(format!("Could not set openapi schema in cache: {e}"))) + } + + pub fn clear(&self) -> Result<(), anyhow::Error> { + self.set(CachedSchema::default()) + } + + pub fn spawn_openapi_generation( + &self, + cm_store: MongoDbStore, + ce_store: MongoDbStore, + ) -> JoinHandle> { + spawn_openapi_generation(cm_store, ce_store, self.clone()) + } +} + +#[derive(Debug, Clone, Default)] +pub struct CachedSchema { + schema: Vec, + is_generating: bool, + error: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase", untagged)] +pub enum OpenApiSchema { + OpenAPI(OpenAPI), + Accepted(String), + Error(String), +} + +struct PathWithSchema { + path: IndexMap>, + schema: IndexMap>, +} + +struct PathIter { + paths: Vec>>, + components: IndexMap>, +} + +impl PathIter { + /// Takes a list of paths and components, merges the components, collects + /// all the paths and returns a PathIter + fn from_paths(paths: Vec) -> Self { + let mut components = IndexMap::new(); + + for path in &paths { + components.extend(path.schema.clone()); + } + + let paths = paths + .into_iter() + .map(|path| path.path) + .collect::>>>(); + + Self { paths, components } + } +} + +type StreamResult = Pin> + Send>>; + +#[tracing::instrument(name = "Refresh OpenAPI schema", skip(state))] +pub async fn refresh_openapi( + state: State>, +) -> Result<(StatusCode, Json), ApiError> { + state.openapi_data.clone().clear().map_err(|e| { + error!("Could not clear openapi schema from cache: {:?}", e); + internal_server_error!() + })?; + + spawn_openapi_generation( + state.app_stores.common_model.clone(), + 
state.app_stores.common_enum.clone(), + state.openapi_data.clone(), + ); + + Ok(( + StatusCode::ACCEPTED, + Json(OpenApiSchema::Accepted( + "OpenAPI schema is being regenerated".to_string(), + )), + )) +} + +#[tracing::instrument(name = "Get OpenAPI schema", skip(state))] +pub async fn get_openapi( + state: State>, +) -> Result<(StatusCode, Json), ApiError> { + let schema = state.openapi_data.get().map_err(|e| { + error!("Could not get openapi schema from cache: {:?}", e); + internal_server_error!() + })?; + + if schema.is_generating { + info!("OpenAPI schema is being generated"); + return Ok(( + StatusCode::ACCEPTED, + Json(OpenApiSchema::Accepted( + "You're early, the schema is being generated".to_string(), + )), + )); + } + + if let Some(error) = &schema.error { + info!("OpenAPI schema generation failed: {}, retrying...", error); + spawn_openapi_generation( + state.app_stores.common_model.clone(), + state.app_stores.common_enum.clone(), + state.openapi_data.clone(), + ); + return Err(debug_error!(format!( + "OpenAPI schema generation failed: {}", + error + ))); + } + + let openapi = serde_json::from_slice(schema.schema.as_ref()).map_err(|e| { + error!("Could not deserialize openapi schema: {:?}", e); + internal_server_error!() + })?; + + Ok((StatusCode::OK, Json(OpenApiSchema::OpenAPI(openapi)))) +} + +fn spawn_openapi_generation( + cm_store: MongoDbStore, + ce_store: MongoDbStore, + state: OpenAPIData, +) -> JoinHandle> { + tokio::spawn(async move { + let stream: StreamResult = cm_store + .collection + .find(Some(doc! { "primary": true }), None) + .await + .map_err(|e| { + error!("Could not fetch common model: {:?}", e); + e + })? + .boxed(); + + let cached_schema = CachedSchema { + schema: Vec::new(), + is_generating: true, + error: None, + }; + + info!("Setting openapi schema as generating in cache"); + state.set(cached_schema.clone()).map_err(|e| { + error!("Could not set openapi schema as generating in cache: {e}"); + e + })?; + + let result = stream + .map(|cm| async { + let cm_store = cm_store.clone(); + let ce_store = ce_store.clone(); + match cm { + Ok(cm) => Some( + generate_references_data(cm, cm_store, ce_store) + .timed(|_, elapsed| { + debug!("Common model processed in {:?}", elapsed); + }) + .await, + ), + Err(e) => { + error!("Could not fetch common model: {e}"); + None + } + } + }) + .buffer_unordered(10) + .filter_map(|x| async { x }) + .try_collect::>() + .await; + + match result { + Ok(paths) => { + info!("Generating openapi schema"); + let paths = PathIter::from_paths(paths); + let schema = generate_openapi_schema(paths.paths, paths.components); + + info!("Deserializing openapi schema"); + let schema = serde_json::to_vec(&schema).map_err(|e| { + error!("Could not serialize openapi schema: {e}"); + e + }); + + if schema.is_err() { + state + .set(CachedSchema { + schema: vec![], + is_generating: false, + error: Some( + "Could not serialize openapi schema, retrying...".to_string(), + ), + }) + .map_err(|e| { + error!("Could not set openapi schema in cache: {e}"); + e + })?; + } + + info!("Setting openapi schema in cache"); + if let Ok(schema) = schema { + state + .set(CachedSchema { + schema, + is_generating: false, + error: None, + }) + .map_err(|e| { + error!("Could not set openapi schema in cache: {e}"); + e + })?; + } + Ok(()) + } + Err(err) => { + error!("Could not generate openapi schema: {err}"); + state + .set(CachedSchema { + schema: vec![], + is_generating: false, + error: Some(format!("Could not generate openapi schema: {err}")), + }) + .map_err(|e| { + 
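The cache above gives GET /openapi three observable states. A compact sketch of the mapping, using the status codes the handlers actually return (202 via `StatusCode::ACCEPTED` while generating, 422 via `debug_error!` on a failed generation, 200 once the serialized document is ready); names here are simplified stand-ins for CachedSchema:

```rust
// Sketch of the three states a caller of GET /openapi can observe.
enum SchemaState {
    Generating,     // -> 202 Accepted, poll again later
    Failed(String), // -> 422, and a background regeneration is kicked off
    Ready(Vec<u8>), // -> 200 with the serialized OpenAPI document
}

fn status_code(state: &SchemaState) -> u16 {
    match state {
        SchemaState::Generating => 202,
        SchemaState::Failed(_) => 422,
        SchemaState::Ready(_) => 200,
    }
}

fn main() {
    assert_eq!(status_code(&SchemaState::Generating), 202);
}
```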
error!("Could not set openapi schema in cache: {e}"); + e + }) + } + } + }) +} + +async fn generate_references_data( + cm: CommonModel, + cm_store: MongoDbStore, + ce_store: MongoDbStore, +) -> Result { + let mut schema = IndexMap::new(); + let (child_cms, missing) = cm + .fetch_all_children_common_models(cm_store.clone()) + .await?; + // PERF: Use fetch_all_children_common_enums instead + let mut enum_references = cm + .get_enum_references() + .into_iter() + .filter_map(|x| match x.datatype { + integrationos_domain::common_model::DataType::Enum { reference, .. } => { + Some(reference.to_case(Case::Pascal)) + } + _ => None, + }) + .collect::>(); + + if !missing.is_empty() { + debug!("Missing children. Contact platform to create {:?}", missing); + } + + // Add properties for children + for (k, child_cm) in child_cms.into_iter() { + schema.insert(k, ReferenceOr::Item(child_cm.reference())); + let references = child_cm + .get_enum_references() + .into_iter() + .filter_map(|x| match x.datatype { + integrationos_domain::common_model::DataType::Enum { reference, .. } => { + Some(reference.to_case(Case::Pascal)) + } + _ => None, + }) + .collect::>(); + + enum_references.extend(references); + } + + // Add properties for enum references + let enum_references = ce_store + .get_many( + Some(doc! { + "name": { + "$in": bson::to_bson(&enum_references)? + } + }), + None, + None, + None, + None, + ) + .await?; + + enum_references.into_iter().for_each(|ce| { + schema.insert( + ce.name.clone(), + ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown(ce.name.to_case(Case::Camel)), + enumeration: ce + .options + .iter() + .map(|option| Some(option.to_owned())) + .collect(), + ..Default::default() + })), + }), + ); + }); + + // Add dummy properties for missing children + for r#ref in missing { + let schema_item = Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::Object(ObjectType { + properties: { + IndexMap::from_iter(vec![( + r#ref.clone(), + ReferenceOr::Item(Box::new(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::Object(ObjectType { + properties: Default::default(), + ..Default::default() + })), + })), + )]) + }, + ..Default::default() + })), + }; + schema.insert(r#ref.clone(), ReferenceOr::Item(schema_item)); + } + + // Add properties for the common model itself + schema.insert(cm.name.clone(), ReferenceOr::Item(cm.reference())); + + let path = generate_path_item(&cm); + Ok(PathWithSchema { path, schema }) +} diff --git a/api/src/endpoints/passthrough.rs b/api/src/endpoints/passthrough.rs new file mode 100644 index 00000000..76d45a9d --- /dev/null +++ b/api/src/endpoints/passthrough.rs @@ -0,0 +1,120 @@ +use std::{collections::HashMap, sync::Arc}; + +use axum::{ + extract::{Query, State}, + response::IntoResponse, + routing::get, + Extension, Router, +}; +use http::{header::CONTENT_LENGTH, HeaderMap, HeaderName, Method, Uri}; +use hyper::body::Bytes; +use integrationos_domain::common::{ + destination::{Action, Destination}, + event_access::EventAccess, +}; +use tracing::error; + +use crate::{bad_request, metrics::Metric, server::AppState, service_unavailable}; + +use super::{get_connection, INTEGRATION_OS_PASSTHROUGH_HEADER}; + +pub fn get_router() -> Router> { + Router::new().route( + "/*key", + get(passthrough_request) + .post(passthrough_request) + .patch(passthrough_request) + .delete(passthrough_request), + ) +} + +pub async 
fn passthrough_request( + Extension(user_event_access): Extension>, + State(state): State>, + mut headers: HeaderMap, + query_params: Option>>, + uri: Uri, + method: Method, + body: Bytes, +) -> impl IntoResponse { + let Some(connection_key_header) = headers.get(&state.config.headers.connection_header) else { + return Err(bad_request!("Missing connection key header")); + }; + + let connection = get_connection( + user_event_access.as_ref(), + connection_key_header, + &state.app_stores, + &state.connections_cache, + ) + .await?; + + let destination = Destination { + platform: connection.platform.clone(), + action: Action::Passthrough { + path: uri.path().into(), + method, + }, + connection_key: connection.key.clone(), + }; + + let Query(query_params) = query_params.unwrap_or_default(); + + headers.remove(&state.config.headers.auth_header); + headers.remove(&state.config.headers.connection_header); + + let model_execution_result = state + .extractor_caller + .send_to_destination( + Some(connection.clone()), + &destination, + headers, + query_params, + Some(body.to_vec()), + ) + .await + .map_err(|e| { + error!( + "Error executing connection model definition in passthrough endpoint: {:?}", + e + ); + + service_unavailable!() + })?; + + let mut headers = HeaderMap::new(); + + model_execution_result + .headers() + .into_iter() + .for_each(|(key, value)| match key { + &CONTENT_LENGTH => { + headers.insert(CONTENT_LENGTH, value.clone()); + } + _ => { + if let Ok(header_name) = + HeaderName::try_from(format!("{INTEGRATION_OS_PASSTHROUGH_HEADER}-{key}")) + { + headers.insert(header_name, value.clone()); + }; + } + }); + + let status = model_execution_result.status(); + + let metric = Metric::passthrough(connection); + if let Err(e) = state.metric_tx.send(metric).await { + error!("Could not send metric to receiver: {e}"); + } + + let bytes = model_execution_result.bytes().await.map_err(|e| { + error!( + "Error retrieving bytes from response in passthrough endpoint: {:?}", + e + ); + + service_unavailable!() + })?; + + Ok((status, headers, bytes)) +} diff --git a/api/src/endpoints/pipeline.rs b/api/src/endpoints/pipeline.rs new file mode 100644 index 00000000..bf5c4f6f --- /dev/null +++ b/api/src/endpoints/pipeline.rs @@ -0,0 +1,92 @@ +use super::{create, delete, read, update, CrudHook, CrudRequest}; +use crate::server::{AppState, AppStores}; +use axum::{routing::post, Router}; +use bson::doc; +use integrationos_domain::{ + common::{ + configuration::pipeline::PipelineConfig, destination::Destination, + event_access::EventAccess, middleware::Middleware, mongo::MongoDbStore, + record_metadata::RecordMetadata, signature::Signature, source::Source, Pipeline, + }, + id::{prefix::IdPrefix, Id}, +}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; + +pub fn get_router() -> Router> { + Router::new() + .route( + "/:id", + post(update::) + .delete(delete::), + ) + .route( + "/", + post(create::) + .get(read::), + ) +} + +#[derive(Serialize, Deserialize)] +#[cfg_attr(feature = "dummy", derive(fake::Dummy))] +pub struct CreatePipelineRequest { + pub name: String, + pub key: String, + pub source: Source, + pub destination: Destination, + pub middleware: Vec, + pub signature: Signature, + pub config: PipelineConfig, +} + +impl CrudHook for CreatePipelineRequest {} + +impl CrudRequest for CreatePipelineRequest { + type Output = Pipeline; + type Error = (); + + fn into_public(self) -> anyhow::Result { + unimplemented!() + } + + fn into_with_event_access(self, event_access: Arc) -> Self::Output { + 
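Upstream response headers in the passthrough handler above are re-emitted under the `x-integrationos-passthrough-` prefix so they cannot collide with the gateway's own response headers; only content-length is forwarded unprefixed. A small sketch of the renaming, using a hypothetical upstream header:

```rust
use http::{HeaderMap, HeaderName, HeaderValue};

const PREFIX: &str = "x-integrationos-passthrough";

fn main() {
    // Upstream header "x-request-id: abc" comes back to the caller as
    // "x-integrationos-passthrough-x-request-id: abc".
    let mut out = HeaderMap::new();
    let key = HeaderName::from_static("x-request-id");
    let value = HeaderValue::from_static("abc");
    let prefixed = HeaderName::try_from(format!("{PREFIX}-{key}")).expect("valid header name");
    out.insert(prefixed, value);
    assert!(out.contains_key("x-integrationos-passthrough-x-request-id"));
}
```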
Self::Output { + id: Id::now(IdPrefix::Pipeline).to_string(), + environment: event_access.environment, + name: self.name, + key: self.key, + source: self.source, + destination: self.destination, + middleware: self.middleware, + ownership: event_access.ownership.clone(), + signature: self.signature, + config: Some(self.config), + record_metadata: RecordMetadata::default(), + } + } + + fn update(self, record: &mut Self::Output) { + let CreatePipelineRequest { + name, + key, + source, + destination, + middleware, + signature, + config, + } = self; + + record.name = name; + record.key = key; + record.source = source; + record.destination = destination; + record.middleware = middleware; + record.signature = signature; + record.config = Some(config); + record.record_metadata.mark_updated(&record.ownership.id); + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.pipeline + } +} diff --git a/api/src/endpoints/transactions.rs b/api/src/endpoints/transactions.rs new file mode 100644 index 00000000..5b6e47ab --- /dev/null +++ b/api/src/endpoints/transactions.rs @@ -0,0 +1,38 @@ +use std::sync::Arc; + +use axum::{routing::get, Router}; +use bson::doc; +use integrationos_domain::common::{event_access::EventAccess, mongo::MongoDbStore, Transaction}; +use serde::{Deserialize, Serialize}; + +use crate::server::{AppState, AppStores}; + +use super::{read, CrudRequest}; + +pub fn get_router() -> Router> { + Router::new().route("/", get(read::)) +} + +#[derive(Serialize, Deserialize)] +pub struct TransactionCrud; + +impl CrudRequest for TransactionCrud { + type Output = Transaction; + type Error = (); + + fn into_public(self) -> anyhow::Result { + unimplemented!() + } + + fn into_with_event_access(self, _event_access: Arc) -> Self::Output { + unimplemented!() + } + + fn update(self, _record: &mut Self::Output) { + unimplemented!() + } + + fn get_store(stores: AppStores) -> MongoDbStore { + stores.transactions + } +} diff --git a/api/src/endpoints/unified.rs b/api/src/endpoints/unified.rs new file mode 100644 index 00000000..b19ce7dc --- /dev/null +++ b/api/src/endpoints/unified.rs @@ -0,0 +1,329 @@ +use std::{collections::HashMap, sync::Arc}; + +use axum::{ + extract::{Path, Query, State}, + response::{IntoResponse, Response}, + routing::{delete, get, patch, post}, + Extension, Json, Router, +}; +use bson::doc; +use convert_case::{Case, Casing}; +use http::{HeaderMap, HeaderName}; +use integrationos_domain::{ + common::{ + connection_model_definition::CrudAction, destination::Action, + encrypted_access_key::EncryptedAccessKey, encrypted_data::PASSWORD_LENGTH, + event_access::EventAccess, AccessKey, Event, + }, + ApplicationError, IntegrationOSError, +}; +use serde::{Deserialize, Serialize}; +use serde_json::{json, Value}; +use tracing::error; + +use crate::{ + bad_request, config::Headers, debug_error, internal_server_error, metrics::Metric, + not_found_with_custom_message, server::AppState, service_unavailable, +}; + +use super::{get_connection, INTEGRATION_OS_PASSTHROUGH_HEADER}; + +pub fn get_router() -> Router> { + Router::new() + .route("/:model/:id", get(get_request)) + .route("/:model/:id", patch(update_request)) + .route("/:model", get(list_request)) + .route("/:model/count", get(count_request)) + .route("/:model", post(create_request)) + .route("/:model/:id", delete(delete_request)) +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +pub struct PathParams { + pub id: String, + pub model: String, +} + +pub async fn get_request( + event_access: Extension>, + state: State>, + 
Path(params): Path, + headers: HeaderMap, + query_params: Option>>, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: params.model.to_case(Case::Pascal).into(), + action: CrudAction::GetOne, + id: Some(params.id.into()), + }, + None, + ) + .await +} + +pub async fn update_request( + event_access: Extension>, + state: State>, + Path(params): Path, + headers: HeaderMap, + query_params: Option>>, + Json(body): Json, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: params.model.to_case(Case::Pascal).into(), + action: CrudAction::Update, + id: Some(params.id.into()), + }, + Some(body), + ) + .await +} + +pub async fn list_request( + event_access: Extension>, + state: State>, + Path(model): Path, + headers: HeaderMap, + query_params: Option>>, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: model.to_case(Case::Pascal).into(), + action: CrudAction::GetMany, + id: None, + }, + None, + ) + .await +} + +pub async fn count_request( + event_access: Extension>, + state: State>, + Path(model): Path, + headers: HeaderMap, + query_params: Option>>, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: model.to_case(Case::Pascal).into(), + action: CrudAction::GetCount, + id: None, + }, + None, + ) + .await +} + +pub async fn create_request( + event_access: Extension>, + state: State>, + Path(model): Path, + headers: HeaderMap, + query_params: Option>>, + Json(body): Json, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: model.to_case(Case::Pascal).into(), + action: CrudAction::Create, + id: None, + }, + Some(body), + ) + .await +} + +pub async fn delete_request( + event_access: Extension>, + state: State>, + Path(params): Path, + headers: HeaderMap, + query_params: Option>>, +) -> impl IntoResponse { + process_request( + event_access, + state, + headers, + query_params, + Action::Unified { + name: params.model.to_case(Case::Pascal).into(), + action: CrudAction::Delete, + id: Some(params.id.into()), + }, + None, + ) + .await +} + +pub async fn process_request( + Extension(user_event_access): Extension>, + State(state): State>, + mut headers: HeaderMap, + query_params: Option>>, + action: Action, + body: Option, +) -> impl IntoResponse { + let Some(connection_key_header) = headers.get(&state.config.headers.connection_header) else { + return Err(bad_request!("Missing connection key header")); + }; + let connection = get_connection( + user_event_access.as_ref(), + connection_key_header, + &state.app_stores, + &state.connections_cache, + ) + .await?; + + let Query(query_params) = query_params.unwrap_or_default(); + + let include_passthrough = headers + .get(&state.config.headers.enable_passthrough_header) + .and_then(|v| v.to_str().ok()) + .map(|s| s == "true") + .unwrap_or_default(); + + let access_key_header_value = headers.get(&state.config.headers.auth_header).cloned(); + + remove_event_headers(&mut headers, &state.config.headers); + + let Action::Unified { + name: model_name, + action: action_name, + .. 
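Every unified route above funnels into process_request with an `Action::Unified` whose name is the Pascal-cased path segment, so the URL style stays decoupled from common-model naming. For example (hypothetical model name), assuming the convert_case crate used above:

```rust
use convert_case::{Case, Casing};

fn main() {
    // GET /unified/customer-orders/123 resolves to a GetOne action against
    // the Pascal-cased common model name.
    let model = "customer-orders"; // hypothetical path segment
    assert_eq!(model.to_case(Case::Pascal), "CustomerOrders");
}
```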
+ } = &action + else { + return Err(internal_server_error!()); + }; + let event_name = format!( + "{}::{}::{}::{}", + connection.platform, connection.platform_version, model_name, action_name, + ); + + let mut response = state + .extractor_caller + .send_to_destination_unified( + connection.clone(), + action, + include_passthrough, + headers, + query_params, + body, + ) + .await + .map_err(|e| { + error!( + "Error executing connection model definition in unified endpoint: {:?}", + e + ); + if state.config.debug_mode { + return debug_error!(format!("{e:?}")); + } + match e { + IntegrationOSError::Internal(_) => service_unavailable!(), + IntegrationOSError::Application(e) => match e { + ApplicationError::NotFound { .. } => { + not_found_with_custom_message!("The requested resource was not found") + } + _ => internal_server_error!(), + }, + } + })?; + + *response.headers_mut() = response + .headers() + .iter() + .map(|(key, value)| { + ( + HeaderName::try_from(format!("{INTEGRATION_OS_PASSTHROUGH_HEADER}-{key}")).unwrap(), + value.clone(), + ) + }) + .collect::(); + + let (parts, body) = response.into_parts(); + + if let Some(Ok(encrypted_access_key)) = + access_key_header_value.map(|v| v.to_str().map(|s| s.to_string())) + { + if let Ok(encrypted_access_key) = EncryptedAccessKey::parse(&encrypted_access_key) { + let password: [u8; PASSWORD_LENGTH] = state + .config + .event_access_password + .as_bytes() + .try_into() + .map_err(|e| { + error!("event_access_password is not 32 bytes in length: {e}"); + internal_server_error!() + })?; + + let access_key = AccessKey::parse(&encrypted_access_key, &password).map_err(|e| { + error!("Could not decrypt access key: {e}"); + internal_server_error!() + })?; + const META: &str = "meta"; + let body = serde_json::to_string(&json!({ + META: body.get(META) + })) + .map_err(|e| { + error!("Could not serialize meta body to string: {e}"); + internal_server_error!() + })?; + + let name = if parts.status.is_success() { + format!("{event_name}::request-succeeded",) + } else { + format!("{event_name}::request-failed",) + }; + let event = Event::new( + &access_key, + &encrypted_access_key, + &name, + parts.headers.clone(), + body, + ); + if let Err(e) = state.event_tx.send(event).await { + error!("Could not send event to receiver: {e}"); + } + } + }; + + let metric = Metric::unified(connection.clone()); + if let Err(e) = state.metric_tx.send(metric).await { + error!("Could not send metric to receiver: {e}"); + } + + let response = Response::from_parts(parts, ()); + + Ok((response, Json(body))) +} + +fn remove_event_headers(headers: &mut HeaderMap, headers_config: &Headers) { + headers.remove(&headers_config.auth_header); + headers.remove(&headers_config.connection_header); + headers.remove(&headers_config.include_overflow_header); + headers.remove(&headers_config.enable_passthrough_header); + headers.remove(&headers_config.dynamic_platform_header); +} diff --git a/api/src/error.rs b/api/src/error.rs new file mode 100644 index 00000000..52979d28 --- /dev/null +++ b/api/src/error.rs @@ -0,0 +1,93 @@ +#[macro_export] +macro_rules! unauthorized { + () => { + ( + http::StatusCode::UNAUTHORIZED, + axum::Json($crate::api_payloads::ErrorResponse { + error: "Unauthorized".to_string(), + }), + ) + }; +} + +#[macro_export] +macro_rules! not_found { + ($e:expr) => { + ( + http::StatusCode::NOT_FOUND, + axum::Json($crate::api_payloads::ErrorResponse { + error: format!("{} not found", $e), + }), + ) + }; +} + +#[macro_export] +macro_rules! 
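The event emitted after a unified call encodes platform, platform version, model, and action, then appends an outcome suffix chosen from the response status. A sketch with hypothetical values (the exact action spelling comes from CrudAction's serialization, which is not shown here):

```rust
fn main() {
    // Event names follow platform::version::model::action, with the
    // outcome suffix appended once the response status is known.
    let (platform, version, model, action) = ("shopify", "v1", "Orders", "getOne"); // hypothetical
    let event_name = format!("{platform}::{version}::{model}::{action}");
    let success = true;
    let name = if success {
        format!("{event_name}::request-succeeded")
    } else {
        format!("{event_name}::request-failed")
    };
    assert_eq!(name, "shopify::v1::Orders::getOne::request-succeeded");
}
```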
not_found_with_custom_message { + ($e:expr) => { + ( + http::StatusCode::NOT_FOUND, + axum::Json($crate::api_payloads::ErrorResponse { + error: format!("{}", $e), + }), + ) + }; +} + +#[macro_export] +macro_rules! internal_server_error { + () => { + ( + http::StatusCode::INTERNAL_SERVER_ERROR, + axum::Json($crate::api_payloads::ErrorResponse { + error: "Internal error".to_string(), + }), + ) + }; +} + +#[macro_export] +macro_rules! too_many_requests { + () => { + ( + http::StatusCode::TOO_MANY_REQUESTS, + axum::Json($crate::api_payloads::ErrorResponse { + error: "Too Many Requests".to_string(), + }), + ) + }; +} + +#[macro_export] +macro_rules! bad_request { + ($e:expr) => { + ( + http::StatusCode::BAD_REQUEST, + axum::Json($crate::api_payloads::ErrorResponse { + error: $e.to_string(), + }), + ) + }; +} + +#[macro_export] +macro_rules! service_unavailable { + () => { + ( + http::StatusCode::SERVICE_UNAVAILABLE, + axum::Json($crate::api_payloads::ErrorResponse { + error: "Service unavailable".to_string(), + }), + ) + }; +} + +#[macro_export] +macro_rules! debug_error { + ($e:expr) => { + ( + http::StatusCode::UNPROCESSABLE_ENTITY, + axum::Json($crate::api_payloads::ErrorResponse { error: $e }), + ) + }; +} diff --git a/api/src/lib.rs b/api/src/lib.rs new file mode 100644 index 00000000..375024de --- /dev/null +++ b/api/src/lib.rs @@ -0,0 +1,9 @@ +pub mod api_payloads; +pub mod config; +pub mod endpoints; +pub mod error; +pub mod metrics; +pub mod middleware; +pub mod routes; +pub mod server; +pub mod util; diff --git a/api/src/main.rs b/api/src/main.rs new file mode 100644 index 00000000..c1e8fe82 --- /dev/null +++ b/api/src/main.rs @@ -0,0 +1,36 @@ +use std::sync::Arc; + +use anyhow::Result; +use api::{config::Config, server::Server}; +use dotenvy::dotenv; +use envconfig::Envconfig; +use integrationos_domain::service::secrets_client::SecretsClient; +use tracing::info; +use tracing_subscriber::filter::LevelFilter; +use tracing_subscriber::EnvFilter; + +fn main() -> Result<()> { + dotenv().ok(); + + let filter = EnvFilter::builder() + .with_default_directive(LevelFilter::INFO.into()) + .from_env_lossy(); + + tracing_subscriber::fmt().with_env_filter(filter).init(); + + let config = Config::init_from_env()?; + + info!("Starting API with config:\n{config}"); + + let secrets_client = Arc::new(SecretsClient::new(&config.secrets_config)?); + + tokio::runtime::Builder::new_multi_thread() + .worker_threads(config.worker_threads.unwrap_or(num_cpus::get())) + .enable_all() + .build()? 
+        .block_on(async move {
+            let server: Server = Server::init(config, secrets_client).await?;
+
+            server.run().await
+        })
+}
diff --git a/api/src/metrics.rs b/api/src/metrics.rs
new file mode 100644
index 00000000..546c6dea
--- /dev/null
+++ b/api/src/metrics.rs
@@ -0,0 +1,171 @@
+use std::sync::Arc;
+
+use chrono::{DateTime, Datelike, Utc};
+use http::HeaderValue;
+use integrationos_domain::{event_access::EventAccess, ownership::Ownership, Connection};
+use segment::message::{Track, User};
+use serde::Deserialize;
+use serde_json::json;
+
+pub const TOTAL_KEY: &str = "total";
+pub const DAILY_KEY: &str = "daily";
+pub const MONTHLY_KEY: &str = "monthly";
+pub const PLATFORMS_KEY: &str = "platforms";
+pub const CREATED_AT_KEY: &str = "createdAt";
+
+#[derive(Debug, Clone, strum::Display, Deserialize)]
+#[serde(rename_all = "lowercase")]
+#[strum(serialize_all = "lowercase")]
+pub enum MetricType {
+    Passthrough(Arc<Connection>),
+    Unified(Arc<Connection>),
+    RateLimited(
+        Arc<EventAccess>,
+        #[serde(with = "http_serde_ext::header_value::option")] Option<HeaderValue>,
+    ),
+}
+
+impl MetricType {
+    pub fn event_name(&self) -> &'static str {
+        use MetricType::*;
+        match self {
+            Passthrough(_) => "Called Passthrough API",
+            Unified(_) => "Called Unified API",
+            RateLimited(_, _) => "Reached Rate Limit",
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub struct Metric {
+    pub metric_type: MetricType,
+    pub date: DateTime<Utc>,
+}
+
+impl Metric {
+    pub fn passthrough(connection: Arc<Connection>) -> Self {
+        Self {
+            metric_type: MetricType::Passthrough(connection),
+            date: Utc::now(),
+        }
+    }
+
+    pub fn unified(connection: Arc<Connection>) -> Self {
+        Self {
+            metric_type: MetricType::Unified(connection),
+            date: Utc::now(),
+        }
+    }
+
+    pub fn rate_limited(event_access: Arc<EventAccess>, key: Option<HeaderValue>) -> Self {
+        Self {
+            metric_type: MetricType::RateLimited(event_access, key),
+            date: Utc::now(),
+        }
+    }
+
+    pub fn ownership(&self) -> &Ownership {
+        use MetricType::*;
+        match &self.metric_type {
+            Passthrough(c) => &c.ownership,
+            Unified(c) => &c.ownership,
+            RateLimited(e, _) => &e.ownership,
+        }
+    }
+
+    fn platform(&self) -> &str {
+        use MetricType::*;
+        match &self.metric_type {
+            Passthrough(c) => &c.platform,
+            Unified(c) => &c.platform,
+            RateLimited(e, _) => &e.platform,
+        }
+    }
+
+    pub fn update_doc(&self) -> bson::Document {
+        let platform = self.platform();
+        let metric_type = &self.metric_type;
+        let day = self.date.day();
+        let month = self.date.month();
+        let year = self.date.year();
+        let daily_key = format!("{year}-{month:02}-{day:02}");
+        let monthly_key = format!("{year}-{month:02}");
+        bson::doc! 
{ + "$inc": { + format!("{metric_type}.{TOTAL_KEY}"): 1, + format!("{metric_type}.{PLATFORMS_KEY}.{platform}.{TOTAL_KEY}"): 1, + format!("{metric_type}.{DAILY_KEY}.{daily_key}"): 1, + format!("{metric_type}.{PLATFORMS_KEY}.{platform}.{DAILY_KEY}.{daily_key}"): 1, + format!("{metric_type}.{MONTHLY_KEY}.{monthly_key}"): 1, + format!("{metric_type}.{PLATFORMS_KEY}.{platform}.{MONTHLY_KEY}.{monthly_key}"): 1, + }, + "$setOnInsert": { + CREATED_AT_KEY: self.date.timestamp_millis() + } + } + } + + pub fn segment_track(&self) -> Track { + use MetricType::*; + match &self.metric_type { + Unified(conn) => Track { + user: User::UserId { + user_id: self + .ownership() + .clone() + .user_id + .unwrap_or(self.ownership().id.to_string()), + }, + event: self.metric_type.event_name().to_owned(), + properties: json!({ + "connectionDefinitionId": conn.id.to_string(), + "environment": conn.environment, + "key": &conn.key, + "platform": self.platform(), + "platformVersion": &conn.platform_version, + "clientId": self.ownership().client_id, + "version": &conn.record_metadata.version + }), + ..Default::default() + }, + Passthrough(conn) => Track { + user: User::UserId { + user_id: self + .ownership() + .clone() + .user_id + .unwrap_or(self.ownership().id.to_string()), + }, + event: self.metric_type.event_name().to_owned(), + properties: json!({ + "connectionDefinitionId": conn.id.to_string(), + "environment": conn.environment, + "key": &conn.key, + "platform": self.platform(), + "platformVersion": &conn.platform_version, + "clientId": self.ownership().client_id, + "version": &conn.record_metadata.version + }), + ..Default::default() + }, + RateLimited(event_access, key) => Track { + user: User::UserId { + user_id: self + .ownership() + .clone() + .user_id + .unwrap_or(self.ownership().id.to_string()), + }, + event: self.metric_type.event_name().to_owned(), + properties: json!({ + "environment": event_access.environment, + "key": key.as_ref().map(|k| k.to_str().unwrap_or_default().to_string()), + "platform": self.platform(), + "clientId": self.ownership().client_id, + "version": &event_access.record_metadata.version + }), + ..Default::default() + }, + } + } +} diff --git a/api/src/middleware/auth.rs b/api/src/middleware/auth.rs new file mode 100644 index 00000000..aa2a7673 --- /dev/null +++ b/api/src/middleware/auth.rs @@ -0,0 +1,81 @@ +use crate::{ + endpoints::ApiError, internal_server_error, not_found, server::AppState, unauthorized, +}; +use axum::{extract::State, middleware::Next, response::Response}; +use http::Request; +use integrationos_domain::{algebra::adapter::StoreAdapter, ApplicationError, InternalError}; +use mongodb::bson::doc; +use std::sync::Arc; +use tracing::error; + +pub async fn auth( + State(state): State>, + mut req: Request, + next: Next, +) -> Result { + let Some(auth_header) = req.headers().get(&state.config.headers.auth_header) else { + return Err(unauthorized!()); + }; + + if let Some(conn_header) = req.headers().get(&state.config.headers.connection_header) { + // environment can be live or test + // connection header value starts with environment + // auth header value starts with either id_ or sk_ and then the environment + // Make sure the environments match, or we return 404 + if conn_header.as_bytes()[..4] != auth_header.as_bytes()[3..7] { + return Err(not_found!("Connection")); + } + } + + let event_access_result = state + .cache + .try_get_with_by_ref(auth_header, async { + let key = auth_header + .to_str() + // A bad header value is a user error, so we return not found + 
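update_doc above and the /metrics endpoint earlier are two halves of one schema: each call increments six counters in a single upsert, and reads walk the same document by total, daily, or monthly key, optionally under `platforms.<platform>` first. A sketch of one increment for a unified call against a hypothetical "shopify" connection:

```rust
use bson::doc;
use chrono::{Datelike, Utc};

fn main() {
    let now = Utc::now();
    let daily = format!("{}-{:02}-{:02}", now.year(), now.month(), now.day());
    let monthly = format!("{}-{:02}", now.year(), now.month());

    // Write side: the $inc document produced for one unified call.
    let update = doc! {
        "$inc": {
            "unified.total": 1,
            format!("unified.daily.{daily}"): 1,
            format!("unified.monthly.{monthly}"): 1,
            "unified.platforms.shopify.total": 1,
            format!("unified.platforms.shopify.daily.{daily}"): 1,
            format!("unified.platforms.shopify.monthly.{monthly}"): 1,
        }
    };

    // Read side: GET /metrics/<clientId>?apiType=unified&platform=shopify
    // walks unified -> platforms -> shopify -> total on the same document.
    println!("{update}");
}
```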
.map_err(|_| ApplicationError::not_found("Invalid auth header", None))?; + + let event_access = state + .app_stores + .event_access + .get_one(doc! { + "accessKey": key, + "deleted": false + }) + .await + .map_err(|e| InternalError::connection_error(&e.to_string(), None))?; + + if let Some(event_access) = event_access { + Ok(Arc::new(event_access)) + } else { + Err(ApplicationError::not_found("Event access", None)) + } + }) + .await; + + match event_access_result { + Ok(data) => { + req.extensions_mut().insert(data); + Ok(next.run(req).await) + } + Err(e) => { + if e.is_application() { + Err(unauthorized!()) + } else { + error!("Error fetching auth data: {:?}", e); + + Err(internal_server_error!()) + } + } + } +} + +#[cfg(test)] +mod test { + #[test] + fn test_header_check() { + let conn = b"test::key"; + let access_key = b"id_test_foo"; + assert_eq!(conn[..4], access_key[3..7]); + } +} diff --git a/api/src/middleware/blocker.rs b/api/src/middleware/blocker.rs new file mode 100644 index 00000000..c3283a6f --- /dev/null +++ b/api/src/middleware/blocker.rs @@ -0,0 +1,141 @@ +use crate::{server::AppState, unauthorized}; +use axum::response::IntoResponse; +use futures_util::StreamExt; +use http::{HeaderName, HeaderValue, Request}; +use integrationos_domain::Store; +use mongodb::options::FindOptions; +use serde::Deserialize; +use std::{ + collections::BTreeSet, + error::Error, + fmt::Display, + sync::{Arc, RwLock}, + time::Duration, +}; +use tower::{filter::Predicate, BoxError}; +use tracing::{error, trace}; + +pub type Whitelist = Arc>>; + +#[derive(Debug, Clone)] +pub struct BlockInvalidHeaders { + whitelist: Whitelist, + header_name: HeaderName, +} + +impl BlockInvalidHeaders { + pub async fn new(state: Arc) -> Self { + let whitelist = Arc::new(RwLock::new(BTreeSet::new())); + + let header_name = + HeaderName::from_lowercase(state.config.headers.auth_header.as_bytes()).unwrap(); + + let (tx, mut rx) = tokio::sync::mpsc::channel::<()>(1); + + let whitelist_clone = whitelist.clone(); + tokio::spawn(async move { + loop { + #[derive(Deserialize)] + struct SparseEventAccess { + #[serde(with = "http_serde_ext::header_value", rename = "accessKey")] + access_key: HeaderValue, + } + + let mut records = match state + .app_stores + .db + .collection::(&Store::EventAccess.to_string()) + .find( + bson::doc! { "deleted": false }, + FindOptions::builder() + .projection(bson::doc! 
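The four-byte comparison in the auth middleware works because both credentials embed the environment at a fixed offset: connection keys start with it ("live::..." or "test::..."), and access keys carry it right after their `id_`/`sk_` prefix. The in-file test above shows the alignment; spelled out with hypothetical keys:

```rust
fn main() {
    // Environment bytes line up at conn[..4] and access_key[3..7], so a
    // cheap byte comparison rejects cross-environment requests before any
    // database lookup.
    let conn = b"live::my-connection"; // hypothetical connection key
    let access_key = b"sk_live_abc123"; // hypothetical access key
    assert_eq!(&conn[..4], &access_key[3..7]);
}
```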
{ + "accessKey": 1 + }) + .build(), + ) + .await + { + Err(e) => { + error!("Could not fetch event access records cursor: {e}"); + continue; + } + Ok(records) => records, + }; + + #[allow(clippy::mutable_key_type)] + let mut new_whitelist = BTreeSet::new(); + while let Some(result) = records.next().await { + match result { + Ok(record) => { + let mut header_value = record.access_key; + header_value.set_sensitive(true); + + new_whitelist.insert(header_value); + } + + Err(e) => { + error!("Could not fetch event access record: {e}"); + continue; + } + } + } + + let len = new_whitelist.len(); + + { + let mut whitelist_clone = whitelist_clone.write().unwrap(); + *whitelist_clone = new_whitelist + } + + trace!("Updated whitelist with {len} entries"); + + let _ = tx.send(()).await; + tokio::time::sleep(Duration::from_secs( + state.config.access_key_whitelist_refresh_interval_secs, + )) + .await; + } + }); + + rx.recv().await; + + BlockInvalidHeaders { + whitelist, + header_name, + } + } +} + +#[derive(Debug)] +struct FastError; + +impl Display for FastError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Error") + } +} + +impl Error for FastError {} + +impl Predicate> for BlockInvalidHeaders { + type Request = Request; + + fn check(&mut self, request: Request) -> Result { + let Some(header_value) = request.headers().get(&self.header_name) else { + return Err(Box::new(FastError)); + }; + + { + let whitelist = self.whitelist.read().unwrap(); + if !whitelist.contains(header_value) { + return Err(Box::new(FastError)); + } + } + + Ok(request) + } +} + +pub async fn handle_blocked_error(_: BoxError) -> impl IntoResponse { + unauthorized!() +} diff --git a/api/src/middleware/jwt_auth.rs b/api/src/middleware/jwt_auth.rs new file mode 100644 index 00000000..91081773 --- /dev/null +++ b/api/src/middleware/jwt_auth.rs @@ -0,0 +1,78 @@ +use crate::{api_payloads::ErrorResponse, server::AppState, unauthorized}; +use axum::{extract::State, middleware::Next, response::Response, Json}; +use http::{Request, StatusCode}; +use jsonwebtoken::{DecodingKey, Validation}; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use tracing::info; + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct Claims { + #[serde(rename = "_id")] + pub id: String, + pub email: String, + pub username: String, + pub user_key: String, + pub first_name: String, + pub last_name: String, + pub buildable_id: String, + pub container_id: String, + pub pointers: Vec, + pub is_buildable_core: bool, + pub iat: i64, + pub exp: i64, + pub aud: String, + pub iss: String, +} + +const BEARER_PREFIX: &str = "Bearer "; + +#[derive(Clone)] +pub struct JwtState { + validation: Validation, + decoding_key: DecodingKey, +} + +impl JwtState { + pub fn new(state: &Arc) -> Self { + Self { + validation: Default::default(), + decoding_key: DecodingKey::from_secret(state.config.jwt_secret.as_ref()), + } + } +} + +pub async fn jwt_auth( + State(state): State>, + mut req: Request, + next: Next, +) -> Result)> { + let Some(auth_header) = req.headers().get(http::header::AUTHORIZATION) else { + info!("missing authorization header"); + return Err(unauthorized!()); + }; + + let Ok(auth_header) = auth_header.to_str() else { + info!("invalid authorization header"); + return Err(unauthorized!()); + }; + + if !auth_header.starts_with(BEARER_PREFIX) { + info!("invalid authorization header"); + return Err(unauthorized!()); + } + + let token = &auth_header[BEARER_PREFIX.len()..]; + + match 
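BlockInvalidHeaders above keeps the hot path cheap: a background task periodically rebuilds the full set of known access keys from Mongo and swaps it in wholesale, while each request only takes a read lock and does a set lookup. A reduced sketch of the same shape, without the Mongo and timer plumbing:

```rust
use std::collections::BTreeSet;
use std::sync::{Arc, RwLock};

fn main() {
    // Shared whitelist: refreshed wholesale by a background task, read on
    // every request.
    let whitelist: Arc<RwLock<BTreeSet<String>>> = Arc::new(RwLock::new(BTreeSet::new()));

    // Refresh step (here inline): rebuild a fresh set, then replace it.
    let mut fresh = BTreeSet::new();
    fresh.insert("sk_test_abc".to_string()); // hypothetical key
    *whitelist.write().unwrap() = fresh;

    // Request path: reject unknown keys without touching the database.
    let allowed = whitelist.read().unwrap().contains("sk_test_abc");
    assert!(allowed);
}
```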
jsonwebtoken::decode::(token, &state.decoding_key, &state.validation) { + Ok(decoded_token) => { + req.extensions_mut().insert(decoded_token.claims); + Ok(next.run(req).await) + } + Err(e) => { + info!("invalid JWT token : {:?}", e); + Err(unauthorized!()) + } + } +} diff --git a/api/src/middleware/mod.rs b/api/src/middleware/mod.rs new file mode 100644 index 00000000..627fafb6 --- /dev/null +++ b/api/src/middleware/mod.rs @@ -0,0 +1,7 @@ +pub mod auth; +pub mod blocker; +pub mod jwt_auth; +pub mod rate_limiter; + +pub use auth::auth; +pub use jwt_auth::jwt_auth; diff --git a/api/src/middleware/rate_limiter.rs b/api/src/middleware/rate_limiter.rs new file mode 100644 index 00000000..3b096c25 --- /dev/null +++ b/api/src/middleware/rate_limiter.rs @@ -0,0 +1,122 @@ +use crate::{metrics::Metric, server::AppState, too_many_requests}; +use anyhow::{Context, Result}; +use axum::{ + extract::State, + middleware::Next, + response::{IntoResponse, Response}, + Extension, +}; +use http::{HeaderName, Request}; +use integrationos_domain::event_access::EventAccess; +use redis_retry::{AsyncCommands, Redis}; +use std::sync::Arc; +use tokio::sync::{mpsc, oneshot}; +use tracing::warn; + +#[derive(Debug, Clone)] +pub struct RateLimiter { + tx: mpsc::Sender<(Arc, oneshot::Sender)>, + key_header_name: HeaderName, + limit_header_name: HeaderName, + remaining_header_name: HeaderName, + reset_header_name: HeaderName, + metric_tx: mpsc::Sender, +} + +impl RateLimiter { + pub async fn new(state: Arc) -> Result { + let mut redis = Redis::new_with_retry_count(&state.config.redis_config, 0) + .await + .with_context(|| "Could not connect to redis")?; + + let (tx, mut rx) = mpsc::channel::<(Arc, oneshot::Sender)>(1024); + + let throughput_key = state.config.redis_config.api_throughput_key.clone(); + + tokio::spawn(async move { + while let Some((id, tx)) = rx.recv().await { + let count: u64 = redis + .hincr(&throughput_key, id.as_ref(), 1) + .await + .unwrap_or_default(); + let _ = tx.send(count); + } + }); + + let key_header_name = + HeaderName::from_lowercase(state.config.headers.connection_header.as_bytes()).unwrap(); + + let limit_header_name = + HeaderName::from_lowercase(state.config.headers.rate_limit_limit.as_bytes()).unwrap(); + + let remaining_header_name = + HeaderName::from_lowercase(state.config.headers.rate_limit_remaining.as_bytes()) + .unwrap(); + + let reset_header_name = + HeaderName::from_lowercase(state.config.headers.rate_limit_reset.as_bytes()).unwrap(); + + Ok(RateLimiter { + tx, + metric_tx: state.metric_tx.clone(), + key_header_name, + limit_header_name, + remaining_header_name, + reset_header_name, + }) + } + + pub async fn get_request_count(&self, id: Arc) -> u64 { + let (tx, rx) = oneshot::channel(); + match self.tx.send((id, tx)).await { + Ok(()) => rx.await.unwrap_or_default(), + Err(e) => { + warn!("Could not send to redis task: {e}"); + 0 + } + } + } +} + +pub async fn rate_limiter( + Extension(event_access): Extension>, + State(state): State>, + req: Request, + next: Next, +) -> Result { + let throughput = event_access.throughput; + + let count = state + .get_request_count(event_access.ownership.id.clone()) + .await; + + if count >= throughput { + let _ = state + .metric_tx + .send(Metric::rate_limited( + event_access.clone(), + req.headers().get(&state.key_header_name).cloned(), + )) + .await; + let mut res = too_many_requests!().into_response(); + + let headers = res.headers_mut(); + + headers.insert(state.limit_header_name.clone(), throughput.into()); + 
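+        // Editor's note: the limit/remaining/reset trio mirrors the conventional
+        // X-RateLimit-* header semantics. On this blocked path `remaining` is pinned
+        // to 0 below, and `reset` is a static 60-second hint; nothing in this patch
+        // derives it from the Redis counter's actual TTL, so treat it as an approximation.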
+        headers.insert(state.remaining_header_name.clone(), 0.into());
+        headers.insert(state.reset_header_name.clone(), 60.into());
+
+        Err(res)
+    } else {
+        let mut res = next.run(req).await;
+        let headers = res.headers_mut();
+
+        headers.insert(state.limit_header_name.clone(), throughput.into());
+        headers.insert(
+            state.remaining_header_name.clone(),
+            (throughput - count).into(),
+        );
+        headers.insert(state.reset_header_name.clone(), 60.into());
+        Ok(res)
+    }
+}
diff --git a/api/src/routes/admin.rs b/api/src/routes/admin.rs
new file mode 100644
index 00000000..a5554c03
--- /dev/null
+++ b/api/src/routes/admin.rs
@@ -0,0 +1,34 @@
+use crate::{
+    endpoints::{
+        common_model, connection_definition, connection_model_definition, connection_model_schema,
+        connection_oauth_definition, metrics, openapi,
+    },
+    server::AppState,
+};
+use axum::{routing::post, Router};
+use std::sync::Arc;
+use tower_http::trace::TraceLayer;
+
+pub fn get_router() -> Router<Arc<AppState>> {
+    Router::new()
+        .nest(
+            "/connection-definitions",
+            connection_definition::get_router(),
+        )
+        .nest(
+            "/connection-oauth-definitions",
+            connection_oauth_definition::get_router(),
+        )
+        .nest(
+            "/connection-model-definitions",
+            connection_model_definition::get_router(),
+        )
+        .route("/openapi", post(openapi::refresh_openapi))
+        .nest(
+            "/connection-model-schemas",
+            connection_model_schema::get_router(),
+        )
+        .nest("/common-models", common_model::get_router())
+        .layer(TraceLayer::new_for_http())
+        .nest("/metrics", metrics::get_router())
+}
diff --git a/api/src/routes/mod.rs b/api/src/routes/mod.rs
new file mode 100644
index 00000000..e336085f
--- /dev/null
+++ b/api/src/routes/mod.rs
@@ -0,0 +1,46 @@
+pub mod admin;
+pub mod protected;
+pub mod public;
+
+use crate::{
+    api_payloads::{ErrorResponse, RootResponse},
+    server::AppState,
+};
+use axum::{response::IntoResponse, routing::get, Json, Router};
+use http::StatusCode;
+use std::sync::Arc;
+use tower_http::cors::CorsLayer;
+
+pub fn get_admin_router(state: &Arc<AppState>) -> Router<Arc<AppState>> {
+    let path = format!("/{}", state.config.api_version);
+    Router::new()
+        .nest(&path, admin::get_router())
+        .nest(&path, public::get_router(state))
+        .route("/", get(get_root))
+        .fallback(not_found_handler)
+        .layer(CorsLayer::permissive())
+}
+
+pub async fn get_public_router(state: &Arc<AppState>) -> Router<Arc<AppState>> {
+    let path = format!("/{}", state.config.api_version);
+    let public_path = format!("{path}/public");
+    Router::new()
+        .nest(&path, protected::get_router(state).await)
+        .nest(&public_path, public::get_router(state))
+        .route("/", get(get_root))
+        .fallback(not_found_handler)
+        .layer(CorsLayer::permissive())
+}
+
+pub async fn get_root() -> impl IntoResponse {
+    Json(RootResponse { success: true })
+}
+
+pub async fn not_found_handler() -> impl IntoResponse {
+    (
+        StatusCode::NOT_FOUND,
+        Json(ErrorResponse {
+            error: "Not found".to_string(),
+        }),
+    )
+}
diff --git a/api/src/routes/protected.rs b/api/src/routes/protected.rs
new file mode 100644
index 00000000..7df300b7
--- /dev/null
+++ b/api/src/routes/protected.rs
@@ -0,0 +1,77 @@
+use std::{iter::once, sync::Arc};
+
+use axum::{
+    error_handling::HandleErrorLayer,
+    routing::{get, post},
+    Router,
+};
+use http::HeaderName;
+use integrationos_domain::common::connection_model_schema::PublicConnectionModelSchema;
+use tower::{filter::FilterLayer, ServiceBuilder};
+use tower_http::{sensitive_headers::SetSensitiveRequestHeadersLayer, trace::TraceLayer};
+use tracing::warn;
+
+use crate::{
+    endpoints::{
+        connection,
+        connection_model_definition::test_connection_model_definition,
+        connection_model_schema::{self, public_get_connection_model_schema},
+        event_access, events, oauth, passthrough, pipeline, transactions, unified,
+    },
+    middleware::{
+        auth,
+        blocker::{handle_blocked_error, BlockInvalidHeaders},
+        rate_limiter::{self, RateLimiter},
+    },
+    server::AppState,
+};
+
+pub async fn get_router(state: &Arc<AppState>) -> Router<Arc<AppState>> {
+    let r = Router::new()
+        .nest("/pipelines", pipeline::get_router())
+        .nest("/events", events::get_router())
+        .nest("/transactions", transactions::get_router())
+        .nest("/connections", connection::get_router())
+        .route(
+            "/connection-model-definitions/test/:id",
+            post(test_connection_model_definition),
+        )
+        .route(
+            "/connection-model-schemas",
+            get(public_get_connection_model_schema::<
+                connection_model_schema::PublicGetConnectionModelSchema,
+                PublicConnectionModelSchema,
+            >),
+        )
+        .nest("/event-access", event_access::get_router())
+        .nest("/passthrough", passthrough::get_router())
+        .nest("/oauth", oauth::get_router())
+        .nest("/unified", unified::get_router());
+
+    let r = match RateLimiter::new(state.clone()).await {
+        Ok(rate_limiter) => r.layer(axum::middleware::from_fn_with_state(
+            Arc::new(rate_limiter),
+            rate_limiter::rate_limiter,
+        )),
+        Err(e) => {
+            warn!("Could not connect to redis: {e}");
+            r
+        }
+    };
+
+    r.layer(axum::middleware::from_fn_with_state(
+        state.clone(),
+        auth::auth,
+    ))
+    .layer(TraceLayer::new_for_http())
+    .layer(SetSensitiveRequestHeadersLayer::new(once(
+        HeaderName::from_lowercase(state.config.headers.auth_header.as_bytes()).unwrap(),
+    )))
+    .layer(
+        ServiceBuilder::new()
+            .layer(HandleErrorLayer::new(handle_blocked_error))
+            .layer(FilterLayer::new(
+                BlockInvalidHeaders::new(state.clone()).await,
+            )),
+    )
+}
diff --git a/api/src/routes/public.rs b/api/src/routes/public.rs
new file mode 100644
index 00000000..83552fdc
--- /dev/null
+++ b/api/src/routes/public.rs
@@ -0,0 +1,64 @@
+use std::sync::Arc;
+
+use axum::{
+    routing::{get, post},
+    Router,
+};
+use integrationos_domain::{
+    common_model::CommonModel, connection_definition::ConnectionDefinition,
+};
+use tower_http::trace::TraceLayer;
+
+use crate::{
+    endpoints::{
+        common_enum, common_model, connection_definition, connection_model_schema,
+        connection_oauth_definition, event_access::create_event_access_for_new_user, openapi, read,
+        read_cached,
+    },
+    middleware::jwt_auth::{self, JwtState},
+    server::AppState,
+};
+
+const OBFUSCATED_ENDPOINT: &str =
+    "/e7262bf18c81bc1ff7f726e6d1a6da59f6e77dde0d63d9b60c041af57be8c197";
+
+pub fn get_router(state: &Arc<AppState>) -> Router<Arc<AppState>> {
+    Router::new()
+        .route(
+            "/event-access/default",
+            post(create_event_access_for_new_user).layer(axum::middleware::from_fn_with_state(
+                Arc::new(JwtState::new(state)),
+                jwt_auth::jwt_auth,
+            )),
+        )
+        .route(
+            "/connection-definitions",
+            get(read_cached::<connection_definition::CreateRequest, ConnectionDefinition>),
+        )
+        .route(
+            "/connection-oauth-definition-schema",
+            get(read_cached::<
+                connection_oauth_definition::FrontendOauthConnectionDefinition,
+                connection_oauth_definition::FrontendOauthConnectionDefinition,
+            >),
+        )
+        .route("/openapi", get(openapi::get_openapi))
+        .route(
+            "/connection-data/models/:platform_name",
+            get(connection_model_schema::public_get_platform_models),
+        )
+        .route(
+            "/connection-data/:model/:platform_name",
+            get(connection_definition::public_get_connection_details),
+        )
+        .nest(
+            OBFUSCATED_ENDPOINT,
+            Router::new()
+                .route(
+                    "/common-models",
+                    get(read::<common_model::CreateRequest, CommonModel>),
+                )
+                .route("/common-enums", get(common_enum::read)),
+        )
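+        // Editor's note: the 64-hex-character prefix above hides these unauthenticated,
+        // read-only routes from casual discovery; as far as this patch shows, that
+        // obscurity is the only gate in front of them.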
+        .layer(TraceLayer::new_for_http())
+}
diff --git a/api/src/server.rs b/api/src/server.rs
new file mode 100644
index 00000000..0ad11ef8
--- /dev/null
+++ b/api/src/server.rs
@@ -0,0 +1,304 @@
+use crate::{
+    config::Config,
+    endpoints::{
+        connection_oauth_definition::FrontendOauthConnectionDefinition, openapi::OpenAPIData,
+        GetCache,
+    },
+    metrics::Metric,
+    routes,
+};
+use anyhow::{anyhow, Context, Result};
+use axum::Router;
+use http::HeaderValue;
+use integrationos_domain::{
+    algebra::crypto::Crypto,
+    common::{
+        common_model::CommonModel,
+        connection_definition::ConnectionDefinition,
+        connection_model_definition::ConnectionModelDefinition,
+        connection_model_schema::{ConnectionModelSchema, PublicConnectionModelSchema},
+        connection_oauth_definition::{ConnectionOAuthDefinition, Settings},
+        cursor::Cursor,
+        event_access::EventAccess,
+        mongo::MongoDbStore,
+        stage::Stage,
+        Connection, Event, Pipeline, Store, Transaction,
+    },
+    common_model::CommonEnum,
+    connection_definition::PublicConnectionDetails,
+    service::unified_destination::UnifiedDestination,
+};
+use moka::future::Cache;
+use mongodb::{options::UpdateOptions, Client, Database};
+use segment::{AutoBatcher, Batcher, HttpClient};
+use std::{sync::Arc, time::Duration};
+use tokio::{sync::mpsc::Sender, time::timeout, try_join};
+use tracing::{error, info, trace, warn};
+
+#[derive(Clone)]
+pub struct AppStores {
+    pub db: Database,
+    pub model_config: MongoDbStore<ConnectionModelDefinition>,
+    pub oauth_config: MongoDbStore<ConnectionOAuthDefinition>,
+    pub frontend_oauth_config: MongoDbStore<FrontendOauthConnectionDefinition>,
+    pub model_schema: MongoDbStore<ConnectionModelSchema>,
+    pub public_model_schema: MongoDbStore<PublicConnectionModelSchema>,
+    pub common_model: MongoDbStore<CommonModel>,
+    pub common_enum: MongoDbStore<CommonEnum>,
+    pub connection: MongoDbStore<Connection>,
+    pub public_connection_details: MongoDbStore<PublicConnectionDetails>,
+    pub settings: MongoDbStore<Settings>,
+    pub connection_config: MongoDbStore<ConnectionDefinition>,
+    pub pipeline: MongoDbStore<Pipeline>,
+    pub event_access: MongoDbStore<EventAccess>,
+    pub event: MongoDbStore<Event>,
+    pub transactions: MongoDbStore<Transaction>,
+    pub cursors: MongoDbStore<Cursor>,
+    pub stages: MongoDbStore<Stage>,
+}
+
+#[derive(Clone)]
+pub struct AppState {
+    pub app_stores: AppStores,
+    pub config: Config,
+    pub cache: Cache<HeaderValue, Arc<EventAccess>>,
+    pub openapi_data: OpenAPIData,
+    pub http_client: reqwest::Client,
+    pub connections_cache: Cache<(Arc<str>, HeaderValue), Arc<Connection>>,
+    pub connection_definitions_cache: GetCache<ConnectionDefinition>,
+    pub connection_oauth_definitions_cache: GetCache<FrontendOauthConnectionDefinition>,
+    pub secrets_client: Arc<dyn Crypto + Sync + Send>,
+    pub extractor_caller: UnifiedDestination,
+    pub event_tx: Sender<Event>,
+    pub metric_tx: Sender<Metric>,
+}
+
+#[derive(Clone)]
+pub struct Server {
+    state: Arc<AppState>,
+}
+
+impl Server {
+    pub async fn init(
+        config: Config,
+        secrets_client: Arc<dyn Crypto + Sync + Send>,
+    ) -> Result<Self> {
+        let client = Client::with_uri_str(&config.db_config.control_db_url).await?;
+        let db = client.database(&config.db_config.control_db_name);
+
+        let http_client = reqwest::ClientBuilder::new()
+            .timeout(Duration::from_secs(config.http_client_timeout_secs))
+            .build()?;
+        let model_config =
+            MongoDbStore::new_with_db(db.clone(), Store::ConnectionModelDefinitions).await?;
+        let oauth_config =
+            MongoDbStore::new_with_db(db.clone(), Store::ConnectionOAuthDefinitions).await?;
+        let frontend_oauth_config =
+            MongoDbStore::new_with_db(db.clone(), Store::ConnectionOAuthDefinitions).await?;
+        let model_schema =
+            MongoDbStore::new_with_db(db.clone(), Store::ConnectionModelSchemas).await?;
+        let public_model_schema =
+            MongoDbStore::new_with_db(db.clone(), Store::PublicConnectionModelSchemas).await?;
+        let common_model = MongoDbStore::new_with_db(db.clone(), Store::CommonModels).await?;
+        let common_enum = MongoDbStore::new_with_db(db.clone(), Store::CommonEnums).await?;
+        let connection = MongoDbStore::new_with_db(db.clone(), Store::Connections).await?;
+        let public_connection_details =
+            MongoDbStore::new_with_db(db.clone(), Store::PublicConnectionDetails).await?;
+        let settings = MongoDbStore::new_with_db(db.clone(), Store::Settings).await?;
+        let connection_config =
+            MongoDbStore::new_with_db(db.clone(), Store::ConnectionDefinitions).await?;
+        let pipeline = MongoDbStore::new_with_db(db.clone(), Store::Pipelines).await?;
+        let event_access = MongoDbStore::new_with_db(db.clone(), Store::EventAccess).await?;
+        let event = MongoDbStore::new_with_db(db.clone(), Store::Events).await?;
+        let transactions = MongoDbStore::new_with_db(db.clone(), Store::Transactions).await?;
+        let cursors = MongoDbStore::new_with_db(db.clone(), Store::Cursors).await?;
+        let stages = MongoDbStore::new_with_db(db.clone(), Store::Stages).await?;
+
+        let extractor_caller = UnifiedDestination::new(
+            config.db_config.clone(),
+            config.cache_size,
+            secrets_client.clone(),
+        )
+        .await
+        .with_context(|| "Could not initialize extractor caller")?;
+
+        let app_stores = AppStores {
+            db: db.clone(),
+            model_config,
+            oauth_config,
+            frontend_oauth_config,
+            model_schema,
+            public_model_schema,
+            settings,
+            common_model,
+            common_enum,
+            connection,
+            public_connection_details,
+            connection_config,
+            pipeline,
+            event_access,
+            event,
+            transactions,
+            cursors,
+            stages,
+        };
+
+        let cache = Cache::builder()
+            .max_capacity(config.cache_size)
+            .time_to_live(Duration::from_secs(config.access_key_cache_ttl_secs))
+            .build();
+        let connections_cache = Cache::new(config.cache_size);
+        let connection_definitions_cache =
+            Arc::new(Cache::builder().max_capacity(config.cache_size).build());
+        let connection_oauth_definitions_cache =
+            Arc::new(Cache::builder().max_capacity(config.cache_size).build());
+        let openapi_data = OpenAPIData::default();
+        openapi_data.spawn_openapi_generation(
+            app_stores.common_model.clone(),
+            app_stores.common_enum.clone(),
+        );
+
+        // Create Event buffer in separate thread and batch saves
+        let events = db.collection::<Event>(&Store::Events.to_string());
+        let (event_tx, mut receiver) =
+            tokio::sync::mpsc::channel::<Event>(config.event_save_buffer_size);
+        tokio::spawn(async move {
+            let mut buffer = Vec::with_capacity(config.event_save_buffer_size);
+            loop {
+                let res = timeout(
+                    Duration::from_secs(config.event_save_timeout_secs),
+                    receiver.recv(),
+                )
+                .await;
+                let is_timeout = if let Ok(Some(event)) = res {
+                    buffer.push(event);
+                    false
+                } else if let Ok(None) = res {
+                    break;
+                } else {
+                    trace!("Event receiver timed out waiting for new event");
+                    true
+                };
+                // Save when buffer is full or timeout elapsed
+                if buffer.len() == config.event_save_buffer_size
+                    || (is_timeout && !buffer.is_empty())
+                {
+                    trace!("Saving {} events", buffer.len());
+                    let to_save = std::mem::replace(
+                        &mut buffer,
+                        Vec::with_capacity(config.event_save_buffer_size),
+                    );
+                    let events = events.clone();
+                    tokio::spawn(async move {
+                        if let Err(e) = events.insert_many(to_save, None).await {
+                            error!("Could not save buffer of events: {e}");
+                        }
+                    });
+                }
+            }
+        });
+
+        // Update metrics in separate thread
+        let client = HttpClient::default();
+        let batcher = Batcher::new(None);
+        let mut batcher = config
+            .segment_write_key
+            .as_ref()
+            .map(|k| AutoBatcher::new(client, batcher, k.to_string()));
+
+        let metrics = db.collection::<Metric>(&Store::Metrics.to_string());
+        let (metric_tx, mut receiver) =
+            tokio::sync::mpsc::channel::<Metric>(config.metric_save_channel_size);
+        let metric_system_id = config.metric_system_id.clone();
+        tokio::spawn(async move {
+            let options = UpdateOptions::builder().upsert(true).build();
+
+            loop {
+                let res = timeout(
+                    Duration::from_secs(config.event_save_timeout_secs),
+                    receiver.recv(),
+                )
+                .await;
+                if let Ok(Some(metric)) = res {
+                    let doc = metric.update_doc();
+                    let client = metrics.update_one(
+                        bson::doc! {
+                            "clientId": &metric.ownership().client_id,
+                        },
+                        doc.clone(),
+                        options.clone(),
+                    );
+                    let system = metrics.update_one(
+                        bson::doc! {
+                            "clientId": metric_system_id.as_str(),
+                        },
+                        doc,
+                        options.clone(),
+                    );
+                    if let Err(e) = try_join!(client, system) {
+                        error!("Could not upsert metric: {e}");
+                    }
+
+                    if let Some(ref mut batcher) = batcher {
+                        let msg = metric.segment_track();
+                        if let Err(e) = batcher.push(msg).await {
+                            warn!("Tracking msg is too large: {e}");
+                        }
+                    }
+                } else if let Ok(None) = res {
+                    break;
+                } else {
+                    trace!("Event receiver timed out waiting for new event");
+                    if let Some(ref mut batcher) = batcher {
+                        if let Err(e) = batcher.flush().await {
+                            warn!("Tracking flush is too large: {e}");
+                        }
+                    }
+                }
+            }
+            if let Some(ref mut batcher) = batcher {
+                if let Err(e) = batcher.flush().await {
+                    warn!("Tracking flush is too large: {e}");
+                }
+            }
+        });
+
+        Ok(Self {
+            state: Arc::new(AppState {
+                app_stores,
+                config,
+                cache,
+                http_client,
+                connections_cache,
+                connection_definitions_cache,
+                connection_oauth_definitions_cache,
+                openapi_data,
+                secrets_client,
+                extractor_caller,
+                event_tx,
+                metric_tx,
+            }),
+        })
+    }
+
+    pub async fn run(&self) -> Result<()> {
+        let app = self.get_router().await;
+
+        let app: Router<()> = app.with_state(self.state.clone());
+
+        info!("Api server listening on {}", self.state.config.address);
+
+        axum::Server::bind(&self.state.config.address)
+            .serve(app.into_make_service())
+            .await
+            .map_err(|e| anyhow!("Server error: {}", e))
+    }
+
+    async fn get_router(&self) -> Router<Arc<AppState>> {
+        if self.state.config.is_admin {
+            routes::get_admin_router(&self.state)
+        } else {
+            routes::get_public_router(&self.state).await
+        }
+    }
+}
diff --git a/api/src/util/mod.rs b/api/src/util/mod.rs
new file mode 100644
index 00000000..7113453d
--- /dev/null
+++ b/api/src/util/mod.rs
@@ -0,0 +1,5 @@
+pub mod openapi_builder;
+pub mod shape_mongo_filter;
+
+pub use openapi_builder::*;
+pub use shape_mongo_filter::*;
diff --git a/api/src/util/openapi_builder.rs b/api/src/util/openapi_builder.rs
new file mode 100644
index 00000000..a693e728
--- /dev/null
+++ b/api/src/util/openapi_builder.rs
@@ -0,0 +1,675 @@
+use convert_case::{Case, Casing};
+use indexmap::IndexMap;
+use integrationos_domain::common::{
+    common_model::CommonModel, connection_model_definition::CrudAction,
+};
+use openapiv3::*;
+use strum::IntoEnumIterator;
+use tracing::debug;
+
+#[derive(Debug, Clone, PartialEq)]
+struct PathItemAction {
+    item: PathItem,
+    path: String,
+}
+
+// OPENAPI METADATA
+const URI: &str = "https://api.integrationos.com/v1/unified";
+const OPENAPI_VERSION: &str = "3.0.3";
+const SPEC_VERSION: &str = "1.0.0";
+const TITLE: &str = "Common Models";
+const X_SPEAKEASY_NAME_OVERRIDE: &str = "x-speakeasy-name-override";
+const X_SPEAKEASY_IGNORE: &str = "x-speakeasy-ignore";
+const X_INTEGRATIONOS_SECRET: &str = "X-INTEGRATIONOS-SECRET";
+const X_INTEGRATIONOS_CONNECTION_KEY: &str = "X-INTEGRATIONOS-CONNECTION-KEY";
+const X_INTEGRATIONOS_ENABLE_PASSTHROUGH: &str = "X-INTEGRATIONOS-ENABLE-PASSTHROUGH";
+const X_INTEGRATIONOS_PASSTHROUGH_FORWARD: &str = "X-INTEGRATIONOS-PASSTHROUGH-FORWARD";
+
+pub fn generate_path_item(common_model: &CommonModel) -> IndexMap<String, ReferenceOr<PathItem>> {
+    IndexMap::from_iter(
+        items(common_model)
+            .iter()
+            .map(|item| (item.path.to_string(), ReferenceOr::Item(item.item.clone())))
+            .collect::<Vec<(String, ReferenceOr<PathItem>)>>(),
+    )
+}
+
+pub fn generate_openapi_schema(
+    path: Vec<IndexMap<String, ReferenceOr<PathItem>>>,
+    schemas: IndexMap<String, ReferenceOr<Schema>>,
+) -> Box<OpenAPI> {
+    debug!(
+        "Generating OpenAPI schema for common models: {}",
+        path.len()
+    );
+
+    let paths = path
+        .iter()
+        .fold(Paths::default(), |mut paths_acc, common_model| {
+            paths_acc.paths.extend(common_model.clone());
+
+            paths_acc.paths.sort_keys();
+
+            paths_acc
+        });
+
+    debug!("All common models processed");
+
+    Box::new(OpenAPI {
+        openapi: OPENAPI_VERSION.to_string(),
+        info: Info {
+            title: TITLE.to_string(),
+            version: SPEC_VERSION.to_string(),
+            ..Default::default()
+        },
+        servers: vec![Server {
+            url: URI.to_string(),
+            ..Default::default()
+        }],
+        paths,
+        components: Some(Components {
+            schemas,
+            ..Default::default()
+        }),
+        security: Some(vec![IndexMap::from_iter(vec![(
+            "secret".to_string(),
+            vec![],
+        )])]),
+        ..Default::default()
+    })
+}
+
+fn items(common_model: &CommonModel) -> [PathItemAction; 3] {
+    [
+        PathItemAction {
+            path: format!("/{}/{{id}}", common_model.name.to_case(Case::Kebab)),
+            item: PathItem {
+                get: Some(operation(&CrudAction::GetOne, common_model)),
+                delete: Some(operation(&CrudAction::Delete, common_model)),
+                patch: Some(operation(&CrudAction::Update, common_model)),
+                parameters: header(),
+                ..Default::default()
+            },
+        },
+        PathItemAction {
+            path: format!("/{}", common_model.name.to_case(Case::Kebab)),
+            item: PathItem {
+                description: Some(CrudAction::GetMany.description().into()),
+                get: Some(operation(&CrudAction::GetMany, common_model)),
+                post: Some(operation(&CrudAction::Create, common_model)),
+                parameters: header(),
+                ..Default::default()
+            },
+        },
+        PathItemAction {
+            path: format!("/{}/count", common_model.name.to_case(Case::Kebab)),
+            item: PathItem {
+                description: Some(CrudAction::GetCount.description().into()),
+                get: Some(operation(&CrudAction::GetCount, common_model)),
+                parameters: header(),
+                ..Default::default()
+            },
+        },
+    ]
+}
+
+fn operation(action: &CrudAction, common_model: &CommonModel) -> Operation {
+    let summary = match action {
+        CrudAction::GetOne => format!("Get {}", common_model.name.to_case(Case::Pascal)),
+        CrudAction::GetMany => format!("List {}", common_model.name.to_case(Case::Pascal)),
+        CrudAction::GetCount => format!("Get {} count", common_model.name.to_case(Case::Pascal)),
+        CrudAction::Create => format!("Create {}", common_model.name.to_case(Case::Pascal)),
+        CrudAction::Update => format!("Update {}", common_model.name.to_case(Case::Pascal)),
+        CrudAction::Delete => format!("Delete {}", common_model.name.to_case(Case::Pascal)),
+        _ => unimplemented!("Not implemented yet"),
+    };
+
+    let description = match action {
+        CrudAction::GetOne => format!(
+            "Get a single {} record",
+            common_model.name.to_case(Case::Pascal)
+        ),
+        CrudAction::GetMany => format!(
+            "Get all {} records",
+            common_model.name.to_case(Case::Pascal)
+        ),
+        CrudAction::GetCount => format!(
+            "Get the count of {} records",
+            common_model.name.to_case(Case::Pascal)
+        ),
+        CrudAction::Create => format!(
+            "Create a single {} record",
+            common_model.name.to_case(Case::Pascal)
+        ),
+        CrudAction::Update => format!(
+            "Update a single {} record",
+            common_model.name.to_case(Case::Pascal)
+        ),
+        CrudAction::Delete => format!(
+            "Delete a single {}
record", + common_model.name.to_case(Case::Pascal) + ), + _ => unimplemented!("Not implemented yet"), + }; + + let response = IndexMap::from_iter(vec![( + StatusCode::Code(200), + ReferenceOr::Item(Response { + description: "Successful response".to_string(), + content: content(action, common_model), + ..Default::default() + }), + )]); + + let extensions = IndexMap::from_iter(vec![( + X_SPEAKEASY_NAME_OVERRIDE.to_string(), + serde_json::Value::String( + { + match action { + CrudAction::GetOne => "get", + CrudAction::GetMany => "list", + CrudAction::GetCount => "count", + CrudAction::Create => "create", + CrudAction::Update => "update", + CrudAction::Delete => "delete", + _ => unimplemented!("Not implemented yet"), + } + } + .to_string(), + ), + )]); + + Operation { + tags: vec![common_model.name.to_owned()], + summary: Some(summary), + description: Some(description), + parameters: parameter(action), + request_body: body(action, common_model), + responses: Responses { + responses: response, + ..Default::default() + }, + extensions, + ..Default::default() + } +} + +fn parameter(action: &CrudAction) -> Vec> { + let passthrough_query_param = ReferenceOr::Item(Parameter::Query { + parameter_data: ParameterData { + name: "passthroughForward".to_string(), + description: Some("A string of all query parameters to forward in the request to the 3rd-party platform".to_string()), + required: false, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: Some(serde_json::Value::String("customParam=sample&customParam2=123".to_string())), + examples: Default::default(), + explode: Default::default(), + extensions: Default::default(), + }, + style: QueryStyle::Form, + allow_reserved: false, + allow_empty_value: None, + }); + + let path = vec![ + ReferenceOr::Item(Parameter::Path { + parameter_data: ParameterData { + name: "id".to_string(), + description: Some("The id of the model".to_string()), + required: true, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: serde_json::to_value("cm::F5YOwU3hwyA::vTW3YaBvT3CHilxcppJOrQ").ok(), + examples: Default::default(), + explode: Default::default(), + extensions: Default::default(), + }, + style: PathStyle::Simple, + }), + passthrough_query_param.clone(), + ]; + match action { + CrudAction::Create => vec![passthrough_query_param], + CrudAction::GetCount => vec![passthrough_query_param], + CrudAction::GetOne => path, + CrudAction::Delete => path + .into_iter() + .chain(vec![ReferenceOr::Item(Parameter::Query { + parameter_data: ParameterData { + name: MODIFY_TOKEN.to_string(), + description: Some("The modified token of the model".to_string()), + required: false, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: Default::default(), + examples: Default::default(), + explode: Default::default(), + extensions: 
Default::default(), + }, + style: QueryStyle::Form, + allow_reserved: false, + allow_empty_value: None, + })]) + .collect(), + CrudAction::Update => path + .into_iter() + .chain(vec![ReferenceOr::Item(Parameter::Query { + parameter_data: ParameterData { + name: MODIFY_TOKEN.to_string(), + description: Some("The modified token of the model".to_string()), + required: false, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: Default::default(), + examples: Default::default(), + explode: Default::default(), + extensions: Default::default(), + }, + style: QueryStyle::Form, + allow_reserved: false, + allow_empty_value: None, + })]) + .collect(), + CrudAction::GetMany => [ + LIMIT, + CURSOR, + CREATED_AFTER, + CREATED_BEFORE, + UPDATED_AFTER, + UPDATED_BEFORE, + ] + .iter() + .map(|name| { + ReferenceOr::Item(Parameter::Query { + parameter_data: ParameterData { + name: name.to_string(), + description: None, + required: false, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: Default::default(), + examples: Default::default(), + explode: Default::default(), + extensions: Default::default(), + }, + style: QueryStyle::Form, + allow_reserved: false, + allow_empty_value: None, + }) + }) + .chain(vec![passthrough_query_param]) + .collect(), + _ => vec![], + } +} + +fn header() -> Vec> { + vec![ + ReferenceOr::Item(Parameter::Header { + parameter_data: ParameterData { + name: X_INTEGRATIONOS_SECRET.to_string(), + description: Some("IntegrationOS API key".to_string()), + required: true, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + pattern: None, + enumeration: vec![], + min_length: None, + max_length: None, + })), + })), + example: None, + examples: Default::default(), + explode: Default::default(), + extensions: IndexMap::from_iter(vec![( + X_SPEAKEASY_IGNORE.to_string(), + serde_json::Value::Bool(true), + )]), + }, + style: HeaderStyle::Simple, + }), + ReferenceOr::Item(Parameter::Header { + parameter_data: ParameterData { + name: X_INTEGRATIONOS_CONNECTION_KEY.to_string(), + description: Some("The unique identifier of a Connected Account".to_string()), + required: true, + deprecated: Some(false), + format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema { + schema_data: Default::default(), + schema_kind: SchemaKind::Type(Type::String(StringType { + format: VariantOrUnknownOrEmpty::Unknown("string".to_string()), + ..Default::default() + })), + })), + example: Some(serde_json::Value::String("customHeader=sample;customHeader2=123".to_string())), + examples: Default::default(), + explode: Default::default(), + extensions: Default::default(), + }, + style: HeaderStyle::Simple, + }), + ReferenceOr::Item(Parameter::Header { + parameter_data: ParameterData { + name: X_INTEGRATIONOS_ENABLE_PASSTHROUGH.to_string(), + description: Some("Set to true to receive the exact API response from the 
connection platform in the passthrough object".to_string()),
+                required: false,
+                deprecated: Some(false),
+                format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema {
+                    schema_data: Default::default(),
+                    schema_kind: SchemaKind::Type(Type::String(StringType {
+                        format: VariantOrUnknownOrEmpty::Unknown("string".to_string()),
+                        ..Default::default()
+                    })),
+                })),
+                example: Some(serde_json::Value::String("true".to_string())),
+                examples: Default::default(),
+                explode: Default::default(),
+                extensions: Default::default(),
+            },
+            style: HeaderStyle::Simple,
+        }),
+        ReferenceOr::Item(Parameter::Header {
+            parameter_data: ParameterData {
+                name: X_INTEGRATIONOS_PASSTHROUGH_FORWARD.to_string(),
+                description: Some("A string of all headers to forward in the request to the 3rd-party platform".to_string()),
+                required: false,
+                deprecated: Some(false),
+                format: ParameterSchemaOrContent::Schema(ReferenceOr::Item(Schema {
+                    schema_data: Default::default(),
+                    schema_kind: SchemaKind::Type(Type::String(StringType {
+                        format: VariantOrUnknownOrEmpty::Unknown("string".to_string()),
+                        ..Default::default()
+                    })),
+                })),
+                example: Some(serde_json::Value::String("customHeader=sample;customHeader2=123".to_string())),
+                examples: Default::default(),
+                explode: Default::default(),
+                extensions: Default::default(),
+            },
+            style: HeaderStyle::Simple,
+        }),
+    ]
+}
+
+fn content(action: &CrudAction, common_model: &CommonModel) -> IndexMap<String, MediaType> {
+    let mut content = IndexMap::new();
+    content.insert(
+        "application/json".to_string(),
+        MediaType {
+            schema: Some(ReferenceOr::Item(Schema {
+                schema_data: Default::default(),
+                schema_kind: SchemaKind::Type(Type::Object(ObjectType {
+                    properties: properties(action, common_model),
+                    ..Default::default()
+                })),
+            })),
+            example: Some(action.example(common_model)),
+            ..Default::default()
+        },
+    );
+    content
+}
+
+fn body(action: &CrudAction, common_model: &CommonModel) -> Option<ReferenceOr<RequestBody>> {
+    match action {
+        CrudAction::Create | CrudAction::Update => Some(ReferenceOr::Item(
+            common_model.request_body(CrudAction::Create == *action),
+        )),
+        _ => None,
+    }
+}
+
+fn string_schema(format: &str) -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Item(Box::new(Schema {
+        schema_data: Default::default(),
+        schema_kind: SchemaKind::Type(Type::String(StringType {
+            format: VariantOrUnknownOrEmpty::Unknown(format.to_string()),
+            ..Default::default()
+        })),
+    }))
+}
+
+fn integer_schema(format: IntegerFormat) -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Item(Box::new(Schema {
+        schema_data: Default::default(),
+        schema_kind: SchemaKind::Type(Type::Integer(IntegerType {
+            format: VariantOrUnknownOrEmpty::Item(format),
+            ..Default::default()
+        })),
+    }))
+}
+
+fn boolean_schema() -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Item(Box::new(Schema {
+        schema_data: Default::default(),
+        schema_kind: SchemaKind::Type(Type::Boolean(BooleanType {
+            enumeration: vec![],
+        })),
+    }))
+}
+
+fn array_schema(items: ReferenceOr<Box<Schema>>) -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Item(Box::new(Schema {
+        schema_data: Default::default(),
+        schema_kind: SchemaKind::Type(Type::Array(ArrayType {
+            items: Some(items),
+            max_items: None,
+            min_items: None,
+            unique_items: false,
+        })),
+    }))
+}
+
+fn object_schema(
+    properties: IndexMap<String, ReferenceOr<Box<Schema>>>,
+    additional_properties: Option<AdditionalProperties>,
+) -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Item(Box::new(Schema {
+        schema_data: Default::default(),
+        schema_kind: SchemaKind::Type(Type::Object(ObjectType {
+            properties,
+            additional_properties,
+            ..Default::default()
+        })),
+    }))
+}
+
+fn reference_schema(reference: &str) -> ReferenceOr<Box<Schema>> {
+    ReferenceOr::Reference {
+        reference: "#/components/schemas/".to_owned() + reference,
+    }
+}
+
+// OPENAPI PROPERTY KEYS
+const META: &str = "meta";
+const STATUS: &str = "status";
+const STATUS_CODE: &str = "statusCode";
+const UNIFIED: &str = "unified";
+const PASSTHROUGH: &str = "passthrough";
+const CONNECTION_DEFINITION_KEY: &str = "connectionDefinitionKey";
+const CONNECTION_KEY: &str = "connectionKey";
+const TRANSACTION_KEY: &str = "transactionKey";
+const TXN: &str = "txn";
+const PLATFORM: &str = "platform";
+const PLATFORM_VERSION: &str = "platformVersion";
+const ACTION: &str = "action";
+const COMMON_MODEL: &str = "commonModel";
+const COMMON_MODEL_VERSION: &str = "commonModelVersion";
+const HASH: &str = "hash";
+const MODIFY_TOKEN: &str = "modifyToken";
+const HEARTBEATS: &str = "heartbeats";
+const TOTAL_TRANSACTIONS: &str = "totalTransactions";
+const CACHE: &str = "cache";
+const HIT: &str = "hit";
+const TTL: &str = "ttl";
+const KEY: &str = "key";
+const RATE_LIMIT_REMAINING: &str = "rateLimitRemaining";
+const PLATFORM_RATE_LIMIT_REMAINING: &str = "platformRateLimitRemaining";
+const LATENCY: &str = "latency";
+const TIMESTAMP: &str = "timestamp";
+const COUNT: &str = "count";
+const PAGINATION: &str = "pagination";
+const CURSOR: &str = "cursor";
+const NEXT_CURSOR: &str = "nextCursor";
+const PREV_CURSOR: &str = "previousCursor";
+const LIMIT: &str = "limit";
+const CREATED_AFTER: &str = "createdAfter";
+const CREATED_BEFORE: &str = "createdBefore";
+const UPDATED_AFTER: &str = "updatedAfter";
+const UPDATED_BEFORE: &str = "updatedBefore";
+
+fn properties(
+    action: &CrudAction,
+    common_model: &CommonModel,
+) -> IndexMap<String, ReferenceOr<Box<Schema>>> {
+    let mut properties = IndexMap::new();
+
+    properties.insert(STATUS.to_owned(), string_schema("success | failure"));
+    properties.insert(STATUS_CODE.to_owned(), integer_schema(IntegerFormat::Int32));
+
+    match action {
+        CrudAction::GetOne | CrudAction::Create => {
+            properties.insert(
+                UNIFIED.to_owned(),
+                reference_schema(common_model.name.as_str()),
+            );
+        }
+        CrudAction::GetMany => {
+            properties.insert(
+                UNIFIED.to_owned(),
+                array_schema(reference_schema(common_model.name.as_str())),
+            );
+            properties.insert(
+                PAGINATION.to_owned(),
+                object_schema(
+                    IndexMap::from_iter(vec![
+                        (NEXT_CURSOR.to_owned(), string_schema("string")),
+                        (PREV_CURSOR.to_owned(), string_schema("string")),
+                        (LIMIT.to_owned(), integer_schema(IntegerFormat::Int32)),
+                    ]),
+                    None,
+                ),
+            );
+        }
+        CrudAction::GetCount => {
+            properties.insert(
+                UNIFIED.to_owned(),
+                object_schema(
+                    IndexMap::from_iter(vec![(
+                        COUNT.to_owned(),
+                        integer_schema(IntegerFormat::Int32),
+                    )]),
+                    None,
+                ),
+            );
+        }
+        CrudAction::Update => {
+            properties.insert(UNIFIED.to_owned(), object_schema(IndexMap::new(), None));
+        }
+        CrudAction::Delete => {
+            properties.insert(
+                UNIFIED.to_owned(),
+                reference_schema(common_model.name.as_str()),
+            );
+        }
+        CrudAction::Custom => unimplemented!("Not implemented yet"),
+    }
+
+    properties.insert(
+        PASSTHROUGH.to_owned(),
+        object_schema(IndexMap::new(), Some(AdditionalProperties::Any(true))),
+    );
+
+    properties.insert(
+        META.to_owned(),
+        object_schema(
+            IndexMap::from_iter(vec![
+                (TIMESTAMP.to_owned(), integer_schema(IntegerFormat::Int64)),
+                (LATENCY.to_owned(), integer_schema(IntegerFormat::Int32)),
+                (
+                    PLATFORM_RATE_LIMIT_REMAINING.to_owned(),
+                    integer_schema(IntegerFormat::Int32),
+                ),
+                (
+                    RATE_LIMIT_REMAINING.to_owned(),
+                    integer_schema(IntegerFormat::Int32),
+                ),
+                (
+                    TOTAL_TRANSACTIONS.to_owned(),
+                    integer_schema(IntegerFormat::Int32),
+                ),
+                (HASH.to_owned(), string_schema("string")),
+                (TRANSACTION_KEY.to_owned(), string_schema("string")),
+                (TXN.to_owned(), string_schema("string")),
+                (COMMON_MODEL.to_owned(), string_schema("string")),
+                (CONNECTION_KEY.to_owned(), string_schema("string")),
+                (PLATFORM.to_owned(), string_schema("string")),
+                (PLATFORM_VERSION.to_owned(), string_schema("string")),
+                (
+                    CONNECTION_DEFINITION_KEY.to_owned(),
+                    string_schema("string"),
+                ),
+                (
+                    ACTION.to_owned(),
+                    string_schema(
+                        &CrudAction::iter()
+                            .filter(|action| action != &CrudAction::Custom)
+                            .map(|action| action.to_string())
+                            .collect::<Vec<String>>()
+                            .join(" | "),
+                    ),
+                ),
+                (COMMON_MODEL_VERSION.to_owned(), string_schema("string")),
+                (KEY.to_owned(), string_schema("string")),
+                (HEARTBEATS.to_owned(), array_schema(string_schema("string"))),
+                (
+                    CACHE.to_owned(),
+                    object_schema(
+                        IndexMap::from_iter(vec![
+                            (HIT.to_owned(), boolean_schema()),
+                            (TTL.to_owned(), integer_schema(IntegerFormat::Int32)),
+                        ]),
+                        None,
+                    ),
+                ),
+            ]),
+            None,
+        ),
+    );
+
+    properties
+}
diff --git a/api/src/util/shape_mongo_filter.rs b/api/src/util/shape_mongo_filter.rs
new file mode 100644
index 00000000..f39cde2f
--- /dev/null
+++ b/api/src/util/shape_mongo_filter.rs
@@ -0,0 +1,173 @@
+use axum::extract::Query;
+use http::HeaderMap;
+use integrationos_domain::common::event_access::EventAccess;
+use mongodb::bson::{doc, Document};
+use std::{collections::BTreeMap, sync::Arc};
+
+pub const DELETED_STR: &str = "deleted";
+const OWNERSHIP_STR: &str = "ownership.buildableId";
+const ENVIRONMENT_STR: &str = "environment";
+const DUAL_ENVIRONMENT_HEADER: &str = "x-integrationos-show-all-environments";
+const LIMIT_STR: &str = "limit";
+const SKIP_STR: &str = "skip";
+
+#[derive(Debug, Clone)]
+pub struct MongoQuery {
+    pub filter: Document,
+    pub skip: u64,
+    pub limit: u64,
+}
+
+pub fn shape_mongo_filter(
+    query: Option<Query<BTreeMap<String, String>>>,
+    event_access: Option<Arc<EventAccess>>,
+    headers: Option<HeaderMap>,
+) -> MongoQuery {
+    let mut filter = doc!
{}; + let mut skip = 0; + let mut limit = 20; + + if let Some(q) = query { + for (key, value) in q.0.iter() { + if key == LIMIT_STR { + limit = value.parse().unwrap_or(20); + } else if key == SKIP_STR { + skip = value.parse().unwrap_or(0); + } else { + match value.as_str() { + "true" => filter.insert(key, true), + "false" => filter.insert(key, false), + _ => filter.insert(key, value.clone()), + }; + } + } + } + + filter.insert(DELETED_STR, false); + + if let Some(event_access) = event_access { + filter.insert(OWNERSHIP_STR, event_access.ownership.id.to_string()); + filter.insert(ENVIRONMENT_STR, event_access.environment.to_string()); + + if let Some(headers) = headers { + if let Some(show_dual_envs) = headers.get(DUAL_ENVIRONMENT_HEADER) { + if show_dual_envs == "true" { + filter.remove(ENVIRONMENT_STR); + } + } + } + } + + MongoQuery { + filter, + limit, + skip, + } +} + +#[cfg(test)] +mod test { + use std::{collections::BTreeMap, sync::Arc}; + + use axum::extract::Query; + use http::HeaderMap; + use integrationos_domain::{ + common::{ + connection_definition::{ConnectionDefinitionType, Paths}, + environment::Environment, + event_access::EventAccess, + ownership::Ownership, + record_metadata::RecordMetadata, + }, + id::{prefix::IdPrefix, Id}, + }; + + use crate::util::shape_mongo_filter::{ + MongoQuery, DELETED_STR, DUAL_ENVIRONMENT_HEADER, ENVIRONMENT_STR, LIMIT_STR, + OWNERSHIP_STR, SKIP_STR, + }; + + use super::shape_mongo_filter; + + #[test] + fn test_shape_mongo_filter() { + let params = BTreeMap::from([ + (DELETED_STR.to_string(), "true".to_string()), + (OWNERSHIP_STR.to_string(), "foo".to_string()), + (ENVIRONMENT_STR.to_string(), "bar".to_string()), + (SKIP_STR.to_string(), "10".to_string()), + (LIMIT_STR.to_string(), "10".to_string()), + ]); + + let MongoQuery { + filter: mut doc, + skip, + limit, + } = shape_mongo_filter(Some(Query(params.clone())), None, None); + assert_eq!(doc.get_str(OWNERSHIP_STR).unwrap(), "foo"); + assert_eq!(doc.get_str(ENVIRONMENT_STR).unwrap(), "bar"); + assert!(!doc.get_bool(DELETED_STR).unwrap()); + assert_eq!(limit, 10); + assert_eq!(skip, 10); + + doc.insert(DELETED_STR, true); + assert!(doc.get_bool(DELETED_STR).unwrap()); + + let event_access = Arc::new(EventAccess { + id: Id::now(IdPrefix::EventAccess), + name: "name".to_string(), + key: "key".to_string(), + namespace: "default".to_string(), + platform: "stripe".to_string(), + r#type: ConnectionDefinitionType::Api, + group: "group".to_string(), + ownership: Ownership::new("baz".to_string()), + paths: Paths::default(), + access_key: "access_key".to_string(), + environment: Environment::Test, + record_metadata: RecordMetadata::default(), + throughput: 1000, + }); + + let MongoQuery { filter: doc, .. 
} = + shape_mongo_filter(Some(Query(params)), Some(event_access), None); + assert_eq!(doc.get_str(OWNERSHIP_STR).unwrap(), "baz"); + assert_eq!(doc.get_str(ENVIRONMENT_STR).unwrap(), "test"); + } + + #[test] + fn requesting_dual_environments() { + let params = BTreeMap::from([ + (DELETED_STR.to_string(), "true".to_string()), + ("ownership.buildableId".to_string(), "foo".to_string()), + ("environment".to_string(), "bar".to_string()), + ]); + + let mut headers = HeaderMap::new(); + headers.insert(DUAL_ENVIRONMENT_HEADER, "true".parse().unwrap()); + + let event_access = Arc::new(EventAccess { + id: Id::now(IdPrefix::EventAccess), + name: "name".to_string(), + key: "key".to_string(), + namespace: "default".to_string(), + platform: "stripe".to_string(), + r#type: ConnectionDefinitionType::Api, + group: "group".to_string(), + ownership: Ownership::new("baz".to_string()), + paths: Paths::default(), + access_key: "access_key".to_string(), + environment: Environment::Test, + record_metadata: RecordMetadata::default(), + throughput: 1000, + }); + + let MongoQuery { filter: doc, .. } = shape_mongo_filter( + Some(Query(params.clone())), + Some(event_access), + Some(headers), + ); + + assert!(!doc.contains_key(ENVIRONMENT_STR)); + } +} diff --git a/api/tests/api_tests/auth_tests.rs b/api/tests/api_tests/auth_tests.rs new file mode 100644 index 00000000..b8182d36 --- /dev/null +++ b/api/tests/api_tests/auth_tests.rs @@ -0,0 +1,143 @@ +use api::endpoints::event_access::CreateEventAccessPayloadWithOwnership; +use fake::{Fake, Faker}; +use http::{ + header::{AUTHORIZATION, CONTENT_TYPE}, + Method, StatusCode, +}; +use serde_json::{json, Value}; + +use super::test_server::{ApiResponse, TestServer, AUTH_PATHS, PUBLIC_PATHS}; + +#[tokio::test] +async fn test_root() { + let server = TestServer::new(false, None).await; + + let res = server + .send_request::("", Method::GET, None, None) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); +} + +#[tokio::test] +async fn test_authorized() { + let server = TestServer::new(false, None).await; + + for path in AUTH_PATHS { + let res = server + .send_request::(&format!("v1/{path}"), Method::GET, None, None) + .await + .unwrap(); + assert_eq!( + res, + ApiResponse { + code: StatusCode::UNAUTHORIZED, + data: json!({"error": "Unauthorized"}) + } + ); + + let res = server + .send_request::( + &format!("v1/{path}"), + Method::GET, + Some("invalid_access_key"), + None, + ) + .await + .unwrap(); + assert_eq!( + res, + ApiResponse { + code: StatusCode::UNAUTHORIZED, + data: json!({"error": "Unauthorized"}) + } + ); + + let res = server + .send_request::( + &format!("v1/{path}"), + Method::GET, + Some(&server.live_key), + None, + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let res = server + .send_request::( + &format!("v1/{path}"), + Method::GET, + Some(&server.test_key), + None, + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + } +} + +#[tokio::test] +async fn test_unauthorized() { + let server = TestServer::new(false, None).await; + + for path in PUBLIC_PATHS { + let res = server + .send_request::(&format!("v1/public/{path}"), Method::GET, None, None) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + } +} + +#[tokio::test] +async fn test_404() { + let server = TestServer::new(false, None).await; + + for method in [Method::GET, Method::POST, Method::DELETE] { + let res = server + .send_request::("v1/invalid_path", method, Some(&server.live_key), None) + .await + .unwrap(); + + assert_eq!( + res, + 
ApiResponse { + code: StatusCode::NOT_FOUND, + data: json!({"error": "Not found"}) + } + ); + } +} + +#[tokio::test] +async fn test_jwt() { + let server = TestServer::new(false, None).await; + + let jwt_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJfaWQiOiI2NTc5ZDUxMGE2ZTQyMTAyMzM0NjI0ZjAiLCJlbWFpbCI6ImVtYWlsQHRlc3QuY29tIiwidXNlcm5hbWUiOiJ1c2VybmFtZSIsInVzZXJLZXkiOiJ1c2VyS2V5IiwiZmlyc3ROYW1lIjoiUGF1bCIsImxhc3ROYW1lIjoiSy4iLCJidWlsZGFibGVJZCI6ImJ1aWxkLTI2MTU4YWFlNzNjMDQ4YTU4YzdhNzU2NjcyNmU4OGY0IiwiY29udGFpbmVySWQiOiJjb250YWluZXItZDRmMGY4YjktMWE1Ni00ODQxLTg1OTctZmQzZDkwZGQ0OWI5IiwicG9pbnRlcnMiOlsiXzFfazFjbkI0Y1hGMzYtYUJJc2gtY1ZWTlZNZllGeE41MWlFTlQ1azlqcXFEbURWZlpJTjVVREhlN0JKRnJaUVJqTm54aEdaOUJNUGdlNjB6RVdVUnROaTUxdTIwdDJiYVJoQ3ZkYms5TkNIblNSV010WldhMmFlVW0wWUpreU1PNGNEUjdVUW5oVmNac3RqUEdfN0lfcV9ya015cjlwaFZoZ1VBa3BCaDVDTlQ2VDIwTDJGTFpoMFVtdldLVzloV3IzN0JWV19tb0hZODFZeUEiLCJfMV81WVU2Uk1kMHRwUVh3YnNvUWtHaUkzT1hPRlhrbms3TUVhaVdTQ1hoUWZKYzZ5N3RqZGROZGRjbXdWRjJmcTRSTktla0ZXRk80M0FMQWNJTVdIYkdYbW9IVVRaelV1eXhMalJ5MDI5Z0tGMlViRTFmTzZVRzR5QWhzbFBJMlpOZXNnT2NiakY1eUdwajdJbkdHNUFVck13NGY0bVdfR29FZVp1ZXBBd2E0WHhzNHB2TXd5d241djc1VzV0dFNJRGtLTHFqUlNUQlczaHpLUSJdLCJpc0J1aWxkYWJsZUNvcmUiOnRydWUsImlhdCI6MTcwMzEwODkwNCwiZXhwIjozMTU3NDYzMTA4OTA0LCJhdWQiOiJidWlsZGFibGUtdXNlcnMiLCJpc3MiOiJidWlsZGFibGUifQ.ecKXIGxXLWd6OearftRZVpGRhyDUVZFrYlwzhr-iG0A"; + + let event_access: CreateEventAccessPayloadWithOwnership = Faker.fake(); + let event_access = serde_json::to_value(&event_access).unwrap(); + + let res = server + .send_request_with_headers::( + "v1/public/event-access/default", + Method::POST, + None, + Some(&event_access), + Some( + vec![ + (AUTHORIZATION.to_string(), format!("Bearer {jwt_token}")), + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); +} diff --git a/api/tests/api_tests/get_tests.rs b/api/tests/api_tests/get_tests.rs new file mode 100644 index 00000000..3444b5e2 --- /dev/null +++ b/api/tests/api_tests/get_tests.rs @@ -0,0 +1,177 @@ +use std::{collections::HashMap, ops::Deref}; + +use api::endpoints::{common_model, ReadResponse}; +use fake::{Fake, Faker}; +use http::{Method, StatusCode}; +use integrationos_domain::common::{ + common_model::{CommonModel, DataType, Expandable, Field}, + json_schema::JsonSchema, +}; +use serde_json::{json, Value}; + +use crate::test_server::{test_gateway::TestGateway, TestServer}; + +#[tokio::test] +async fn test_get_events() { + let server = TestServer::new(false, None).await; + + let gateway = TestGateway::new(&server.config).await; + + let payload = json!({"foo":"bar"}); + + let event_response = gateway + .emit_event(&server.live_key, "name", &payload) + .await + .unwrap(); + assert_eq!(event_response.code, StatusCode::OK); + + let res = server + .send_request::("v1/events", Method::GET, Some(&server.live_key), None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + let res: ReadResponse = serde_json::from_value(res.data).unwrap(); + let array = res.rows; + assert_eq!(array.len(), 1); + assert_eq!(array[0]["body"], payload.to_string()); +} + +#[tokio::test] +async fn test_get_expanded_common_model() { + let server = TestServer::new(true, None).await; + + let reference: String = Faker.fake(); + + let base = common_model::CreateRequest { + name: Faker.fake(), + version: Faker.fake(), + fields: vec![ + Field { + name: Faker.fake(), + datatype: DataType::Expandable(Expandable::Unexpanded { + reference: reference.clone(), + }), + 
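+                // Editor's note (inferred from the assertions below): an `Unexpanded`
+                // expandable carries only the referenced model's name, and the
+                // `/expand` endpoint is expected to replace it with
+                // `Expandable::Expanded { reference, model }`, inlining the full model.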
required: true, + description: Faker.fake(), + }, + Field { + name: Faker.fake(), + datatype: DataType::Array { + element_type: Box::new(DataType::Expandable(Expandable::Unexpanded { + reference: reference.clone(), + })), + }, + required: true, + description: Faker.fake(), + }, + ], + category: Faker.fake(), + sample: json!({}), + primary: false, + }; + + let expandable = common_model::CreateRequest { + name: reference.clone(), + version: Faker.fake(), + fields: vec![], + category: Faker.fake(), + sample: json!({}), + primary: false, + }; + + let res = server + .send_request::( + "v1/common-models", + Method::POST, + None, + Some(&serde_json::to_value(expandable).unwrap()), + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + let expandable: CommonModel = serde_json::from_value(res.data).unwrap(); + + let res = server + .send_request::( + "v1/common-models", + Method::POST, + None, + Some(&serde_json::to_value(base).unwrap()), + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + let base: CommonModel = serde_json::from_value(res.data).unwrap(); + assert!(matches!( + base.fields[0].datatype, + DataType::Expandable(Expandable::Unexpanded { .. }) + )); + let DataType::Array { ref element_type } = base.fields[1].datatype else { + panic!("Incorrect datatype"); + }; + assert!(matches!( + element_type.deref(), + DataType::Expandable(Expandable::Unexpanded { .. }) + )); + + let res = server + .send_request::( + &format!("v1/common-models/{}/expand", base.id), + Method::GET, + None, + None, + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + let expanded: CommonModel = serde_json::from_value(res.data).unwrap(); + assert_eq!(expanded.name, base.name); + assert_eq!(expanded.category, base.category); + assert_eq!(expanded.fields[0].name, base.fields[0].name); + assert_eq!(expanded.fields[1].name, base.fields[1].name); + + let DataType::Expandable(Expandable::Expanded { + reference: ref new_ref, + ref model, + }) = expanded.fields[0].datatype + else { + panic!("Incorrect datatype"); + }; + + assert_eq!(new_ref, &reference); + let mut new_model = model.clone(); + new_model.interface = HashMap::new(); + assert_eq!(new_model, expandable); + + let DataType::Array { ref element_type } = expanded.fields[1].datatype else { + panic!("Incorrect datatype"); + }; + let DataType::Expandable(Expandable::Expanded { + reference: new_ref, + ref model, + }) = element_type.deref() + else { + panic!("Incorrect datatype"); + }; + assert_eq!(new_ref, &reference); + // it is expected that interface is empty on the right side as it is created on the server + // as a after effect of the create request + let mut new_model = model.clone(); + new_model.interface = HashMap::new(); + assert_eq!(new_model, expandable); + + let res = server + .send_request::( + &format!("v1/common-models/{}/schema", base.id), + Method::GET, + None, + None, + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); +} diff --git a/api/tests/api_tests/main.rs b/api/tests/api_tests/main.rs new file mode 100644 index 00000000..31543f68 --- /dev/null +++ b/api/tests/api_tests/main.rs @@ -0,0 +1,9 @@ +mod auth_tests; +mod get_tests; +mod pagination_tests; +mod passthrough_tests; +mod schema_tests; +mod test_crud; +mod test_server; +mod transaction_tests; +mod unified_tests; diff --git a/api/tests/api_tests/pagination_tests.rs b/api/tests/api_tests/pagination_tests.rs new file mode 100644 index 00000000..735977a7 --- /dev/null +++ b/api/tests/api_tests/pagination_tests.rs @@ -0,0 +1,80 @@ +use 
std::time::Duration; + +use api::endpoints::{pipeline::CreatePipelineRequest, ReadResponse}; +use fake::{Fake, Faker}; +use http::{Method, StatusCode}; +use integrationos_domain::common::Pipeline; +use serde_json::Value; +use tokio::time::sleep; + +use crate::test_server::TestServer; + +#[tokio::test] +async fn test_pagination() { + let server = TestServer::new(false, None).await; + + let mut pipelines = vec![]; + for _ in 0..10 { + let req: CreatePipelineRequest = Faker.fake(); + let res = server + .send_request::( + "v1/pipelines", + Method::POST, + Some(&server.live_key), + Some(&serde_json::to_value(&req).unwrap()), + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + let pipeline: Pipeline = serde_json::from_value(res.data).unwrap(); + let CreatePipelineRequest { + name, + key, + source, + destination, + middleware, + signature, + ref config, + } = req; + + assert_eq!(name, pipeline.name); + assert_eq!(key, pipeline.key); + assert_eq!(source, pipeline.source); + assert_eq!(destination, pipeline.destination); + assert_eq!(middleware, pipeline.middleware); + assert_eq!(signature, pipeline.signature); + assert_eq!(config, pipeline.config.as_ref().unwrap()); + + pipelines.push(pipeline); + sleep(Duration::from_millis(1)).await; + } + + let pipelines: Vec = pipelines.into_iter().rev().collect(); + + check_response(&server, 1, 0, &pipelines[..1]).await; + check_response(&server, 10, 0, &pipelines).await; + check_response(&server, 0, 10, &pipelines[10..]).await; + check_response(&server, 5, 0, &pipelines[..5]).await; + check_response(&server, 5, 5, &pipelines[5..]).await; + check_response(&server, 5, 10, &pipelines[10..]).await; +} + +async fn check_response(server: &TestServer, limit: u64, skip: u64, pipelines: &[Pipeline]) { + let res = server + .send_request::( + &format!("v1/pipelines?limit={limit}&skip={skip}"), + Method::GET, + Some(&server.live_key), + None, + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + let res: ReadResponse = serde_json::from_value(res.data).unwrap(); + assert_eq!(&res.rows, pipelines); + assert_eq!(res.limit, limit); + assert_eq!(res.skip, skip); + assert_eq!(res.total, 10); +} diff --git a/api/tests/api_tests/passthrough_tests.rs b/api/tests/api_tests/passthrough_tests.rs new file mode 100644 index 00000000..d127add9 --- /dev/null +++ b/api/tests/api_tests/passthrough_tests.rs @@ -0,0 +1,118 @@ +use api::endpoints::connection_model_definition::CreateRequest as CreateConnectionModelDefinitionRequest; +use fake::{faker::filesystem::raw::DirPath, locales::EN, Fake, Faker}; +use http::{ + header::{AUTHORIZATION, CONTENT_TYPE}, + Method, StatusCode, +}; +use integrationos_domain::common::{ + api_model_config::{AuthMethod, SamplesInput, SchemasInput}, + connection_model_definition::CrudAction, + environment::Environment, +}; +use mockito::Server; +use serde_json::Value; + +use crate::test_server::TestServer; + +#[tokio::test] +async fn test_passthrough_api() { + let mut server = TestServer::new(false, None).await; + let admin_server = + TestServer::new(true, Some(server.config.db_config.control_db_name.clone())).await; + + let (connection, conn_def) = server.create_connection(Environment::Live).await; + + let mut mock_server = Server::new_async().await; + let secret_key = Faker.fake::(); + let url_path: String = DirPath(EN).fake(); + let response_body = format!("{{\"id\": \"{}\"}}", Faker.fake::()); + + let mock = mock_server + .mock("GET", format!("{url_path}/customers").as_str()) + .match_header( + AUTHORIZATION.as_str(), + 
format!("Bearer {secret_key}").as_str(), + ) + .expect(1) + .with_status(200) + .with_body(response_body.clone()) + .create_async() + .await; + + let create_model_definition_payload = CreateConnectionModelDefinitionRequest { + connection_platform: connection.platform.to_string(), + connection_definition_id: conn_def.id, + platform_version: conn_def.record_metadata.version.to_string(), + title: Faker.fake(), + name: Faker.fake(), + model_name: Faker.fake(), + action_name: Faker.fake::(), + base_url: mock_server.url() + &url_path, + path: "customers".to_string(), + auth_method: AuthMethod::BearerToken { + value: secret_key.to_string(), + }, + http_method: http::Method::GET, + headers: None, + query_params: None, + extractor_config: None, + version: "1.0.0".parse().unwrap(), + schemas: SchemasInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + samples: SamplesInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + paths: None, + responses: vec![], + is_default_crud_mapping: None, + mapping: None, + }; + + let create_model_definition_response = admin_server + .send_request::( + "v1/connection-model-definitions", + Method::POST, + None, + Some(&serde_json::to_value(&create_model_definition_payload).unwrap()), + ) + .await + .unwrap(); + + assert_eq!(create_model_definition_response.code, StatusCode::OK); + + let call_universal_api = server + .send_request_with_headers::( + "v1/passthrough/customers", + Method::GET, + Some(&server.live_key), + None, + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + // assert_eq!(call_universal_api.code, StatusCode::OK); + assert_eq!( + call_universal_api.data, + serde_json::from_str::(&response_body).unwrap() + ); + + mock.assert_async().await; +} diff --git a/api/tests/api_tests/schema_tests.rs b/api/tests/api_tests/schema_tests.rs new file mode 100644 index 00000000..7a27224a --- /dev/null +++ b/api/tests/api_tests/schema_tests.rs @@ -0,0 +1,103 @@ +use api::endpoints::{connection_model_schema::CreateRequest, ReadResponse}; +use fake::{Fake, Faker}; +use http::{Method, StatusCode}; +use integrationos_domain::{ + common::{ + connection_model_schema::{ConnectionModelSchema, Mappings}, + environment::Environment, + json_schema::JsonSchema, + }, + id::{prefix::IdPrefix, Id}, +}; +use serde_json::Value; + +use crate::test_server::TestServer; + +#[tokio::test] +async fn test_connection_model_schema_api() { + let mut server = TestServer::new(false, None).await; + let admin_server = + TestServer::new(true, Some(server.config.db_config.control_db_name.clone())).await; + + let (_connection, conn_def) = server.create_connection(Environment::Live).await; + + let common_model_name = Faker.fake(); + + let mut create_connection_model_schema: CreateRequest = Faker.fake(); + create_connection_model_schema.connection_definition_id = conn_def.id; + create_connection_model_schema.mapping = Some(Mappings { + from_common_model: String::new(), + to_common_model: String::new(), + common_model_name, + common_model_id: Id::now(IdPrefix::ConnectionModelSchema), + unmapped_fields: JsonSchema::default(), + }); + + let create_response = admin_server + .send_request::( + "v1/connection-model-schemas", + Method::POST, + None, + Some(&create_connection_model_schema), + ) + .await + .unwrap(); + + assert_eq!(create_response.code, 
StatusCode::OK); + + let public_connection_model_schema = server + .send_request::>( + format!( + "v1/connection-model-schemas?connectionDefinitionId={}", + conn_def.id + ) + .as_str(), + Method::GET, + Some(&server.live_key), + None, + ) + .await + .unwrap(); + + assert_eq!(public_connection_model_schema.code, StatusCode::OK); + + let first_row = public_connection_model_schema + .data + .rows + .first() + .expect("No rows in response"); + + let mapping = first_row.get("mapping").expect("No mapping in row"); + + assert!( + mapping.get("fromCommonModel").is_none(), + "fromCommonModel should not be present" + ); + assert!( + mapping.get("toCommonModel").is_none(), + "toCommonModel should not be present" + ); + assert!( + mapping.get("commonModelId").is_none(), + "commonModelId should not be present" + ); + assert!( + mapping.get("unmappedFields").is_none(), + "unmappedFields should not be present" + ); +} + +#[tokio::test] +async fn test_connection_oauth_definition_schema_api() { + let server = TestServer::new(false, None).await; + let res = server + .send_request::( + "v1/public/connection-oauth-definition-schema", + Method::GET, + None, + None, + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); +} diff --git a/api/tests/api_tests/test_crud.rs b/api/tests/api_tests/test_crud.rs new file mode 100644 index 00000000..3058a251 --- /dev/null +++ b/api/tests/api_tests/test_crud.rs @@ -0,0 +1,179 @@ +use std::collections::HashMap; + +use crate::test_server::TestServer; +use api::endpoints::{ + common_model, connection_definition, connection_model_definition, connection_model_schema, + ReadResponse, +}; +use fake::{Fake, Faker}; +use http::{Method, StatusCode}; +use integrationos_domain::common::{ + common_model::CommonModel, connection_definition::ConnectionDefinition, + connection_model_definition::ConnectionModelDefinition, + connection_model_schema::ConnectionModelSchema, +}; +use serde_json::Value; + +macro_rules! 
test_crud { + ($test:ident, $model:ty, $path:ident, $endpoint:expr) => { + #[tokio::test] + async fn $test() { + let server = TestServer::new(true, None).await; + + let payload: $path::CreateRequest = Faker.fake(); + let payload = serde_json::to_value(&payload).unwrap(); + + const ENDPOINT: &str = $endpoint; + + let res = server + .send_request::(ENDPOINT, Method::POST, None, Some(&payload)) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let model: $model = serde_json::from_value(res.data).unwrap(); + + let res = server + .send_request::(ENDPOINT, Method::GET, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let get_models: ReadResponse<$model> = serde_json::from_value(res.data).unwrap(); + assert_eq!(get_models.rows.len(), 1); + assert_eq!(get_models.rows[0], model); + + let payload: $path::CreateRequest = Faker.fake(); + let payload = serde_json::to_value(&payload).unwrap(); + + let path = format!("{ENDPOINT}/{}", model.id); + + let res = server + .send_request::(&path, Method::PATCH, None, Some(&payload)) + .await; + + let res = res.unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let res = server + .send_request::(&path, Method::DELETE, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let deleted: $model = serde_json::from_value(res.data).unwrap(); + assert_eq!(deleted.id, model.id); + + let res = server + .send_request::(ENDPOINT, Method::GET, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let get_models: ReadResponse<$model> = serde_json::from_value(res.data).unwrap(); + assert!(get_models.rows.is_empty()); + } + }; +} + +test_crud!( + test_connection_definitions_crud, + ConnectionDefinition, + connection_definition, + "v1/connection-definitions" +); + +test_crud!( + test_connection_model_definitions_crud, + ConnectionModelDefinition, + connection_model_definition, + "v1/connection-model-definitions" +); + +test_crud!( + test_connection_model_schema_crud, + ConnectionModelSchema, + connection_model_schema, + "v1/connection-model-schemas" +); + +#[tokio::test] +async fn test_common_model_crud() { + let server = TestServer::new(true, None).await; + + let payload: common_model::CreateRequest = Faker.fake(); + let payload = serde_json::to_value(&payload).unwrap(); + + const ENDPOINT: &str = "v1/common-models"; + + let res = server + .send_request::(ENDPOINT, Method::POST, None, Some(&payload)) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let mut model: CommonModel = serde_json::from_value(res.data).unwrap(); + model.interface = HashMap::new(); + + let res = server + .send_request::(ENDPOINT, Method::GET, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let mut get_models: ReadResponse = serde_json::from_value(res.data).unwrap(); + get_models + .rows + .iter_mut() + .for_each(|x| x.interface = HashMap::new()); + + assert_eq!(get_models.rows.len(), 1); + assert_eq!(get_models.rows[0], model); + + let payload: common_model::CreateRequest = Faker.fake(); + let payload = serde_json::to_value(&payload).unwrap(); + + let path = format!("{ENDPOINT}/{}", model.id); + + let res = server + .send_request::(&path, Method::PATCH, None, Some(&payload)) + .await; + + let res = res.unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let res = server + .send_request::(&path, Method::DELETE, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let mut deleted: CommonModel = 
serde_json::from_value(res.data).unwrap(); + deleted.interface = HashMap::new(); + + assert_eq!(deleted.id, model.id); + + let res = server + .send_request::(ENDPOINT, Method::GET, None, None) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + let mut get_models: ReadResponse = serde_json::from_value(res.data).unwrap(); + get_models + .rows + .iter_mut() + .for_each(|x| x.interface = HashMap::new()); + + assert!(get_models.rows.is_empty()); +} diff --git a/api/tests/api_tests/test_server/mod.rs b/api/tests/api_tests/test_server/mod.rs new file mode 100644 index 00000000..cf9d85ef --- /dev/null +++ b/api/tests/api_tests/test_server/mod.rs @@ -0,0 +1,513 @@ +use std::{ + collections::{BTreeMap, HashMap}, + sync::{Arc, OnceLock, RwLock}, + time::Duration, +}; + +use anyhow::Result; +use api::{ + config::Config, + endpoints::{ + connection_model_definition::CreateRequest as CreateConnectionModelDefinitionRequest, + ReadResponse, + }, + server::Server, +}; +use axum::async_trait; +use envconfig::Envconfig; +use fake::{Fake, Faker}; +use http::StatusCode; +use http::{header::AUTHORIZATION, Method}; +use integrationos_domain::{ + algebra::adapter::StoreAdapter, + common::{ + access_key_data::AccessKeyData, + access_key_prefix::AccessKeyPrefix, + api_model_config::{AuthMethod, SamplesInput, SchemasInput}, + connection_definition::{ConnectionDefinition, ConnectionDefinitionType}, + connection_model_definition::{ + ConnectionModelDefinition, CrudAction, CrudMapping, PlatformInfo, + }, + environment::Environment, + event_access::EventAccess, + event_type::EventType, + mongo::MongoDbStore, + AccessKey, Connection, Store, + }, + create_secret_response::{CreateSecretAuthor, CreateSecretResponse}, + get_secret_request::GetSecretRequest, + prelude::crypto::Crypto, + IntegrationOSError, +}; +use mockito::{Matcher, Server as MockServer, ServerGuard}; +use mongodb::Client; +use rand::Rng; +use serde::{de::DeserializeOwned, Serialize}; +use serde_json::Value; +use serde_json::{from_value, to_value}; +use testcontainers_modules::{ + mongo::Mongo, + redis::Redis, + testcontainers::{clients::Cli as Docker, Container}, +}; +use tokio::net::TcpListener; +use tracing_subscriber::{filter::LevelFilter, EnvFilter}; +use uuid::Uuid; + +use api::endpoints::{ + connection::CreateConnectionPayload, + connection_definition::CreateRequest as CreateConnectionDefinitionRequest, +}; + +pub mod test_core; +#[cfg(test)] +pub mod test_gateway; + +#[allow(dead_code)] +pub const AUTH_PATHS: &[&str] = &[ + "pipelines", + "events", + "transactions", + "connections", + "event-access", +]; + +#[allow(dead_code)] +pub const PUBLIC_PATHS: &[&str] = &["connection-definitions", "openapi"]; + +static TRACING: OnceLock<()> = OnceLock::new(); + +pub(crate) static DOCKER: OnceLock = OnceLock::new(); +static MONGO: OnceLock> = OnceLock::new(); +static REDIS: OnceLock> = OnceLock::new(); + +pub struct TestServer { + port: u16, + pub config: Config, + pub live_key: String, + pub live_access_key: AccessKey, + pub test_key: String, + pub test_access_key: AccessKey, + client: reqwest::Client, + pub mock_server: ServerGuard, + pub secrets_client: Arc, +} + +#[derive(Debug, Clone, Default)] +pub struct MockSecretsClient { + secrets: Arc>>, +} + +#[async_trait] +impl Crypto for MockSecretsClient { + async fn encrypt( + &self, + key: String, + value: &serde_json::Value, + ) -> std::result::Result { + let mut secrets = self.secrets.write().unwrap(); + let id: String = Faker.fake(); + let req = GetSecretRequest { + id, + buildable_id: 
key.clone(), + }; + secrets.insert(req.clone(), value.clone()); + + Ok(CreateSecretResponse { + id: req.id, + buildable_id: key, + created_at: 0f64, + author: CreateSecretAuthor { id: Faker.fake() }, + encrypted_secret: Faker.fake(), + }) + } + async fn decrypt( + &self, + secret: &GetSecretRequest, + ) -> std::result::Result { + let secrets = self.secrets.read().unwrap(); + let res = secrets.get(secret).unwrap(); + Ok(res.clone()) + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ApiResponse { + pub code: StatusCode, + pub data: T, +} + +impl TestServer { + pub async fn new(is_admin: bool, db_name: Option) -> Self { + // Init tracing once + TRACING.get_or_init(|| { + let filter = EnvFilter::builder() + .with_default_directive(LevelFilter::INFO.into()) + .from_env_lossy(); + + tracing_subscriber::fmt().with_env_filter(filter).init(); + }); + + // Get db connection + let docker = DOCKER.get_or_init(Default::default); + let redis = REDIS.get_or_init(|| docker.run(Redis)); + let mongo = MONGO.get_or_init(|| docker.run(Mongo)); + let host_port = mongo.get_host_port_ipv4(27017); + let db = format!("mongodb://127.0.0.1:{host_port}/?directConnection=true"); + + let host_port = redis.get_host_port_ipv4(6379); + let redis = format!("redis://127.0.0.1:{host_port}"); + + // Get available port for server to listen + let port = TcpListener::bind("127.0.0.1:0") + .await + .unwrap() + .local_addr() + .unwrap() + .port(); + + // Random database name + let db_name = db_name.unwrap_or_else(|| Uuid::new_v4().to_string()); + + let config = Config::init_from_hashmap(&HashMap::from([ + ("CONTROL_DATABASE_URL".to_string(), db.clone()), + ("CONTROL_DATABASE_NAME".to_string(), db_name.clone()), + ("CONTEXT_DATABASE_URL".to_string(), db.clone()), + ("CONTEXT_DATABASE_NAME".to_string(), db_name.clone()), + ("EVENT_DATABASE_URL".to_string(), db.clone()), + ("EVENT_DATABASE_NAME".to_string(), db_name.clone()), + ( + "INTERNAL_SERVER_ADDRESS".to_string(), + format!("0.0.0.0:{port}"), + ), + ("IS_ADMIN".to_string(), is_admin.to_string()), + ("CLAUDE_API_KEY".to_string(), "".to_string()), + ("OPENAI_API_KEY".to_string(), "".to_string()), + ("MOCK_LLM".to_string(), "true".to_string()), + ("CACHE_SIZE".to_string(), "0".to_string()), + ("REDIS_URL".to_string(), redis), + ])) + .unwrap(); + + let secrets_client = Arc::new(MockSecretsClient::default()); + + let data: AccessKeyData = Faker.fake(); + let ownership_id = data.id.clone(); + let prefix = AccessKeyPrefix { + environment: Environment::Live, + event_type: EventType::SecretKey, + version: 1, + }; + let live_access_key = AccessKey { + prefix, + data: data.clone(), + }; + let iv = rand::thread_rng().gen::<[u8; 16]>(); + let live_encrypted_key = live_access_key + .encode( + &config.event_access_password.as_bytes().try_into().unwrap(), + &iv, + ) + .unwrap(); + + let prefix = AccessKeyPrefix { + environment: Environment::Test, + event_type: EventType::SecretKey, + version: 1, + }; + let test_access_key = AccessKey { prefix, data }; + let test_encrypted_key = test_access_key + .encode( + &config.event_access_password.as_bytes().try_into().unwrap(), + &iv, + ) + .unwrap(); + + // Create live and test keys + let mut live: EventAccess = Faker.fake(); + live.throughput = 500; + live.ownership.id = ownership_id.clone().into(); + live.environment = Environment::Live; + live.record_metadata = Default::default(); + live.access_key = live_encrypted_key.to_string(); + + let mut test: EventAccess = Faker.fake(); + test.throughput = 500; + test.ownership.id =
ownership_id.into(); + test.environment = Environment::Test; + test.record_metadata = Default::default(); + test.access_key = test_encrypted_key.to_string(); + + let db = Client::with_uri_str(&db).await.unwrap().database(&db_name); + + let store: MongoDbStore = MongoDbStore::new_with_db(db, Store::EventAccess) + .await + .unwrap(); + + store + .create_many(&[live.clone(), test.clone()]) + .await + .unwrap(); + + let server = Server::init(config.clone(), secrets_client.clone()) + .await + .unwrap(); + + tokio::task::spawn(async move { server.run().await }); + + tokio::time::sleep(Duration::from_millis(50)).await; + + Self { + port, + config, + test_key: test.access_key, + test_access_key, + live_key: live.access_key, + live_access_key, + client: reqwest::Client::new(), + mock_server: MockServer::new_async().await, + secrets_client, + } + } + + pub async fn send_request( + &self, + path: &str, + method: http::Method, + key: Option<&str>, + payload: Option<&T>, + ) -> Result> { + self.send_request_with_headers(path, method, key, payload, None) + .await + } + + pub async fn send_request_with_headers( + &self, + path: &str, + method: http::Method, + key: Option<&str>, + payload: Option<&T>, + headers: Option>, + ) -> Result> { + let mut req = self + .client + .request(method, format!("http://localhost:{}/{path}", self.port)); + if let Some(key) = key { + req = req.header(&self.config.headers.auth_header, key); + } + if let Some(payload) = payload { + req = req.json(payload); + } + if let Some(headers) = headers { + for (k, v) in headers { + req = req.header(k, v); + } + } + + let res = req.send().await?; + + Ok(ApiResponse { + code: res.status(), + data: res.json().await?, + }) + } + + #[allow(dead_code)] + pub async fn create_connection( + &mut self, + env: Environment, + ) -> (Connection, ConnectionModelDefinition) { + let (key, access_key) = match env { + Environment::Live => (self.live_key.as_ref(), &self.live_access_key), + Environment::Development => (self.live_key.as_ref(), &self.test_access_key), + Environment::Test => (self.test_key.as_ref(), &self.test_access_key), + Environment::Production => (self.live_key.as_ref(), &self.live_access_key), + }; + + let admin_server = + TestServer::new(true, Some(self.config.db_config.control_db_name.clone())).await; + + let bearer_key: String = Faker.fake(); + let template: String = Faker.fake(); + let handlebar_template = format!("{{{{{template}}}}}"); + + let mut connection_def: CreateConnectionDefinitionRequest = Faker.fake(); + connection_def.r#type = ConnectionDefinitionType::Api; + let mut test_connection: CreateConnectionModelDefinitionRequest = Faker.fake(); + test_connection.base_url = self.mock_server.url(); + test_connection.auth_method = AuthMethod::BearerToken { + value: handlebar_template.clone(), + }; + test_connection.http_method = Method::GET; + + let res = admin_server + .send_request::( + "v1/connection-model-definitions", + http::Method::POST, + None, + Some(&test_connection), + ) + .await + .unwrap(); + + let mut test_connection = res.data; + + let api_config = match test_connection.platform_info { + PlatformInfo::Api(ref mut api_config_data) => api_config_data.clone(), + _ => panic!(), + }; + + let mut mock = self + .mock_server + .mock( + test_connection.action.as_str(), + format!("/{}", api_config.path).as_str(), + ) + .match_header( + AUTHORIZATION.as_str(), + format!("Bearer {bearer_key}").as_str(), + ); + if let Some(ref headers) = api_config.headers { + for k in headers.keys() { + let val: Vec = headers + .get_all(k) + 
.into_iter() + .map(|v| Matcher::from(v.to_str().unwrap())) + .collect(); + if val.len() == 1 { + mock = mock.match_header(k.as_str(), Matcher::AllOf(val)); + } + } + } + if let Some(ref query_params) = api_config.query_params { + let params = query_params + .iter() + .map(|(k, v)| Matcher::UrlEncoded(k.into(), v.into())) + .collect(); + + mock = mock.match_query(Matcher::AllOf(params)); + } + mock = mock + .expect(1) + .with_status(200) + .with_body("\"Charges listed\"") + .create_async() + .await; + + connection_def.test_connection = Some(test_connection.id); + + let payload = to_value(&connection_def).unwrap(); + + let res = admin_server + .send_request::( + "v1/connection-definitions", + http::Method::POST, + None, + Some(&payload), + ) + .await + .unwrap(); + + assert!(res.code.is_success()); + + let connection_def = from_value::(res.data).unwrap(); + + let res = self + .send_request::( + &format!("v1/public/connection-definitions?_id={}", connection_def.id), + http::Method::GET, + Some(key), + None, + ) + .await + .unwrap(); + + assert!(res.code.is_success()); + + let res = from_value::>(res.data).unwrap(); + + assert_eq!(res.rows.len(), 1); + + let payload = CreateConnectionPayload { + connection_definition_id: connection_def.id, + name: Faker.fake(), + group: access_key.data.group.clone(), + auth_form_data: HashMap::from([(template, bearer_key.to_string())]), + active: true, + }; + + let res = self + .send_request::( + "v1/connections", + http::Method::POST, + Some(key), + Some(&payload), + ) + .await + .unwrap(); + + mock.assert_async().await; + assert!(res.code.is_success()); + + let connection = res.data; + + assert_eq!(connection.platform.to_string(), connection_def.platform); + assert!(!connection.secrets_service_id.is_empty()); + + let model_def = CreateConnectionModelDefinitionRequest { + connection_platform: connection_def.platform, + connection_definition_id: connection_def.id, + platform_version: connection_def.platform_version, + title: connection_def.name.clone(), + name: connection_def.name.clone(), + model_name: connection_def.name.clone(), + base_url: api_config.base_url, + path: api_config.path, + auth_method: api_config.auth_method, + http_method: test_connection.action, + action_name: Faker.fake::(), + headers: api_config.headers, + query_params: api_config.query_params, + extractor_config: test_connection.extractor_config, + version: test_connection.record_metadata.version, + schemas: SchemasInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + samples: SamplesInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + responses: vec![], + paths: None, + is_default_crud_mapping: None, + mapping: Some(CrudMapping { + action: CrudAction::GetMany, + common_model_name: connection_def.name, + from_common_model: Some( + "function mapCrudRequest(data) { return data; }".to_string(), + ), + to_common_model: Some("function mapCrudRequest(data) { return data; }".to_string()), + }), + }; + + let res = admin_server + .send_request::( + "v1/connection-model-definitions", + http::Method::POST, + None, + Some(&model_def), + ) + .await + .unwrap(); + assert!(res.code.is_success()); + + let conn_model_def = res.data; + + (connection, conn_model_def) + } +} diff --git a/api/tests/api_tests/test_server/test_core.rs b/api/tests/api_tests/test_server/test_core.rs new file mode 100644 index 00000000..019bfb9b --- /dev/null +++ b/api/tests/api_tests/test_server/test_core.rs @@ -0,0 +1,84 @@ +use std::{collections::HashMap, 
sync::Arc}; + +use api::config::Config as ApiConfig; +use envconfig::Envconfig; +use event_core::{ + config::EventCoreConfig, dispatcher::Dispatcher, event_handler::EventHandler, + mongo_context_store::MongoContextStore, mongo_control_data_store::MongoControlDataStore, +}; +use gateway::config::Config as GatewayConfig; +use http::StatusCode; +use integrationos_domain::common::{ + event_response::EventResponse, event_with_context::EventWithContext, +}; +use tokio::sync::{ + mpsc::{self, Receiver}, + Mutex, +}; + +use super::MockSecretsClient; + +#[allow(dead_code)] +#[derive(Clone)] +pub struct TestCore { + pub config: EventCoreConfig, + pub rx: Arc>>, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ApiResponse { + pub code: StatusCode, + pub data: EventResponse, +} + +impl TestCore { + #[allow(dead_code)] + pub async fn new( + api_config: &ApiConfig, + gateway_config: &GatewayConfig, + secrets_client: Arc, + ) -> Self { + let mut config = EventCoreConfig::init_from_hashmap(&HashMap::from([])).unwrap(); + + config.db = api_config.db_config.clone(); + config.redis = gateway_config.redis.clone(); + + let control_store = Arc::new( + MongoControlDataStore::new(&config, secrets_client) + .await + .unwrap(), + ); + + let context_store = Arc::new(MongoContextStore::new(&config).await.unwrap()); + + let dispatcher = Dispatcher { + context_store: context_store.clone(), + event_store: control_store.clone(), + control_data_store: control_store.clone(), + }; + + let event_handler = EventHandler::new(config.redis.clone(), control_store, context_store) + .await + .unwrap(); + + let (tx, rx) = mpsc::channel(100); + + tokio::task::spawn(async move { + loop { + let EventWithContext { context, .. } = event_handler.pop_event().await.unwrap(); + dispatcher.process_context(context).await.unwrap(); + tx.send(()).await.unwrap(); + } + }); + + Self { + config, + rx: Arc::new(Mutex::new(rx)), + } + } + + #[allow(dead_code)] + pub async fn event_completed(&self) { + self.rx.lock().await.recv().await.unwrap() + } +} diff --git a/api/tests/api_tests/test_server/test_gateway.rs b/api/tests/api_tests/test_server/test_gateway.rs new file mode 100644 index 00000000..1fde1d9e --- /dev/null +++ b/api/tests/api_tests/test_server/test_gateway.rs @@ -0,0 +1,83 @@ +use std::collections::HashMap; + +use anyhow::Result; +use api::config::Config as ApiConfig; +use envconfig::Envconfig; +use gateway::{config::Config, finalizer::Finalizer, server::Server}; +use http::StatusCode; +use integrationos_domain::common::event_response::EventResponse; +use serde_json::{json, Value}; +use testcontainers_modules::{redis::Redis, testcontainers::Container}; +use tokio::net::TcpListener; +use uuid::Uuid; + +#[allow(dead_code)] +pub struct TestGateway { + port: u16, + pub config: Config, + client: reqwest::Client, + _redis: Container<'static, Redis>, +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct ApiResponse { + pub code: StatusCode, + pub data: EventResponse, +} + +impl TestGateway { + #[allow(dead_code)] + pub async fn new(api_config: &ApiConfig) -> Self { + // Get available port for server to listen + let port = TcpListener::bind("127.0.0.1:0") + .await + .unwrap() + .local_addr() + .unwrap() + .port(); + + let docker = super::DOCKER.get_or_init(Default::default); + let node = docker.run(Redis); + let host_port = node.get_host_port_ipv4(6379); + let redis = format!("redis://127.0.0.1:{host_port}"); + + let queue_name = Uuid::new_v4().to_string(); + + let mut config = Config::init_from_hashmap(&HashMap::from([ + 
("SERVER_ADDRESS".to_string(), format!("0.0.0.0:{port}")), + ("REDIS_URL".to_string(), redis), + ("REDIS_QUEUE_NAME".to_string(), queue_name), + ])) + .unwrap(); + + config.db = api_config.db_config.clone(); + + let finalizer = Finalizer::new(config.clone()).await.unwrap(); + let server = Server::new(config.clone(), finalizer); + + tokio::task::spawn(async move { server.run().await }); + + Self { + port, + config, + client: reqwest::Client::new(), + _redis: node, + } + } + + #[allow(dead_code)] + pub async fn emit_event(&self, key: &str, name: &str, payload: &Value) -> Result { + let req = self + .client + .post(format!("http://localhost:{}/emit", self.port)) + .header("x-buildable-secret", key) + .json(&json!({"event": name, "payload": payload})); + + let res = req.send().await?; + + Ok(ApiResponse { + code: res.status(), + data: res.json().await?, + }) + } +} diff --git a/api/tests/api_tests/transaction_tests.rs b/api/tests/api_tests/transaction_tests.rs new file mode 100644 index 00000000..79bdec6b --- /dev/null +++ b/api/tests/api_tests/transaction_tests.rs @@ -0,0 +1,79 @@ +use std::time::Duration; + +use api::endpoints::{pipeline::CreatePipelineRequest, ReadResponse}; +use fake::{Fake, Faker}; +use http::{Method, StatusCode}; +use integrationos_domain::common::{ + connection_model_definition::PlatformInfo, destination::Action, environment::Environment, + Transaction, +}; +use serde_json::{json, Value}; + +use crate::test_server::{test_core::TestCore, test_gateway::TestGateway, TestServer}; + +#[tokio::test] +async fn test_event_core() { + let mut server = TestServer::new(false, None).await; + + let (connection, conn_def) = server.create_connection(Environment::Live).await; + + let event_name: String = Faker.fake(); + + let mut pipeline: CreatePipelineRequest = Faker.fake(); + pipeline.source.group = connection.group; + pipeline.source.r#type = server.live_access_key.data.event_type.clone(); + pipeline.source.events = vec![event_name.clone()]; + pipeline.middleware = vec![]; + pipeline.destination = Faker.fake(); + let PlatformInfo::Api(api_config) = conn_def.platform_info else { + panic!(); + }; + pipeline.destination.platform = connection.platform.clone(); + pipeline.destination.connection_key = connection.key; + pipeline.destination.action = Action::Passthrough { + method: conn_def.action, + path: api_config.path.into(), + }; + + let payload = serde_json::to_value(&pipeline).unwrap(); + + server + .send_request::( + "v1/pipelines", + Method::POST, + Some(&server.live_key), + Some(&payload), + ) + .await + .unwrap(); + + let gateway = TestGateway::new(&server.config).await; + let core = TestCore::new( + &server.config, + &gateway.config, + server.secrets_client.clone(), + ) + .await; + + let payload = json!({"foo":"bar"}); + + let event_response = gateway + .emit_event(&server.live_key, &event_name, &payload) + .await + .unwrap(); + assert_eq!(event_response.code, StatusCode::OK); + + core.event_completed().await; + + tokio::time::sleep(Duration::from_millis(100)).await; + + let res = server + .send_request::("v1/transactions", Method::GET, Some(&server.live_key), None) + .await + .unwrap(); + + assert!(res.code.is_success()); + + let txs: ReadResponse = serde_json::from_value(res.data).unwrap(); + assert_eq!(txs.rows.len(), 2); +} diff --git a/api/tests/api_tests/unified_tests.rs b/api/tests/api_tests/unified_tests.rs new file mode 100644 index 00000000..b80a0c27 --- /dev/null +++ b/api/tests/api_tests/unified_tests.rs @@ -0,0 +1,629 @@ +use std::time::Duration; + +use 
api::endpoints::{ + connection_model_definition::CreateRequest as CreateConnectionModelDefinitionRequest, + connection_model_schema::CreateRequest as CreateConnectionModelSchemaRequest, + metrics::MetricResponse, +}; +use chrono::{Datelike, Utc}; +use fake::{faker::filesystem::raw::DirPath, locales::EN, Fake, Faker}; +use http::{ + header::{AUTHORIZATION, CONTENT_TYPE}, + Method, StatusCode, +}; +use integrationos_domain::{ + common::{ + api_model_config::{AuthMethod, SamplesInput, SchemasInput}, + connection_model_definition::{ConnectionModelDefinition, CrudAction, CrudMapping}, + connection_model_schema::{ConnectionModelSchema, Mappings}, + environment::Environment, + Connection, + }, + id::{prefix::IdPrefix, Id}, +}; +use mockito::Mock; +use serde_json::Value; + +use crate::test_server::TestServer; + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_get_many() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + action: CrudAction::GetMany, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}?foo=bar", name.to_lowercase()), + Method::GET, + Some(&server.live_key), + None, + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_get_one() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let id: String = Faker.fake(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + action: CrudAction::GetOne, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}/{id}", name.to_lowercase()), + Method::GET, + Some(&server.live_key), + None, + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_get_count() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + 
action: CrudAction::GetCount, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}/count", name.to_lowercase()), + Method::GET, + Some(&server.live_key), + None, + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_update() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let id: String = Faker.fake(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + action: CrudAction::Update, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let payload: Value = Faker.fake(); + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}/{id}", name.to_lowercase()), + Method::PATCH, + Some(&server.live_key), + Some(&payload), + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_delete() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let id: String = Faker.fake(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + action: CrudAction::Delete, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}/{id}", name.to_lowercase()), + Method::DELETE, + Some(&server.live_key), + None, + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_api_create() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + 
CrudMapping { + action: CrudAction::Create, + common_model_name: name.clone(), + from_common_model: Some( + "function mapCrudRequest(data) { + return data; + }" + .to_string(), + ), + to_common_model: Some( + "function mapCrudRequest(data) { + data.queryParams = undefined; + return data; + }" + .to_string(), + ), + }, + ) + .await; + + let payload: Value = Faker.fake(); + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}", name.to_lowercase()), + Method::POST, + Some(&server.live_key), + Some(&payload), + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock.assert_async().await; +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 2)] +async fn test_unified_metrics() { + let mut server = TestServer::new(false, None).await; + let (connection, _) = server.create_connection(Environment::Live).await; + + let name = "Model".to_string(); + + let mock = create_connection_model_definition( + &mut server, + &connection, + CrudMapping { + action: CrudAction::Create, + common_model_name: name.clone(), + from_common_model: None, + to_common_model: None, + }, + ) + .await; + + let payload: Value = Faker.fake(); + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}", name.to_lowercase()), + Method::POST, + Some(&server.live_key), + Some(&payload), + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + mock.assert_async().await; + + tokio::time::sleep(Duration::from_millis(100)).await; + + let admin_server = + TestServer::new(true, Some(server.config.db_config.control_db_name.clone())).await; + + let res = admin_server + .send_request::<(), MetricResponse>("v1/metrics", Method::GET, None, None) + .await + .unwrap(); + + assert_eq!(res.data.count, 1); + + let res = server + .send_request_with_headers::( + &format!("v1/unified/{}", name.to_lowercase()), + Method::POST, + Some(&server.live_key), + Some(&payload), + Some( + vec![ + (CONTENT_TYPE.to_string(), "application/json".to_string()), + ( + "x-integrationos-connection-key".to_string(), + connection.key.to_string(), + ), + ] + .into_iter() + .collect(), + ), + ) + .await + .unwrap(); + assert_eq!(res.code, StatusCode::OK); + + tokio::time::sleep(Duration::from_millis(100)).await; + + let res = admin_server + .send_request::<(), MetricResponse>( + format!("v1/metrics/{}", connection.ownership.client_id).as_str(), + Method::GET, + None, + None, + ) + .await + .unwrap(); + + assert_eq!(res.data.count, 2); + + let date = Utc::now(); + let day = date.day(); + let month = date.month(); + let year = date.year(); + let daily_key = format!("{year}-{month:02}-{day:02}"); + let monthly_key = format!("{year}-{month:02}"); + + let res = admin_server + .send_request::<(), MetricResponse>( + format!( + "v1/metrics/{}?day={daily_key}", + connection.ownership.client_id + ) + .as_str(), + Method::GET, + None, + None, + ) + .await + .unwrap(); + + assert_eq!(res.data.count, 2); + + let res = admin_server + .send_request::<(), MetricResponse>( + format!( + "v1/metrics/{}?month={monthly_key}&apiType=unified", + connection.ownership.client_id + ) + .as_str(), + Method::GET, + 
None, + None, + ) + .await + .unwrap(); + + assert_eq!(res.data.count, 2); + + let res = admin_server + .send_request::<(), MetricResponse>( + format!("v1/metrics?platform={}", connection.platform).as_str(), + Method::GET, + None, + None, + ) + .await + .unwrap(); + + assert_eq!(res.data.count, 2); + + let res = admin_server + .send_request::<(), MetricResponse>( + "v1/metrics?apiType=passthrough", + Method::GET, + None, + None, + ) + .await + .unwrap(); + + assert_eq!(res.data.count, 0); +} + +async fn create_connection_model_definition( + server: &mut TestServer, + connection: &Connection, + mapping: CrudMapping, +) -> Mock { + let secret_key = Faker.fake::(); + let url_path: String = DirPath(EN).fake(); + let path: String = Faker.fake(); + let response_body = format!("{{\"id\": \"{}\"}}", Faker.fake::()); + + let mock = server + .mock_server + .mock("GET", format!("{url_path}/{path}").as_str()) + .match_header( + AUTHORIZATION.as_str(), + format!("Bearer {secret_key}").as_str(), + ) + .expect(1) + .with_status(200) + .with_body(response_body.clone()) + .create_async() + .await; + + let create_model_definition_payload = CreateConnectionModelDefinitionRequest { + connection_platform: connection.platform.to_string(), + connection_definition_id: connection.connection_definition_id, + platform_version: connection.record_metadata.version.to_string(), + title: Faker.fake(), + name: Faker.fake(), + model_name: Faker.fake(), + action_name: mapping.action.clone(), + base_url: server.mock_server.url() + &url_path, + path, + auth_method: AuthMethod::BearerToken { + value: secret_key.to_string(), + }, + http_method: http::Method::GET, + headers: None, + query_params: None, + extractor_config: None, + version: "1.0.0".parse().unwrap(), + schemas: SchemasInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + samples: SamplesInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + paths: None, + responses: vec![], + is_default_crud_mapping: None, + mapping: Some(mapping.clone()), + }; + + let admin_server = + TestServer::new(true, Some(server.config.db_config.control_db_name.clone())).await; + let create_model_definition_response = admin_server + .send_request::( + "v1/connection-model-definitions", + Method::POST, + None, + Some(&create_model_definition_payload), + ) + .await + .unwrap(); + + assert_eq!(create_model_definition_response.code, StatusCode::OK); + + let mut schema: CreateConnectionModelSchemaRequest = Faker.fake(); + schema.connection_platform = connection.platform.to_string(); + schema.mapping = Some(Mappings { + from_common_model: "function mapFromCommonModel(data) { return data; }".to_string(), + to_common_model: "function mapToCommonModel(data) { return data; }".to_string(), + common_model_name: mapping.common_model_name.clone(), + common_model_id: Id::now(IdPrefix::CommonModel), + unmapped_fields: Default::default(), + }); + + let res = admin_server + .send_request::( + "v1/connection-model-schemas", + Method::POST, + None, + Some(&schema), + ) + .await + .unwrap(); + + assert_eq!(res.code, StatusCode::OK); + + mock +} diff --git a/event-core/Cargo.toml b/event-core/Cargo.toml new file mode 100644 index 00000000..9b01ce15 --- /dev/null +++ b/event-core/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "event-core" +version = "0.1.0" +edition = "2021" + + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +bson.workspace = true +chrono.workspace = true +dotenvy.workspace = true +envconfig.workspace = 
true +futures.workspace = true +google-token-fetcher = { path = "../google-token-fetcher" } +handlebars.workspace = true +http.workspace = true +integrationos-domain = { workspace = true, features = ["unified"] } +js-sandbox-ios.workspace = true +metrics = "0.21.1" +metrics-exporter-prometheus = "0.12.1" +moka.workspace = true +mongodb.workspace = true +redis-retry = { path = "../redis-retry" } +reqwest.workspace = true +serde.workspace = true +serde_json.workspace = true +tokio-condvar = "0.1.0" +tokio.workspace = true +tracing-subscriber.workspace = true +tracing.workspace = true + +[dev-dependencies] +testcontainers-modules = { workspace = true, features = ["mongo"] } +chrono.workspace = true +fake.workspace = true +mockito.workspace = true +uuid.workspace = true + +[[test]] +name = "mock_destination" +required-features = ["integrationos-domain/dummy"] + +[[test]] +name = "mock_storage" +required-features = ["integrationos-domain/dummy"] diff --git a/event-core/README.md b/event-core/README.md new file mode 100644 index 00000000..6d3de2f0 --- /dev/null +++ b/event-core/README.md @@ -0,0 +1,37 @@ +# Event Core + +Processes incoming events from the [gateway](../gateway/) over Redis and executes the associated pipelines. + +## Dependencies + +Requires Redis to receive events from the [gateway](../gateway). + +```bash +$ docker run -p 6379:6379 redis +``` + +Requires MongoDB. + +```bash +$ docker run -p 27017:27017 mongo +``` + +Connecting to an external MongoDB requires setting several environment variables in your `.env` file, depending on which databases you want to use. + +`"CONTROL_DATABASE_URL"` and `"CONTROL_DATABASE_NAME"` are for the db which stores integration records. +`"EVENT_DATABASE_URL"` and `"EVENT_DATABASE_NAME"` are for the db which stores events. +`"CONTEXT_DATABASE_URL"` and `"CONTEXT_DATABASE_NAME"` are for the db which will store contexts and event-transactions. + +## Running + +```bash +$ cargo run +``` + +By default, this will log everything, including dependencies, at the `DEBUG` level. To do more granular filtering, you can set the `RUST_LOG` environment variable in the `.env` file or on the command line, such as: + +```bash +$ RUST_LOG=event_core=info cargo run +``` + +which will output logs from only this crate at the `INFO` level.
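For illustration, here is a minimal sketch of such a `.env`, assuming all three stores share a single local MongoDB instance; the database names below are placeholder values, not defaults prescribed by the project:

```bash
# Hypothetical example: point all three stores at one local MongoDB
CONTROL_DATABASE_URL=mongodb://localhost:27017
CONTROL_DATABASE_NAME=control
EVENT_DATABASE_URL=mongodb://localhost:27017
EVENT_DATABASE_NAME=events
CONTEXT_DATABASE_URL=mongodb://localhost:27017
CONTEXT_DATABASE_NAME=context
```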
diff --git a/event-core/src/config.rs b/event-core/src/config.rs new file mode 100644 index 00000000..7b467b17 --- /dev/null +++ b/event-core/src/config.rs @@ -0,0 +1,38 @@ +use envconfig::Envconfig; +use integrationos_domain::common::{database::DatabaseConfig, secrets::SecretsConfig}; +use redis_retry::Config as RedisConfig; +use std::fmt::{Display, Formatter}; + +#[derive(Envconfig, Clone)] // Intentionally no Debug so secret is not printed +pub struct EventCoreConfig { + #[envconfig(from = "CACHE_SIZE", default = "10000")] + pub cache_size: u64, + #[envconfig(from = "CACHE_TTL_SECS", default = "60")] + pub cache_ttl_secs: u64, + #[envconfig(from = "DB_CONNECTION_COUNT", default = "25")] + pub db_connection_count: u64, + #[envconfig(from = "FETCH_GOOGLE_AUTH_TOKEN", default = "true")] + pub fetch_google_auth_token: bool, + #[envconfig(nested = true)] + pub secrets_config: SecretsConfig, + #[envconfig(nested = true)] + pub redis: RedisConfig, + #[envconfig(nested = true)] + pub db: DatabaseConfig, +} + +impl Display for EventCoreConfig { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + writeln!(f, "CACHE_SIZE: {}", self.cache_size)?; + writeln!(f, "CACHE_TTL_SECS: {}", self.cache_ttl_secs)?; + writeln!(f, "DB_CONNECTION_COUNT: {}", self.db_connection_count)?; + writeln!( + f, + "FETCH_GOOGLE_AUTH_TOKEN: {}", + self.fetch_google_auth_token + )?; + write!(f, "{}", self.secrets_config)?; + write!(f, "{}", self.redis)?; + write!(f, "{}", self.db) + } +} diff --git a/event-core/src/dispatcher.rs b/event-core/src/dispatcher.rs new file mode 100644 index 00000000..cfc22f6f --- /dev/null +++ b/event-core/src/dispatcher.rs @@ -0,0 +1,485 @@ +use crate::{ + metrics::{EVENTS_HISTOGRAM, STAGE_HISTOGRAM, STAGE_LABEL, STATUS_LABEL}, + store::{ContextStore, ControlDataStore, EventStore}, +}; +use anyhow::Result; +use chrono::Utc; +use futures::{ + future::{self}, + FutureExt, +}; +use integrationos_domain::{ + algebra::execution::{ExecutionContext, Status}, + common::{ + extractor_context::Stage as ExtractorStage, middleware::Middleware, + pipeline_context::Stage as PipelineStage, root_context::Stage as RootStage, Event, + ExtractorContext, PipelineContext, RootContext, Transaction, + }, +}; +use js_sandbox_ios::Script; +use serde_json::{json, Value}; +use std::{collections::HashMap, sync::Arc, time::Duration}; +use tokio::{ + pin, select, + time::{interval, sleep, Instant}, +}; +use tracing::{debug, error, info, trace, warn}; + +const KEEP_ALIVE_INTERVAL_SECS: u64 = 10; +const RETRY_INTERVAL_MILLIS: u64 = 500; +const TICK_INTERVAL_MILLIS: u64 = 1000; + +#[derive(Clone)] +pub struct Dispatcher +where + X: ContextStore + Sync + Send + 'static, + Y: EventStore + Sync + Send + 'static, + Z: ControlDataStore + Sync + Send + 'static, +{ + pub context_store: Arc, + pub event_store: Arc, + pub control_data_store: Arc, +} + +macro_rules! 
select_contexts { + ($self:ident.$fn:ident($contexts:ident, $key:ident)) => { + if !$contexts.is_empty() { + let mut tasks = Vec::with_capacity($contexts.len()); + for context in $contexts.values() { + tasks.push(Box::pin($self.$fn(context.clone()))); + } + loop { + let (task, _, remaining_tasks) = future::select_all(tasks).await; + tasks = remaining_tasks; + + match task { + Ok(context) => { + $contexts.insert(context.$key.clone(), context); + } + Err(err) => { + error!("{err:?}"); + } + } + + if tasks.is_empty() { + break; + } + } + } + }; +} + +impl Dispatcher +where + X: ContextStore + Sync + Send + 'static, + Y: EventStore + Sync + Send + 'static, + Z: ControlDataStore + Sync + Send + 'static, +{ + #[tracing::instrument(skip(self, context), fields(event_id = %context.event_key))] + pub async fn process_context(&self, mut context: RootContext) -> Result { + let time: Instant = Instant::now(); + info!("Processing event"); + let mut interval = interval(Duration::from_secs(KEEP_ALIVE_INTERVAL_SECS)); + 'outer: loop { + let fut = self.process_root_context(context.clone()); + pin!(fut); + loop { + select! { + res = &mut fut => { + match res { + Ok(new_context) => { + let should_save = match new_context.stage { + RootStage::ProcessingPipelines(ref pipelines) => !pipelines.is_empty(), + _ => true + }; + context = new_context; + if should_save { + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + } + }, + Err(e) => { + error!("Error processing root context: {e}"); + sleep(Duration::from_millis(RETRY_INTERVAL_MILLIS)).await; + } + } + + if context.is_complete() { + break 'outer; + } + continue 'outer; + }, + _ = interval.tick() => { + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + } + } + } + } + let elapsed = Instant::now() - time; + info!("Finished processing event in {:?}", elapsed); + metrics::histogram!(EVENTS_HISTOGRAM, elapsed, STATUS_LABEL => context.status.to_string()); + Ok(context) + } + + #[tracing::instrument(skip(self, context), fields(stage = %context.stage))] + pub async fn process_root_context(&self, mut context: RootContext) -> Result { + let time: Instant = Instant::now(); + trace!("Processing root context {}", context.event_key); + let event = self.event_store.get(&context.event_key).await?; + trace!("Retrieved event {event:?}"); + + let context = match context.stage { + RootStage::New => { + debug!("Verifying event"); + let verified = self.verify_event(&event).await?; + if verified { + trace!("Event successfully verified"); + context.stage = RootStage::Verified; + } else { + warn!("Event did not verify, dropped"); + context.status = Status::Dropped { + reason: "Did not verify".to_owned(), + }; + } + context + } + RootStage::Verified => { + debug!("Fetching duplicates"); + let mut context = self.fetch_duplicates(context, event).await?; + trace!("Duplicates fetched"); + context.stage = RootStage::ProcessedDuplicates; + context + } + RootStage::ProcessedDuplicates => { + debug!("Getting pipelines"); + let pipelines = self.control_data_store.get_pipelines(&event).await?; + let pipelines: HashMap<_, _> = pipelines + .into_iter() + .map(|p| (p.key.clone(), PipelineContext::new(p.key, &context))) + .collect(); + trace!("Got {} pipelines", pipelines.len()); + context.stage = RootStage::ProcessingPipelines(pipelines); + context + } + RootStage::ProcessingPipelines(ref mut pipelines) => { + debug!("Processing pipelines"); + select_contexts!(self.process_pipeline(pipelines, pipeline_key)); + trace!("Processed 
pipelines"); + context.stage = RootStage::Finished; + context + } + RootStage::Finished => { + trace!("Finished root context"); + context + } + }; + let elapsed = Instant::now() - time; + trace!("Finished processing root context in {:?}", elapsed); + metrics::histogram!(STAGE_HISTOGRAM, elapsed, STAGE_LABEL => context.stage.to_string()); + Ok(context) + } + + #[tracing::instrument(skip(self, context), fields(pipeline_key = %context.pipeline_key))] + pub async fn process_pipeline(&self, mut context: PipelineContext) -> Result { + debug!("Processing pipeline"); + loop { + let time = Instant::now(); + context = self.process_pipeline_context(context).await?; + let elapsed = Instant::now() - time; + trace!("Finished processing pipeline context in {:?}", elapsed); + metrics::histogram!(STAGE_HISTOGRAM, elapsed, STAGE_LABEL => context.stage.to_string()); + let should_save = match context.stage { + PipelineStage::ExecutingExtractors(ref extractors) => !extractors.is_empty(), + PipelineStage::ExecutedExtractors(ref contexts) => !contexts.is_empty(), + PipelineStage::ExecutedTransformer(ref context) => context.is_some(), + _ => true, + }; + if should_save { + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + context.transaction = None; + } + if context.is_complete() { + break; + } + } + trace!("Processed pipeline"); + Ok(context) + } + + #[tracing::instrument(skip(self, context), fields(stage = %context.stage))] + pub async fn process_pipeline_context( + &self, + mut context: PipelineContext, + ) -> Result { + trace!("Processing pipeline context"); + let pipeline = self + .control_data_store + .get_pipeline(&context.pipeline_key) + .await?; + trace!("Retrieved pipeline {pipeline:?}"); + let event = self.event_store.get(&context.event_key).await?; + + match context.stage { + PipelineStage::New => { + debug!("Getting extractors"); + let extractors: HashMap = pipeline + .middleware + .into_iter() + .filter_map(|e| match e { + Middleware::HttpExtractor(e) => { + Some((e.key.clone(), ExtractorContext::new(e.key, &context))) + } + Middleware::Transformer { .. } => None, + }) + .collect(); + trace!("Got {} extractors", extractors.len()); + if extractors.is_empty() { + context.stage = PipelineStage::ExecutedExtractors(HashMap::new()); + } else { + context.stage = PipelineStage::ExecutingExtractors(extractors); + } + Ok(context) + } + PipelineStage::ExecutingExtractors(ref mut extractors) => { + debug!("Processing extractors"); + select_contexts!(self.process_extractor(extractors, extractor_key)); + trace!("Processed extractors"); + let mut contexts = HashMap::with_capacity(extractors.len()); + for e in extractors.values() { + match e.stage { + ExtractorStage::New => { + return Ok(context); + } + ExtractorStage::FinishedExtractor(ref context) => { + contexts.insert(e.extractor_key.clone(), context.clone()); + } + } + } + context.stage = PipelineStage::ExecutedExtractors(contexts); + + Ok(context) + } + PipelineStage::ExecutedExtractors(contexts) => { + debug!("Executing transformer"); + let Some(Middleware::Transformer { code, .. }) = pipeline + .middleware + .iter() + .find(|m| matches!(m, Middleware::Transformer { .. })) + else { + trace!("Did not find transformer, sending directly to destination"); + context.stage = PipelineStage::ExecutedTransformer(if contexts.is_empty() { + None + } else { + Some(serde_json::to_value(contexts)?) + }); + return Ok(context); + }; + + let mut script = Script::from_string(code.as_str())? 
+ .with_timeout(Duration::from_millis(TICK_INTERVAL_MILLIS)); + let value: Value = script + .call("transform", (event.clone(), contexts)) + .map_err(|e| { + error!("Failed to transform data with contexts"); + e + })?; + + trace!("Executed transformer"); + context.transaction = Some(Transaction::completed( + &event, + format!("{}::transformer", pipeline.key), + "['{{event}}', '{{context}}']".to_owned(), + value.to_string(), + )); + context.stage = PipelineStage::ExecutedTransformer(Some(value)); + Ok(context) + } + PipelineStage::ExecutedTransformer(ref value) => { + debug!("Sending to destination"); + + let retry = &pipeline.config.clone().unwrap_or_default().policies.retry; + let retry_interval = retry.get_interval().unwrap_or(Duration::from_secs(1)); + let mut interval = + tokio::time::interval(Duration::from_millis(TICK_INTERVAL_MILLIS)); + 'outer: for i in 0..retry.maximum_attempts { + let fut = self.control_data_store.send_to_destination( + &event, + &pipeline, + value.clone(), + ); + pin!(fut); + loop { + select! { + res = &mut fut => { + let tx_key = if i > 0 { + format!("{}::destination::attempt-{i}", pipeline.key) + } else { + format!("{}::destination", pipeline.key) + }; + let input = json!(["{{event}}", "{{context}}"]).to_string(); + match res { + Ok(value) => { + trace!("Sent to destination"); + context.transaction = Some(Transaction::completed( + &event, + tx_key, + input, + value, + )); + context.stage = PipelineStage::FinishedPipeline; + return Ok(context); + } + Err(e) => { + error!("Failed to send to destination: {e}"); + if i < retry.maximum_attempts - 1 { + context.transaction = Some(Transaction::failed( + &event, + tx_key, + input, + e.to_string(), + )); + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + context.transaction = None; + sleep(retry_interval).await; + } else { + context.transaction = Some(Transaction::panicked( + &event, + tx_key, + input, + e.to_string(), + )); + } + continue 'outer; + } + } + }, + _ = interval.tick() => { + context.transaction = Some(Transaction::completed( + &event, + format!("{}::heartbeat-{}", pipeline.key, i + 1), + "['{{event}}', '{{context}}']".to_owned(), + "{}".to_owned(), + )); + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + } + } + } + } + context.status = Status::Dropped { + reason: "Failed destination".to_string(), + }; + warn!("Failed destination"); + Ok(context) + } + PipelineStage::FinishedPipeline => { + debug!("Executed pipeline"); + Ok(context) + } + } + } + + #[tracing::instrument(skip(self, context), fields(extractor_key = %context.extractor_key))] + pub async fn process_extractor( + &self, + mut context: ExtractorContext, + ) -> Result { + trace!("Processing extractor"); + let extractor = self + .control_data_store + .get_extractor(&context.extractor_key, &context.pipeline_key) + .await?; + trace!("Retrieved extractor"); + + let retry = &extractor.policies.retry; + let retry_interval = retry.get_interval().unwrap_or(Duration::from_secs(1)); + let max_attempts = retry.maximum_attempts; + + let mut tick_interval = interval(Duration::from_millis(TICK_INTERVAL_MILLIS)); + 'outer: for i in 0..max_attempts { + let fut = self.control_data_store.execute_extractor(&extractor).fuse(); + pin!(fut); + loop { + select!
{ + res = &mut fut => { + let event = self.event_store.get(&context.event_key).await?; + let tx_key = if i > 0 { + format!("{}::extractor:http::attempt-{i}", extractor.key) + } else { + format!("{}::extractor:http", extractor.key) + }; + let input = json!(["{{event}}"]).to_string(); + match res { + Ok(value) => { + context.transaction = Some(Transaction::completed( + &event, + tx_key, + input, + serde_json::to_string(&value)?, + )); + context.stage = ExtractorStage::FinishedExtractor(value); + trace!("Executed extractor"); + self.context_store.set(context.clone()).await?; + trace!("Saved extractor context"); + return Ok(context); + } + Err(e) => { + if i < max_attempts - 1 { + context.transaction = Some(Transaction::failed( + &event, + tx_key, + input, + e.to_string(), + )); + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + context.transaction = None; + sleep(retry_interval).await; + } else { + context.transaction = Some(Transaction::panicked( + &event, + tx_key, + input, + e.to_string(), + )); + } + continue 'outer; + } + } + }, + _ = tick_interval.tick() => { + context.transaction = None; + context.timestamp = Utc::now(); + self.context_store.set(context.clone()).await?; + } + } + } + } + + context.status = Status::Dropped { + reason: "Failed extractor".to_string(), + }; + self.context_store.set(context.clone()).await?; + warn!("Failed extractor"); + trace!("Saved failed extractor context"); + Ok(context) + } + + #[tracing::instrument(skip(self, context, _event))] + async fn fetch_duplicates(&self, context: RootContext, _event: Event) -> Result { + // Disable duplicate detection for now + // let duplicates = self.event_store.get_duplicates(&event).await?; + // let mut event = event.add_duplicates(duplicates); + // event.state = EventState::Acknowledged; + // self.event_store.set(event).await?; + Ok(context) + } + + #[tracing::instrument(skip(self, event))] + async fn verify_event(&self, event: &Event) -> Result { + self.control_data_store.verify_event(event).await + } +} diff --git a/event-core/src/event_handler.rs b/event-core/src/event_handler.rs new file mode 100644 index 00000000..7884c889 --- /dev/null +++ b/event-core/src/event_handler.rs @@ -0,0 +1,109 @@ +use crate::store::{ContextStore, ControlDataStore}; +use anyhow::{Context, Result}; +use integrationos_domain::common::{event_with_context::EventWithContext, Event, Transaction}; +use redis_retry::{AsyncCommands, Config, Redis}; +use std::{sync::Arc, time::Duration}; +use tokio::{join, sync::Mutex, time::sleep}; +use tracing::error; + +#[derive(Clone)] +pub struct EventHandler< + T: ControlDataStore + Sync + Send + 'static, + U: ContextStore + Sync + Send + 'static, +> { + config: Config, + redis: Arc>, + control_store: Arc, + context_store: Arc, +} + +impl + EventHandler +{ + pub async fn new(config: Config, control_store: Arc, context_store: Arc) -> Result { + let redis = Arc::new(Mutex::new(Redis::new(&config).await?)); + + Ok(Self { + config, + redis, + control_store, + context_store, + }) + } + + pub async fn pop_event(&self) -> Result { + loop { + { + if let Some(data) = async { + let mut conn = self.redis.lock().await; + conn.rpop::<&str, Option>>(&self.config.queue_name, None) + .await + .with_context(|| "failed to parse redis message") + } + .await? 
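+            // `None` just means the queue was empty; fall through to the 50ms
+            // sleep below and poll again. The gateway LPUSHes and this worker
+            // RPOPs, so events are delivered roughly FIFO.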
+ { + let event: EventWithContext = serde_json::from_slice(&data)?; + return Ok(event); + } + } + sleep(Duration::from_millis(50)).await; + } + } + + pub async fn increment_throughput_count(&self, event: &Event) -> Result { + let connection = self + .control_store + .fetch_connection(event) + .await + .with_context(|| "Could not fetch integration")?; + let throughput = connection.throughput; + + let count: u64 = async { + let mut conn = self.redis.lock().await; + conn.hincr(&self.config.event_throughput_key, &throughput.key, 1) + .await + .with_context(|| "Could not increment throughput for integration") + } + .await?; + + Ok(count <= throughput.limit) + } + + pub async fn defer_event(&self, mut event: EventWithContext) -> Result<()> { + let count = if let Some(transaction) = event.context.transaction { + if let Some(Ok(number)) = transaction + .tx_key + .split("::throttled-") + .last() + .map(|n| n.parse::()) + { + number + 1 + } else { + 1 + } + } else { + 1 + }; + event.context.transaction = Some(Transaction::throttled( + &event.event, + format!("{}::throttled-{count}", event.event.key), + "".to_owned(), + "".to_owned(), + )); + let context_fut = self.context_store.set(event.context.clone()); + let redis_fut = async { + let serialized = serde_json::to_vec(&event) + .with_context(|| "Could not serialize event with context")?; + let mut conn = self.redis.lock().await; + + conn.lpush::<&str, &[u8], ()>(&self.config.queue_name, &serialized) + .await + .with_context(|| "Could not send channel response to queue") + }; + let (context_res, redis_res) = join!(context_fut, redis_fut); + if let Err(e) = context_res { + error!("Could not write throttle context to context store: {e}"); + } + redis_res + } +} diff --git a/event-core/src/lib.rs b/event-core/src/lib.rs new file mode 100644 index 00000000..6d178c72 --- /dev/null +++ b/event-core/src/lib.rs @@ -0,0 +1,8 @@ +pub mod config; +pub mod dispatcher; +pub mod event_handler; +pub mod metrics; +pub mod mocks; +pub mod mongo_context_store; +pub mod mongo_control_data_store; +pub mod store; diff --git a/event-core/src/main.rs b/event-core/src/main.rs new file mode 100644 index 00000000..174a1cd3 --- /dev/null +++ b/event-core/src/main.rs @@ -0,0 +1,148 @@ +use anyhow::{Context, Result}; +use dotenvy::dotenv; +use envconfig::Envconfig; +use event_core::{ + config::EventCoreConfig, + dispatcher::Dispatcher, + event_handler::EventHandler, + metrics::{CONCURRENT_EVENTS_GAUGE, CONCURRENT_EVENTS_PERCENTAGE_GAUGE}, + mongo_context_store::MongoContextStore, + mongo_control_data_store::MongoControlDataStore, +}; +use integrationos_domain::service::secrets_client::SecretsClient; +use metrics_exporter_prometheus::PrometheusBuilder; +use std::sync::Arc; +use tokio::sync::Mutex; +use tokio_condvar::Condvar; +use tracing::{error, info, metadata::LevelFilter, warn}; +use tracing_subscriber::EnvFilter; + +#[tokio::main(flavor = "multi_thread")] +async fn main() -> Result<()> { + dotenv().ok(); + + let filter = EnvFilter::builder() + .with_default_directive(LevelFilter::DEBUG.into()) + .from_env_lossy(); + tracing_subscriber::fmt().with_env_filter(filter).init(); + + let config = EventCoreConfig::init_from_env()?; + + info!("Starting event-core with config: {config}"); + + PrometheusBuilder::new() + .install() + .with_context(|| "failed to install prometheus server")?; + + metrics::describe_gauge!( + CONCURRENT_EVENTS_GAUGE, + "number of events currently being concurrently executed by this worker" + ); + + metrics::describe_gauge!( + 
CONCURRENT_EVENTS_PERCENTAGE_GAUGE, + "percentage of total capacity of events currently being concurrently executed by this worker" + ); + + let secrets_client = + Arc::new(SecretsClient::new(&config.secrets_config).with_context(|| { + format!( + "Could not parse secrets service config {:?}", + config.secrets_config + ) + })?); + let control_store = Arc::new( + MongoControlDataStore::new(&config, secrets_client) + .await + .with_context(|| "Could not connect to mongo db")?, + ); + + let context_store = Arc::new( + MongoContextStore::new(&config) + .await + .with_context(|| "Could not connect to context store db")?, + ); + + let dispatcher = Dispatcher { + context_store: context_store.clone(), + event_store: control_store.clone(), + control_data_store: control_store.clone(), + }; + + let event_handler = + EventHandler::new(config.redis, control_store.clone(), context_store).await?; + + info!("Listening for events on redis..."); + let sync_pair = Arc::new((Mutex::new(0u64), Condvar::new())); + + loop { + let event_with_context = event_handler.pop_event().await?; + increment_task_count(sync_pair.clone(), config.db_connection_count).await; + let sync_pair_clone = sync_pair.clone(); + let control_store = control_store.clone(); + let dispatcher = dispatcher.clone(); + let event_handler = event_handler.clone(); + tokio::spawn(async move { + match event_handler + .increment_throughput_count(&event_with_context.event) + .await + { + Ok(below_limit) => { + if !below_limit { + warn!( + "Throughput limit hit for {}, sending to back of queue", + event_with_context.event.id + ); + if let Err(e) = event_handler.defer_event(event_with_context).await { + error!("Could not send event back to redis: {e}"); + } + decrement_task_count(sync_pair_clone, config.db_connection_count).await; + return; + } + } + Err(e) => { + error!("Failed to increment throughput count: {e}"); + } + } + + control_store + .event_cache + .insert( + event_with_context.event.id, + event_with_context.event.clone(), + ) + .await; + + if let Err(e) = dispatcher.process_context(event_with_context.context).await { + error!("Could not process event: {e}"); + } + decrement_task_count(sync_pair_clone, config.db_connection_count).await; + }); + let mut task_count: tokio::sync::MutexGuard<'_, u64> = sync_pair.0.lock().await; + while *task_count >= config.db_connection_count { + task_count = sync_pair.1.wait(task_count).await; + } + } +} + +async fn increment_task_count(sync_pair: Arc<(Mutex, Condvar)>, connection_count: u64) { + let mut task_count = sync_pair.0.lock().await; + *task_count += 1; + metrics::gauge!(CONCURRENT_EVENTS_GAUGE, *task_count as f64); + metrics::gauge!( + CONCURRENT_EVENTS_PERCENTAGE_GAUGE, + *task_count as f64 / connection_count as f64 + ); + sync_pair.1.notify_one(); +} + +async fn decrement_task_count(sync_pair: Arc<(Mutex, Condvar)>, connection_count: u64) { + let mut task_count = sync_pair.0.lock().await; + *task_count -= 1; + metrics::gauge!(CONCURRENT_EVENTS_GAUGE, *task_count as f64); + metrics::gauge!( + CONCURRENT_EVENTS_PERCENTAGE_GAUGE, + *task_count as f64 / connection_count as f64 + ); + sync_pair.1.notify_one(); +} diff --git a/event-core/src/metrics.rs b/event-core/src/metrics.rs new file mode 100644 index 00000000..9745090d --- /dev/null +++ b/event-core/src/metrics.rs @@ -0,0 +1,9 @@ +// number of events currently being processed +pub const CONCURRENT_EVENTS_GAUGE: &str = "concurrent_events"; +pub const CONCURRENT_EVENTS_PERCENTAGE_GAUGE: &str = "concurrent_events_percentage"; +// counter of total events 
processed, bucketed into successful or dropped +pub const EVENTS_HISTOGRAM: &str = "events_processed"; +pub const STATUS_LABEL: &str = "status"; +// histogram of elapsed time per stage +pub const STAGE_HISTOGRAM: &str = "stages_processed"; +pub const STAGE_LABEL: &str = "stage"; diff --git a/event-core/src/mocks/mock_context_store.rs b/event-core/src/mocks/mock_context_store.rs new file mode 100644 index 00000000..ac5f38c1 --- /dev/null +++ b/event-core/src/mocks/mock_context_store.rs @@ -0,0 +1,56 @@ +use crate::store::ContextStore; +use anyhow::{anyhow, Result}; +use async_trait::async_trait; +use integrationos_domain::{algebra::execution::ExecutionContext, id::Id}; +use serde::{Deserialize, Serialize}; +use std::{ + collections::HashMap, + sync::{Arc, Mutex}, +}; + +type Contexts = Arc>>>>; + +#[derive(Clone, Default)] +pub struct MockContextStorage { + pub contexts: Contexts, +} + +impl MockContextStorage { + pub fn new() -> Self { + Self { + contexts: Arc::new(Mutex::new(HashMap::new())), + } + } +} + +#[async_trait] +impl ContextStore for MockContextStorage { + async fn get Deserialize<'a> + Unpin>( + &self, + context_key: &Id, + ) -> Result { + self.contexts + .lock() + .unwrap() + .get(context_key) + .map(|c| { + let last = c.last(); + last.expect("No context for {context_key}") + .downcast_ref::() + .expect("ExecutionContext could not be downcast") + .clone() + }) + .ok_or(anyhow!("No context for {context_key}")) + } + + async fn set(&self, context: T) -> Result<()> { + let context = Box::new(context); + self.contexts + .lock() + .unwrap() + .entry(*context.context_key()) + .and_modify(|v| v.push(context.clone())) + .or_insert(vec![context]); + Ok(()) + } +} diff --git a/event-core/src/mocks/mock_secret_service.rs b/event-core/src/mocks/mock_secret_service.rs new file mode 100644 index 00000000..308c27ba --- /dev/null +++ b/event-core/src/mocks/mock_secret_service.rs @@ -0,0 +1,27 @@ +use anyhow::Result; +use async_trait::async_trait; +use integrationos_domain::{ + algebra::crypto::Crypto, create_secret_response::CreateSecretResponse, + get_secret_request::GetSecretRequest, IntegrationOSError, +}; + +#[derive(Debug, Clone)] +pub struct MockSecretsClient; + +#[async_trait] +impl Crypto for MockSecretsClient { + async fn decrypt( + &self, + _secret: &GetSecretRequest, + ) -> Result { + Ok(serde_json::Value::Null) + } + + async fn encrypt( + &self, + _key: String, + _val: &serde_json::Value, + ) -> Result { + unimplemented!() + } +} diff --git a/event-core/src/mocks/mod.rs b/event-core/src/mocks/mod.rs new file mode 100644 index 00000000..7d9daac9 --- /dev/null +++ b/event-core/src/mocks/mod.rs @@ -0,0 +1,2 @@ +pub mod mock_context_store; +pub mod mock_secret_service; diff --git a/event-core/src/mongo_context_store.rs b/event-core/src/mongo_context_store.rs new file mode 100644 index 00000000..445db928 --- /dev/null +++ b/event-core/src/mongo_context_store.rs @@ -0,0 +1,52 @@ +use crate::{config::EventCoreConfig, store::ContextStore}; +use anyhow::{anyhow, Result}; +use async_trait::async_trait; +use bson::doc; +use integrationos_domain::{algebra::execution::ExecutionContext, id::Id}; +use mongodb::{Client, Database}; +use serde::{Deserialize, Serialize}; +use std::time::Instant; +use tracing::{error, trace}; + +#[derive(Debug, Clone)] +pub struct MongoContextStore { + pub db: Database, + pub collection_name: String, +} + +impl MongoContextStore { + pub async fn new(config: &EventCoreConfig) -> Result { + let client = Client::with_uri_str(&config.db.context_db_url).await?; + Ok(Self 
{ + db: client.database(&config.db.context_db_name), + collection_name: config.db.context_collection_name.clone(), + }) + } +} + +#[async_trait] +impl ContextStore for MongoContextStore { + async fn get Deserialize<'a> + Unpin>( + &self, + context_key: &Id, + ) -> Result { + let coll = self.db.collection(&self.collection_name); + let context = coll + .find_one(doc! { "id": context_key.to_string() }, None) + .await?; + Ok(context.ok_or_else(|| anyhow!("No context found"))?) + } + + async fn set(&self, context: T) -> Result<()> { + let instant = Instant::now(); + let coll = self.db.collection(&self.collection_name); + if let Err(e) = coll.insert_one(context, None).await { + error!("ExecutionContext insertion error {e}"); + } + trace!( + "Wrote context in {}", + (Instant::now() - instant).as_millis() + ); + Ok(()) + } +} diff --git a/event-core/src/mongo_control_data_store.rs b/event-core/src/mongo_control_data_store.rs new file mode 100644 index 00000000..1e04d7e8 --- /dev/null +++ b/event-core/src/mongo_control_data_store.rs @@ -0,0 +1,443 @@ +use crate::{ + config::EventCoreConfig, + store::{ControlDataStore, EventStore}, +}; +use anyhow::{bail, Context as AnyhowContext, Result}; +use async_trait::async_trait; +use bson::{doc, SerializerOptions}; +use futures::future::join_all; +use google_token_fetcher::GoogleTokenFetcher; +use handlebars::Handlebars; +use http::header::AUTHORIZATION; +use integrationos_domain::{ + algebra::{adapter::StoreAdapter, crypto::Crypto}, + common::{ + duplicates::Duplicates, + encrypted_access_key::EncryptedAccessKey, + event_access::EventAccess, + extractor::HttpExtractor, + middleware::Middleware, + mongo::{MongoDbStore, MongoDbStoreConfig}, + Connection, Event, Pipeline, Store, + }, + id::Id, + service::unified_destination::UnifiedDestination, +}; +use moka::future::Cache; +use mongodb::{options::ClientOptions, Client}; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; +use serde_json::{json, Value}; +use std::{collections::HashMap, sync::Arc, time::Duration}; +use tracing::{error, warn}; + +#[derive(Clone)] +pub struct MongoControlDataStore { + pub connections_store: MongoDbStore, + pub event_store: MongoDbStore, + pub event_access_store: MongoDbStore, + pub pipelines_store: MongoDbStore, + pub connections_cache: Cache, + pub event_cache: Cache, + pub event_access_cache: Cache, + pub pipelines_cache: Cache>, + pub pipeline_cache: Cache, + pub token_fetcher: Option, + pub http_client: reqwest::Client, + destination_caller: UnifiedDestination, +} + +impl MongoControlDataStore { + pub async fn new( + config: &EventCoreConfig, + secrets_client: Arc, + ) -> Result { + let mut client_options = ClientOptions::parse(&config.db.control_db_url) + .await + .with_context(|| "Could not parse control mongodb url")?; + + client_options.max_pool_size = Some(config.db_connection_count as u32); + let client = Client::with_options(client_options) + .with_context(|| "Failed to create control MongoDB client with options")?; + + let db = client.database(&config.db.control_db_name); + + let connections_store = + MongoDbStore::new(MongoDbStoreConfig::new(db.clone(), Store::Connections)).await?; + let event_access_store = + MongoDbStore::new(MongoDbStoreConfig::new(db.clone(), Store::EventAccess)).await?; + let pipelines_store = + MongoDbStore::new(MongoDbStoreConfig::new(db, Store::Pipelines)).await?; + + let mut event_client_options = ClientOptions::parse(&config.db.event_db_url) + .await + .with_context(|| "Could not parse events mongodb url")?; + + 
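+        // Cap this client's pool at the worker's connection budget, matching the
+        // control client above; main.rs also bounds concurrent events by
+        // db_connection_count, so (assumption) each in-flight event holds at
+        // most one connection per client.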
event_client_options.max_pool_size = Some(config.db_connection_count as u32); + let client = Client::with_options(event_client_options) + .with_context(|| "Failed to create events MongoDB client with options")?; + + let event_db = client.database(&config.db.event_db_name); + let event_store = MongoDbStore::new(MongoDbStoreConfig::new(event_db, Store::Events)) + .await + .with_context(|| { + format!( + "Could not connect to event db at {}", + config.db.event_db_name + ) + })?; + + Ok(Self { + connections_store, + event_store, + event_access_store, + pipelines_store, + connections_cache: Cache::builder() + .max_capacity(config.cache_size) + .time_to_live(Duration::from_secs(config.cache_ttl_secs)) + .build(), + event_cache: Cache::new(config.cache_size), + event_access_cache: Cache::builder() + .max_capacity(config.cache_size) + .time_to_live(Duration::from_secs(config.cache_ttl_secs)) + .build(), + pipelines_cache: Cache::builder() + .max_capacity(config.cache_size) + .time_to_live(Duration::from_secs(config.cache_ttl_secs)) + .build(), + pipeline_cache: Cache::builder() + .max_capacity(config.cache_size) + .time_to_live(Duration::from_secs(config.cache_ttl_secs)) + .build(), + token_fetcher: if config.fetch_google_auth_token { + Some(GoogleTokenFetcher::new()) + } else { + None + }, + http_client: reqwest::Client::new(), + destination_caller: UnifiedDestination::new( + config.db.clone(), + config.cache_size, + secrets_client, + ) + .await?, + }) + } + + async fn fetch_google_auth_token(&self, url: &str) -> Option { + let token_fetcher = &(self.token_fetcher.clone()?); + match token_fetcher.get_token(url).await { + Ok(header) => Some(header), + Err(_) => None, + } + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + pub async fn fetch_event_access(&self, event: &Event) -> Result> { + if let Some(event_access) = self.event_access_cache.get(&event.access_key).await { + return Ok(Some(event_access)); + } + + let filter = doc! { + "accessKey": &event.access_key, + "deleted": false, + }; + let Some(event_access) = self.event_access_store.get_one(filter).await.map_err(|e| { + error!("Could not query mongodb for event: {e}"); + e + })? + else { + warn!("Could not find event-access record"); + return Ok(None); + }; + self.event_access_cache + .insert(event.access_key.clone(), event_access.clone()) + .await; + Ok(Some(event_access)) + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + pub async fn fetch_pipelines(&self, event: &Event) -> Result> { + if let Some(pipelines) = self.pipelines_cache.get(&event.access_key).await { + return Ok(pipelines); + } + let filter = doc! { + "source.events": &event.name, + "source.type": &event.r#type, + "source.group": &event.group, + "active": true, + "deleted": false, + }; + + let pipelines = self + .pipelines_store + .get_many(Some(filter), None, None, None, None) + .await + .with_context(|| "Could not query mongodb for event")?; + + let futs = pipelines + .iter() + .map(|p| self.pipeline_cache.insert(p.key.clone(), p.clone())); + join_all(futs).await; + self.pipelines_cache + .insert(event.access_key.clone(), pipelines.clone()) + .await; + + Ok(pipelines) + } + + #[tracing::instrument(skip(self))] + pub async fn fetch_pipeline(&self, pipeline_id: &str) -> Result { + if let Some(pipeline) = self.pipeline_cache.get(pipeline_id).await { + return Ok(pipeline); + } + let Some(pipeline) = self + .pipelines_store + .get_one(doc! 
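+            // Cache-aside on a miss: load the pipeline by key from mongo, then
+            // backfill pipeline_cache below before returning it.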
{ "key": pipeline_id }) + .await + .map_err(|e| { + error!("Could not query mongodb for event: {e}"); + e + })? + else { + bail!("Pipeline does not exist"); + }; + + self.pipeline_cache + .insert(pipeline_id.to_string(), pipeline.clone()) + .await; + Ok(pipeline) + } +} + +#[async_trait] +impl ControlDataStore for MongoControlDataStore { + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + async fn fetch_connection(&self, event: &Event) -> Result { + if let Some(connection) = self.connections_cache.get(&event.access_key).await { + return Ok(connection); + } + + let access_key = EncryptedAccessKey::parse(&event.access_key) + .with_context(|| "Event has invalid access key")?; + let filter = doc! { + format!("accessKey.{}", access_key.prefix.environment): &event.access_key, + "deleted": false, + }; + let Some(connection) = self + .connections_store + .get_one(filter) + .await + .with_context(|| "Could not query mongodb for connection")? + else { + bail!("Could not find connection for event {}", event.id); + }; + + self.connections_cache + .insert(event.access_key.clone(), connection.clone()) + .await; + + Ok(connection) + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + async fn verify_event(&self, event: &Event) -> Result { + Ok(self.fetch_event_access(event).await?.is_some()) + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + async fn get_pipelines(&self, event: &Event) -> Result> { + let pipelines = self.fetch_pipelines(event).await?; + let mut futs = Vec::with_capacity(pipelines.len()); + for p in &pipelines { + futs.push(self.pipeline_cache.insert(p.key.clone(), p.clone())); + } + join_all(futs).await; + Ok(pipelines) + } + + #[tracing::instrument(skip(self))] + async fn get_pipeline(&self, pipeline_key: &str) -> Result { + match self.pipeline_cache.get(pipeline_key).await { + Some(pipeline) => Ok(pipeline), + None => self.fetch_pipeline(pipeline_key).await, + } + } + + #[tracing::instrument(skip(self))] + async fn get_extractor( + &self, + extractor_key: &str, + pipeline_key: &str, + ) -> Result { + let pipeline = match self.pipeline_cache.get(pipeline_key).await { + Some(pipeline) => pipeline, + None => self.fetch_pipeline(pipeline_key).await?, + }; + + for e in pipeline.middleware { + if let Middleware::HttpExtractor(e) = e { + if e.key == extractor_key { + return Ok(e); + } + } + } + bail!("No extractor found") + } + + #[tracing::instrument(skip(self))] + async fn execute_extractor(&self, e: &HttpExtractor) -> Result { + let auth_token = self.fetch_google_auth_token(&e.url).await; + + // Create a handlebars registry + let handlebars = Handlebars::new(); + + let context = e.context.clone(); + + // Convert context to a HashMap + let context_map: HashMap = + serde_json::from_value(context.unwrap_or(serde_json::json!({})))?; + + // Process headers + let headers_str = handlebars.render_template(&e.headers, &context_map)?; + let headers: HashMap = serde_json::from_str(&headers_str)?; + + // Process data (body) + let data_str = handlebars.render_template(&e.data, &context_map)?; + + // Create a HeaderMap from processed headers + let mut header_map = HeaderMap::new(); + for (key, value) in headers.iter() { + let header_name = HeaderName::from_lowercase(key.as_bytes())?; + let header_value = HeaderValue::from_str(value)?; + header_map.insert(header_name, header_value); + } + + if let Some(ref token) = auth_token { + header_map.insert(AUTHORIZATION, token.try_into()?); + } + + let response = self + 
.http_client + .request(e.method.clone(), &e.url) + .headers(header_map) + .body(data_str) + .send() + .await?; + + if response.status().is_success() { + let mut headers = HashMap::new(); + for (k, v) in response.headers() { + let k = k.to_string(); + let v = String::from_utf8(v.as_bytes().to_vec())?; + headers.insert(k, v); + } + let response_body = response.json::().await?; + return Ok(json!({ + "headers": headers, + "data": response_body + })); + } else { + bail!(format!( + "Extractor failed: {} - {}", + response.status(), + response.text().await? + )); + } + } + + #[tracing::instrument(skip(self, pipeline), fields(pipeline.id = %pipeline.id))] + async fn send_to_destination( + &self, + event: &Event, + pipeline: &Pipeline, + context: Option, + ) -> Result { + let response = self + .destination_caller + .send_to_destination( + None, + &pipeline.destination, + event.headers.clone(), + HashMap::new(), + context.and_then(|c| serde_json::to_vec(&c).ok()), + ) + .await + .with_context(|| "Error sending event to destination")?; + + let response_string = response.text().await?; + + Ok(response_string) + } +} + +#[async_trait] +impl EventStore for MongoControlDataStore { + #[tracing::instrument(skip(self), fields(event_key = %event_key))] + async fn get(&self, event_key: &Id) -> Result { + if let Some(event) = self.event_cache.get(event_key).await { + return Ok(event); + } + + let Some(event) = self + .event_store + .get_one_by_id(event_key.to_string().as_str()) + .await + .map_err(|e| { + error!("Could not query mongodb for event: {e}"); + e + })? + else { + bail!("Could not find event"); + }; + self.event_cache.insert(*event_key, event.clone()).await; + Ok(event) + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + async fn set(&self, event: Event) -> Result<()> { + let options = SerializerOptions::builder().human_readable(false).build(); + self.event_store + .update_one( + &event.id.to_string(), + doc! { "$set": { + "duplicates": bson::to_bson_with_options(&event.duplicates, options.clone())?, + "createdAt": bson::to_bson_with_options(&event.record_metadata.created_at, options.clone())?, + "state": bson::to_bson_with_options(&event.state, options)? + } }, + ) + .await?; + self.event_cache.insert(event.id, event.clone()).await; + Ok(()) + } + + #[tracing::instrument(skip(self, event), fields(event.key = %event.key))] + async fn get_duplicates(&self, event: &Event) -> Result { + let query = doc! 
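+        // Flag any other event sharing one of this event's three hashes (body,
+        // event name, and model body, per the gateway tests), excluding the
+        // event itself by _id; count(..., Some(1)) makes this an existence check.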
{ + "$or": [ + { + "hashes.hash": { + "$eq": &event.hashes[0].hash, + } + }, + { + "hashes.hash": { + "$eq": &event.hashes[1].hash, + } + }, + { + "hashes.hash": { + "$eq": &event.hashes[2].hash, + } + }, + ], + "_id": { + "$ne": event.id.to_string() + } + }; + let duplicate_count = self.event_store.count(query, Some(1)).await?; + + Ok(Duplicates { + possible_collision: duplicate_count == 1, + }) + } +} diff --git a/event-core/src/store.rs b/event-core/src/store.rs new file mode 100644 index 00000000..3cdbfb7e --- /dev/null +++ b/event-core/src/store.rs @@ -0,0 +1,42 @@ +use anyhow::Result; +use async_trait::async_trait; +use integrationos_domain::{ + algebra::execution::ExecutionContext, + common::{duplicates::Duplicates, extractor::HttpExtractor, Connection, Event, Pipeline}, + id::Id, +}; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[async_trait] +pub trait ContextStore { + async fn get Deserialize<'a> + Unpin>( + &self, + context_key: &Id, + ) -> Result; + async fn set(&self, context: T) -> Result<()>; +} + +#[async_trait] +pub trait ControlDataStore { + async fn fetch_connection(&self, event: &Event) -> Result; + async fn verify_event(&self, event: &Event) -> Result; + async fn get_pipelines(&self, event: &Event) -> Result>; + async fn get_pipeline(&self, pipeline_key: &str) -> Result; + async fn get_extractor(&self, extractor_key: &str, pipeline_key: &str) + -> Result; + async fn execute_extractor(&self, extractor: &HttpExtractor) -> Result; + async fn send_to_destination( + &self, + event: &Event, + pipeline: &Pipeline, + context: Option, + ) -> Result; +} + +#[async_trait] +pub trait EventStore { + async fn get(&self, event_key: &Id) -> Result; + async fn set(&self, event: Event) -> Result<()>; + async fn get_duplicates(&self, event: &Event) -> Result; +} diff --git a/event-core/tests/mock_destination.rs b/event-core/tests/mock_destination.rs new file mode 100644 index 00000000..d0d19f29 --- /dev/null +++ b/event-core/tests/mock_destination.rs @@ -0,0 +1,231 @@ +use bson::SerializerOptions; +use chrono::Utc; +use envconfig::Envconfig; +use event_core::{ + config::EventCoreConfig, mongo_control_data_store::MongoControlDataStore, + store::ControlDataStore, +}; +use fake::{ + faker::{internet::en::FreeEmail, name::en::Name}, + Fake, Faker, +}; +use http::Method; +use integrationos_domain::{ + algebra::crypto::Crypto, + common::{ + api_model_config::{ApiModelConfig, AuthMethod, SamplesInput, SchemasInput}, + connection_model_definition::{ + ConnectionModelDefinition, CrudAction, PlatformInfo, TestConnection, + }, + destination::Action, + environment::Environment, + ownership::Ownership, + record_metadata::RecordMetadata, + settings::Settings, + Connection, ConnectionType, Pipeline, Throughput, + }, + create_secret_response::CreateSecretResponse, + get_secret_request::GetSecretRequest, + id::{prefix::IdPrefix, Id}, + IntegrationOSError, +}; +use mockito::Server; +use mongodb::Client; +use serde_json::{json, Value}; +use std::{collections::HashMap, str::FromStr, sync::Arc}; +use testcontainers_modules::{mongo::Mongo, testcontainers::clients::Cli as Docker}; +use uuid::Uuid; + +pub async fn seed_db(config: &EventCoreConfig, base_url: String) -> Id { + let client = Client::with_uri_str(&config.db.control_db_url) + .await + .unwrap(); + let db = client.database(&config.db.control_db_name); + let ts = Utc::now(); + let uuid = Uuid::nil(); + let event_access_id = Id::new_with_uuid(IdPrefix::EventAccess, ts, uuid); + + let stripe_model_config = ConnectionModelDefinition { 
+ id: Id::from_str("conn_mod_def::AAAAAAAAAAA::AAAAAAAAAAAAAAAAAAAAAA").unwrap(), + platform_version: "2023-08-16".to_string(), + connection_platform: "stripe".to_string(), + connection_definition_id: Id::from_str("conn::AAAAAAAAAAA::AAAAAAAAAAAAAAAAAAAAAA") + .unwrap(), + title: "Create Stripe Customers".to_string(), + name: "customers".to_string(), + key: "api::stripe::v1::customer::create::create_customer".to_string(), + model_name: "Customers".to_string(), + action_name: Faker.fake::(), + platform_info: PlatformInfo::Api(ApiModelConfig { + base_url, + path: "customers".to_string(), + auth_method: AuthMethod::BearerToken { + value: "{{STRIPE_SECRET_KEY}}".to_string(), + }, + headers: None, + query_params: None, + schemas: SchemasInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + content: None, + samples: SamplesInput { + headers: None, + query_params: None, + path_params: None, + body: None, + }, + responses: vec![], + paths: None, + }), + action: http::Method::POST, + extractor_config: None, + test_connection_status: TestConnection::default(), + record_metadata: Default::default(), + is_default_crud_mapping: None, + mapping: None, + }; + + db.collection("connection-model-definitions") + .insert_one( + bson::to_bson_with_options( + &stripe_model_config, + SerializerOptions::builder().human_readable(false).build(), + ) + .unwrap(), + None, + ) + .await + .unwrap(); + + let conn = Connection { + id: Id::new_with_uuid(IdPrefix::Connection, ts, uuid), + platform_version: "platformVersion".to_string(), + connection_definition_id: Id::new_with_uuid(IdPrefix::ConnectionDefinition, ts, uuid), + r#type: ConnectionType::Api { + model_configs: vec![stripe_model_config], + oauth_configs: vec![], + }, + name: "name".to_string(), + key: "key".into(), + group: "group".to_string(), + platform: "platform".to_string().into(), + environment: Environment::Live, + secrets_service_id: "secrets_service_id".to_string(), + event_access_id, + access_key: "accessKey".to_string(), + settings: Settings::default(), + throughput: Throughput { + key: "throughputKey".to_string(), + limit: 100, + }, + ownership: Ownership::default(), + oauth: None, + record_metadata: RecordMetadata::default(), + }; + + db.collection("connections") + .insert_one( + bson::to_bson_with_options( + &conn, + SerializerOptions::builder().human_readable(false).build(), + ) + .unwrap(), + None, + ) + .await + .unwrap(); + conn.id +} + +async fn get_control_store( + config: &EventCoreConfig, + secrets_client: Arc, +) -> MongoControlDataStore { + MongoControlDataStore::new(config, secrets_client) + .await + .unwrap() +} + +#[tokio::test] +async fn test_send_to_destination() { + let docker = Docker::default(); + let mongo = docker.run(Mongo); + let host_port = mongo.get_host_port_ipv4(27017); + let connection_string = format!("mongodb://127.0.0.1:{host_port}/?directConnection=true"); + + let config = EventCoreConfig::init_from_hashmap(&HashMap::from([ + ("CONTROL_DATABASE_URL".to_string(), connection_string), + ( + "CONTROL_DATABASE_NAME".to_string(), + Uuid::new_v4().to_string(), + ), + ])) + .unwrap(); + + let secret_key = "Stripe secret key"; + + let mut mock_server = Server::new_async().await; + + let mock = mock_server + .mock("POST", "/api/customers") + .match_header("Authorization", format!("Bearer {secret_key}").as_str()) + .with_status(200) + .with_body("Great success!") + .expect(1) + .create_async() + .await; + + seed_db(&config, mock_server.url() + "/api").await; + + #[derive(Clone)] + struct 
SecretsClient; + #[async_trait::async_trait] + impl Crypto for SecretsClient { + async fn decrypt(&self, _secret: &GetSecretRequest) -> Result { + Ok(json!({ + "STRIPE_SECRET_KEY": "Stripe secret key" + })) + } + async fn encrypt( + &self, + _key: String, + _value: &serde_json::Value, + ) -> Result { + unimplemented!() + } + } + + let store = get_control_store(&config, Arc::new(SecretsClient)).await; + + let mut pipeline: Pipeline = Faker.fake(); + pipeline.destination.connection_key = "key".into(); + pipeline.destination.platform = "stripe".into(); + pipeline.destination.action = Action::Passthrough { + method: Method::POST, + path: "customers".into(), + }; + + let event = Faker.fake(); + + let name: String = Name().fake(); + let email: String = FreeEmail().fake(); + + let result = store + .send_to_destination( + &event, + &pipeline, + Some(json!({ + "name": name, + "email": email + })), + ) + .await; + + assert!(result.is_ok()); + assert_eq!(result.unwrap(), "Great success!".to_string()); + + mock.assert_async().await; +} diff --git a/event-core/tests/mock_storage.rs b/event-core/tests/mock_storage.rs new file mode 100644 index 00000000..a3fcbf06 --- /dev/null +++ b/event-core/tests/mock_storage.rs @@ -0,0 +1,311 @@ +use std::{ + collections::HashMap, + sync::{Arc, Mutex}, +}; + +use anyhow::{anyhow, bail, Result}; +use async_trait::async_trait; +use chrono::Utc; +use event_core::{ + dispatcher::Dispatcher, + store::{ContextStore, ControlDataStore, EventStore}, +}; +use fake::{Fake, Faker}; +use integrationos_domain::{ + algebra::execution::ExecutionContext, + common::{ + duplicates::Duplicates, extractor::HttpExtractor, pipeline_context::Stage as PipelineStage, + root_context::Stage, Connection, Event, ExtractorContext, Pipeline, PipelineContext, + RootContext, + }, + id::{prefix::IdPrefix, Id}, +}; +use serde_json::Value; + +type Contexts = Arc>>>>; + +#[derive(Clone, Default)] +pub struct MockStorage { + pub contexts: Contexts, + pub pipelines: Arc>>, + pub events: Arc>>, + pub drop_at: Option, + pub fail_at: Option, + pub fail_pipeline_at: Option, +} + +impl MockStorage { + pub fn new() -> Self { + Self { + contexts: Arc::new(Mutex::new(HashMap::new())), + pipelines: Arc::new(Mutex::new(HashMap::new())), + events: Arc::new(Mutex::new(HashMap::new())), + drop_at: None, + fail_at: None, + fail_pipeline_at: None, + } + } +} + +#[async_trait] +impl ContextStore for MockStorage { + async fn get(&self, context_key: &Id) -> Result { + self.contexts + .lock() + .unwrap() + .get(context_key) + .map(|c| { + let last = c.last(); + last.expect("No context for {context_key}") + .downcast_ref::() + .expect("ExecutionContext could not be downcast") + .clone() + }) + .ok_or(anyhow!("No context for {context_key}")) + } + + async fn set(&self, context: T) -> Result<()> { + let context = Box::new(context); + self.contexts + .lock() + .unwrap() + .entry(*context.context_key()) + .and_modify(|v| v.push(context.clone())) + .or_insert(vec![context]); + Ok(()) + } +} + +macro_rules! 
fail_at { + ($fail_at:expr, $stage:pat, $message:expr) => { + if matches!($fail_at, $stage) { + bail!($message) + } + }; +} + +#[async_trait] +impl ControlDataStore for MockStorage { + async fn fetch_connection(&self, _event: &Event) -> Result { + unimplemented!() + } + + async fn verify_event(&self, _event: &Event) -> Result { + fail_at!( + self.fail_at, + Some(Stage::ProcessedDuplicates), + "Failed to fetch event" + ); + Ok(self.drop_at != Some(Stage::ProcessedDuplicates)) + } + + async fn get_pipelines(&self, _event: &Event) -> Result> { + fail_at!( + self.fail_at, + Some(Stage::Verified), + "Failed to get pipelines" + ); + Ok(self.pipelines.lock().unwrap().values().cloned().collect()) + } + + async fn get_pipeline(&self, pipeline_key: &str) -> Result { + fail_at!( + self.fail_pipeline_at, + Some(PipelineStage::New), + "Failed to get pipeline" + ); + self.pipelines + .lock() + .unwrap() + .get(pipeline_key) + .ok_or(anyhow!("Could not find pipeline for key {pipeline_key}")) + .cloned() + } + + async fn get_extractor( + &self, + _extractor_key: &str, + _pipeline_key: &str, + ) -> Result { + fail_at!( + self.fail_pipeline_at, + Some(PipelineStage::ExecutingExtractors(..)), + "Failed to get extractor" + ); + unimplemented!() + } + + async fn execute_extractor(&self, _extractor: &HttpExtractor) -> Result { + fail_at!( + self.fail_pipeline_at, + Some(PipelineStage::ExecutingExtractors(..)), + "Failed to get extractor" + ); + unimplemented!() + } + + async fn send_to_destination( + &self, + _event: &Event, + _pipeline: &Pipeline, + _context: Option, + ) -> Result { + Ok("{}".to_string()) + } +} + +#[async_trait] +impl EventStore for MockStorage { + async fn get(&self, event_key: &Id) -> Result { + self.events + .lock() + .unwrap() + .get(event_key) + .ok_or(anyhow!("Could not find event with key {event_key}")) + .cloned() + } + + async fn set(&self, event: Event) -> Result<()> { + self.events.lock().unwrap().insert(event.key, event); + Ok(()) + } + + async fn get_duplicates(&self, _event: &Event) -> Result { + Ok(Duplicates { + possible_collision: true, + }) + } +} + +#[tokio::test] +async fn get_and_set_contexts_downcasting_works() { + let store = MockStorage::default(); + let id = Id::new(IdPrefix::Event, Utc::now()); + + let context = RootContext::new(id); + ContextStore::set(&store, context.clone()).await.unwrap(); + assert_eq!( + context, + ContextStore::get(&store, context.context_key()) + .await + .unwrap() + ); + + let id = Id::new(IdPrefix::Event, Utc::now()); + let context = PipelineContext::new(id.to_string(), &context); + ContextStore::set(&store, context.clone()).await.unwrap(); + assert_eq!( + context, + ContextStore::get(&store, context.context_key()) + .await + .unwrap() + ); + + let id = Id::new(IdPrefix::Event, Utc::now()); + let context = ExtractorContext::new(id.to_string(), &context); + ContextStore::set(&store, context.clone()).await.unwrap(); + assert_eq!( + context, + ContextStore::get(&store, context.context_key()) + .await + .unwrap() + ); +} + +impl MockStorage { + fn get_at(&self, index: usize) -> T { + let c = self.contexts.lock().unwrap(); + let c = c.values().flatten().collect::>(); + + let last = c.get(index); + last.expect("No context for {context_key}") + .downcast_ref::() + .expect("ExecutionContext could not be downcast") + .clone() + } +} + +#[tokio::test] +#[ignore] +async fn run_dispatcher() { + let mut event: Event = Faker.fake(); + event.access_key = "id_live_1_abcd".to_owned(); + let store = Arc::new(MockStorage::new()); + 
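+    // Seed the mock with one event and one pipeline, run the dispatcher, and
+    // then assert the exact sequence of contexts persisted at each stage
+    // transition (checked index-by-index below).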
store.events.lock().unwrap().insert(event.id, event.clone()); + + let pipeline: Pipeline = Faker.fake(); + store + .pipelines + .lock() + .unwrap() + .insert(pipeline.id.clone(), pipeline.clone()); + + let dispatcher = Dispatcher { + context_store: store.clone(), + event_store: store.clone(), + control_data_store: store.clone(), + }; + + let context = RootContext::new(event.id); + let res = dispatcher.process_context(context).await; + assert!(res.is_ok()); + + let context = RootContext::new(event.id); + let res = dispatcher.process_root_context(context).await; + + assert!(res.is_ok()); + + macro_rules! root_context { + ($stage:expr) => {{ + let mut context = RootContext::new(event.id.clone()); + context.stage = $stage; + context + }}; + } + + macro_rules! pipeline_context { + ($stage:expr) => {{ + let context = RootContext::new(event.id.clone()); + let mut context = PipelineContext::new(pipeline.id.clone(), &context); + context.stage = $stage; + context + }}; + } + + for i in 0..7 { + match i { + 0 => assert_eq!(root_context!(Stage::Verified), store.get_at(i)), + 1 => assert_eq!(root_context!(Stage::ProcessedDuplicates), store.get_at(i)), + 2 => { + let mut map = HashMap::new(); + map.insert(pipeline.id.clone(), pipeline_context!(PipelineStage::New)); + assert_eq!( + root_context!(Stage::ProcessingPipelines(map)), + store.get_at(i) + ); + } + 3 => { + let map = HashMap::new(); + assert_eq!( + pipeline_context!(PipelineStage::ExecutingExtractors(map)), + store.get_at(i) + ); + } + 4 => assert_eq!( + pipeline_context!(PipelineStage::ExecutedExtractors(HashMap::new())), + store.get_at(i) + ), + 5 => assert_eq!( + pipeline_context!(PipelineStage::ExecutedTransformer(None)), + store.get_at(i) + ), + 6 => assert_eq!( + pipeline_context!(PipelineStage::FinishedPipeline), + store.get_at(i) + ), + _ => { + panic!("We should not have this many") + } + } + } +} diff --git a/gateway/Cargo.toml b/gateway/Cargo.toml new file mode 100644 index 00000000..eb071429 --- /dev/null +++ b/gateway/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "gateway" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow.workspace = true +async-trait.workspace = true +axum-macros.workspace = true +axum-prometheus = "0.4.0" +axum.workspace = true +dotenvy.workspace = true +envconfig.workspace = true +http.workspace = true +http-serde-ext.workspace = true +integrationos-domain.workspace = true +moka.workspace = true +mongodb.workspace = true +redis-retry = { path = "../redis-retry" } +serde.workspace = true +serde_json.workspace = true +tokio.workspace = true +tower-http.workspace = true +tracing-subscriber.workspace = true +tracing.workspace = true + +[dev-dependencies] +criterion = { version = "0.5.1", features = ["async_tokio"] } +tower = { version = "0.4", features = ["util"] } + +[[bench]] +name = "bench" +harness = false diff --git a/gateway/README.md b/gateway/README.md new file mode 100644 index 00000000..0ea59ee0 --- /dev/null +++ b/gateway/README.md @@ -0,0 +1,35 @@ +# Event Gateway + +Receives events by POSTing to the `/emit/:access_key` endpoint. Validates the access key and then stores the event in mongodb and transmits it over redis. + +## Dependencies + +Requires redis to send events to the [event-core](../event-core). + +```bash +$ docker run -p 6379:6379 redis +``` + +Requires mongodb. + +```bash +$ docker run -p 27017:27017 mongo +``` + +Connecting to an external mongodb requires setting some environment variables in your `.env` file depending on which db you want to use. 
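+
+For example, a minimal `.env` for an external event db might look like this
+(placeholder values):
+
+```bash
+EVENT_DATABASE_URL="mongodb://user:password@my-mongo-host:27017"
+EVENT_DATABASE_NAME="events-service"
+```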
+ +`"EVENT_DATABASE_URL"` and `"EVENT_DATABASE_NAME"` are for the db which stores events. + +## Running + +```bash +$ cargo run +``` + +By default this will log everything, including dependencies, at the `DEBUG` level. To do more granular filtering, you can set the `RUST_LOG` environment variable in the `.env` file or in the command line such as: + +```bash +$ RUST_LOG=gateway=info cargo run +``` + +which will output logs from only this crate at the `INFO` level. diff --git a/gateway/benches/bench.rs b/gateway/benches/bench.rs new file mode 100644 index 00000000..c838c8e6 --- /dev/null +++ b/gateway/benches/bench.rs @@ -0,0 +1,73 @@ +use axum::{body::Bytes, http::HeaderMap}; +use criterion::{criterion_group, criterion_main, Criterion}; +use gateway::{config::Config, mock_finalizer::MockFinalizer, server::Server, state::AppState}; +use integrationos_domain::common::{ + encrypted_access_key::EncryptedAccessKey, encrypted_data::PASSWORD_LENGTH, AccessKey, Event, +}; +use std::{collections::HashMap, hint::black_box, sync::Arc}; +use tokio::runtime::Builder; + +const KEY: &str = "id_test_1_Q71YUIZydcgSwJQNOUCHhaTMqmIvslIafF5LluORJfJKydMGELHtYe_ydtBIrVuomEnOZ4jfZQgtkqWxtG-s7vhbyir4kNjLyHKyDyh1SDubBMlhSI7Mq-M5RVtwnwFqZiOeUkIgHJFgcGQn0Plb1AkAAAAAAAAAAAAAAAAAAAAAAMwWY_9_oDOV75noniBViOVmVPUQqzcW8G3P8nuUD6Q"; +const PASSWORD: &[u8; PASSWORD_LENGTH] = b"32KFFT_i4UpkJmyPwY2TGzgHpxfXs7zS"; + +fn create_event_benchmark(c: &mut Criterion) { + c.bench_function("create and serialize event", |b| { + let key = AccessKey::parse_str(KEY, PASSWORD).unwrap(); + let body = "hello world".to_owned(); + b.iter(|| { + let event = black_box(Event::new( + black_box(&key), + black_box(&EncryptedAccessKey::parse(KEY).unwrap()), + black_box("event.received"), + black_box(HeaderMap::default()), + black_box(body.clone()), + )); + let _ = black_box(serde_json::to_string(black_box(&event)).unwrap()); + }) + }); +} + +async fn handler( + encrypted_access_key: EncryptedAccessKey<'_>, + payload: Bytes, + query: Option>, + headers: HeaderMap, + state: Arc, +) { + let _ = black_box( + Server::handle_event( + black_box(encrypted_access_key), + black_box(payload), + black_box(query), + black_box(headers), + black_box(state), + ) + .await, + ) + .unwrap(); +} + +fn response_benchmark(c: &mut Criterion) { + c.bench_function("respond to emit", |b| { + let rt = Builder::new_current_thread() + .enable_all() + .build() + .expect("Creating runtime failed"); + let access_key = EncryptedAccessKey::parse(KEY).unwrap(); + let config = Config::default(); + let state = Arc::new(AppState::new(config, Arc::new(MockFinalizer))); + let payload = Bytes::from_static(b"{\"foo\":\"bar\",\"baz\":\"qux\"}"); + b.to_async(rt).iter(|| { + handler( + access_key.clone(), + payload.clone(), + None, + HeaderMap::default(), + state.clone(), + ) + }) + }); +} + +criterion_group!(benches, create_event_benchmark, response_benchmark); +criterion_main!(benches); diff --git a/gateway/src/config.rs b/gateway/src/config.rs new file mode 100644 index 00000000..ae89ee39 --- /dev/null +++ b/gateway/src/config.rs @@ -0,0 +1,96 @@ +use envconfig::Envconfig; +use integrationos_domain::common::{database::DatabaseConfig, environment::Environment}; +use redis_retry::Config as RedisConfig; +use std::{ + fmt::{Display, Formatter}, + net::SocketAddr, +}; + +#[derive(Envconfig, Clone)] // Intentionally no Debug so secret is not printed +pub struct Config { + #[envconfig(from = "SERVER_ADDRESS", default = "0.0.0.0:3000")] + pub address: SocketAddr, + #[envconfig(from = "CACHE_SIZE", 
default = "10000")] + pub cache_size: u64, + #[envconfig(from = "SECRET", default = "32KFFT_i4UpkJmyPwY2TGzgHpxfXs7zS")] + pub secret_key: String, + #[envconfig(from = "ENVIRONMENT", default = "live")] + pub environment: Environment, + #[envconfig(nested = true)] + pub redis: RedisConfig, + #[envconfig(nested = true)] + pub db: DatabaseConfig, +} + +impl Config { + pub fn new() -> Self { + Self::default() + } +} + +impl Display for Config { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + writeln!(f, "SERVER_ADDRESS: {}", self.address)?; + writeln!(f, "CACHE_SIZE: {}", self.cache_size)?; + writeln!(f, "SECRET: ****")?; + writeln!(f, "ENVIRONMENT: {}", self.environment)?; + writeln!(f, "{}", self.redis)?; + writeln!(f, "{}", self.db) + } +} + +impl Default for Config { + fn default() -> Self { + Self { + address: "0.0.0.0:3000".parse().unwrap(), + cache_size: 10_000, + secret_key: "32KFFT_i4UpkJmyPwY2TGzgHpxfXs7zS".to_owned(), + environment: Environment::Test, + redis: RedisConfig::default(), + db: DatabaseConfig::default(), + } + } +} + +#[cfg(test)] + +mod tests { + use super::*; + + #[test] + fn test_config() { + let config = Config::new(); + assert_eq!(config.address, "0.0.0.0:3000".parse().unwrap()); + assert_eq!(config.cache_size, 10_000); + assert_eq!(config.secret_key, "32KFFT_i4UpkJmyPwY2TGzgHpxfXs7zS"); + assert_eq!(config.environment, Environment::Test); + assert_eq!(config.redis.url, "redis://localhost:6379"); + assert_eq!(config.redis.queue_name, "events"); + assert_eq!(config.redis.event_throughput_key, "event_throughput"); + assert_eq!(config.db.event_db_url, "mongodb://localhost:27017"); + assert_eq!(config.db.event_db_name, "database"); + assert_eq!(config.db.control_db_url, "mongodb://localhost:27017"); + assert_eq!(config.db.control_db_name, "database"); + assert_eq!(config.db.context_db_url, "mongodb://localhost:27017"); + assert_eq!(config.db.context_db_name, "database"); + assert_eq!(config.db.context_collection_name, "event-transactions"); + } + + #[test] + fn test_config_display() { + let config = Config::new(); + let mut display = r"SERVER_ADDRESS: 0.0.0.0:3000 +CACHE_SIZE: 10000 +SECRET: **** +ENVIRONMENT: test +" + .to_string(); + + display += &config.redis.to_string(); + display += "\n"; + display += &config.db.to_string(); + display += "\n"; + + assert_eq!(config.to_string(), display); + } +} diff --git a/gateway/src/event_request.rs b/gateway/src/event_request.rs new file mode 100644 index 00000000..6ebca26e --- /dev/null +++ b/gateway/src/event_request.rs @@ -0,0 +1,8 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +#[derive(Debug, Deserialize, Serialize)] +pub struct EventRequest { + pub event: String, + pub payload: Value, +} diff --git a/gateway/src/finalize_event.rs b/gateway/src/finalize_event.rs new file mode 100644 index 00000000..d74d94c7 --- /dev/null +++ b/gateway/src/finalize_event.rs @@ -0,0 +1,12 @@ +use async_trait::async_trait; +use integrationos_domain::common::{encrypted_access_key::EncryptedAccessKey, Event}; + +#[async_trait] +pub trait FinalizeEvent { + async fn finalize_event( + &self, + event: &Event, + event_name: &str, + access_key: &EncryptedAccessKey, + ) -> Result; +} diff --git a/gateway/src/finalizer.rs b/gateway/src/finalizer.rs new file mode 100644 index 00000000..d72fb2de --- /dev/null +++ b/gateway/src/finalizer.rs @@ -0,0 +1,96 @@ +use crate::{config::Config, finalize_event::FinalizeEvent}; +use anyhow::{bail, Context, Result}; +use async_trait::async_trait; +use integrationos_domain::{ + 
algebra::adapter::StoreAdapter, + common::{ + encrypted_access_key::EncryptedAccessKey, + event_with_context::EventWithContext, + mongo::{MongoDbStore, MongoDbStoreConfig}, + Event, RootContext, Store, + }, +}; +use mongodb::Collection; +use redis_retry::{AsyncCommands, Redis}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tracing::{debug, error}; + +pub struct Finalizer { + redis: Arc>, + context_collection: Collection, + event_store: MongoDbStore, + queue_name: String, +} + +impl Finalizer { + pub async fn new(config: Config) -> Result { + let redis = Redis::new_with_retry_count(&config.redis, 2).await?; + + let context_mongo_client = mongodb::Client::with_uri_str(config.db.context_db_url) + .await + .with_context(|| "Could not connect to context mongodb")?; + let context_db = context_mongo_client.database(&config.db.context_db_name); + let context_collection = context_db.collection(&config.db.context_collection_name); + + let mongo = mongodb::Client::with_uri_str(config.db.event_db_url) + .await + .with_context(|| "Could not connect to mongodb")?; + let mongo = mongo.database(&config.db.event_db_name); + let event_store = MongoDbStore::new(MongoDbStoreConfig::new(mongo, Store::Events)) + .await + .with_context(|| { + format!( + "Could not connect to event db at {}", + config.db.event_db_name + ) + })?; + Ok(Self { + redis: Arc::new(Mutex::new(redis)), + context_collection, + event_store, + queue_name: config.redis.queue_name, + }) + } +} + +#[async_trait] +impl FinalizeEvent for Finalizer { + async fn finalize_event( + &self, + event: &Event, + _event_name: &str, + _access_key: &EncryptedAccessKey, + ) -> Result { + match self.event_store.create_one(event).await { + Err(e) => { + error!("Failed to save event: {e}"); + bail!(e); + } + Ok(r) => { + debug!("Inserted event {event:?} => result for insertion {r:?}"); + } + } + let context = RootContext::new(event.id); + match self.context_collection.insert_one(&context, None).await { + Err(e) => { + error!("Failed to save event context: {e}"); + bail!(e); + } + Ok(r) => { + debug!("Inserted event context {context:?} => result for insertion {r:?}"); + } + } + + let msg = EventWithContext::new(event.clone(), context); + let msg: Vec = serde_json::to_vec(&msg)?; + let mut conn = self.redis.lock().await; + match conn.lpush(&self.queue_name, &msg).await { + Ok(()) => Ok("Sent on redis".to_string()), + Err(e) => { + error!("Could not publish to redis: {e}"); + bail!(e); + } + } + } +} diff --git a/gateway/src/lib.rs b/gateway/src/lib.rs new file mode 100644 index 00000000..34b6e6e4 --- /dev/null +++ b/gateway/src/lib.rs @@ -0,0 +1,8 @@ +pub mod config; +pub mod event_request; +pub mod finalize_event; +pub mod finalizer; +pub mod mock_finalizer; +pub mod server; +pub mod state; +pub mod util; diff --git a/gateway/src/main.rs b/gateway/src/main.rs new file mode 100644 index 00000000..5e38e032 --- /dev/null +++ b/gateway/src/main.rs @@ -0,0 +1,37 @@ +use anyhow::Result; +use dotenvy::dotenv; +use envconfig::Envconfig; +use gateway::finalizer::Finalizer; +use gateway::{config::Config, server::Server}; +use integrationos_domain::common::encrypted_data::PASSWORD_LENGTH; +use tracing::info; +use tracing::metadata::LevelFilter; +use tracing_subscriber::EnvFilter; + +#[tokio::main(flavor = "multi_thread")] +async fn main() -> Result<()> { + dotenv().ok(); + + let filter = EnvFilter::builder() + .with_default_directive(LevelFilter::DEBUG.into()) + .from_env_lossy(); + tracing_subscriber::fmt().with_env_filter(filter).init(); + + let config = 
Config::init_from_env()?; + if config.secret_key.len() != PASSWORD_LENGTH { + panic!( + "Secret key must be {PASSWORD_LENGTH} characters long, provided key is {} characters long", + config.secret_key.len() + ); + } + + info!("Starting gateway with config: {config}"); + + let finalizer = Finalizer::new(config.clone()).await?; + + let server = Server::new(config, finalizer); + + server.run().await?; + + Ok(()) +} diff --git a/gateway/src/mock_finalizer.rs b/gateway/src/mock_finalizer.rs new file mode 100644 index 00000000..60b1c2c7 --- /dev/null +++ b/gateway/src/mock_finalizer.rs @@ -0,0 +1,17 @@ +use super::finalize_event::FinalizeEvent; +use async_trait::async_trait; +use integrationos_domain::common::{encrypted_access_key::EncryptedAccessKey, Event}; + +pub struct MockFinalizer; + +#[async_trait] +impl FinalizeEvent for MockFinalizer { + async fn finalize_event( + &self, + _event: &Event, + _event_name: &str, + _access_key: &EncryptedAccessKey, + ) -> Result { + Ok("sent".to_owned()) + } +} diff --git a/gateway/src/server.rs b/gateway/src/server.rs new file mode 100644 index 00000000..6f9967ed --- /dev/null +++ b/gateway/src/server.rs @@ -0,0 +1,373 @@ +use crate::{ + config::Config, event_request::EventRequest, finalize_event::FinalizeEvent, + mock_finalizer::MockFinalizer, state::AppState, util::get_value_from_path, +}; +use anyhow::{anyhow, Result}; +use axum::{ + body::Bytes, + extract::{Path, Query, State}, + http::{HeaderMap, HeaderName, StatusCode}, + routing::{get, post}, + Json, Router, +}; +use axum_prometheus::PrometheusMetricLayer; +use integrationos_domain::common::{ + encrypted_access_key::EncryptedAccessKey, event_response::EventResponse, event_type::EventType, + AccessKey, Event, +}; +use std::{collections::HashMap, iter::once, sync::Arc}; +use tower_http::{ + cors::{Any, CorsLayer}, + sensitive_headers::SetSensitiveRequestHeadersLayer, + trace::TraceLayer, +}; +use tracing::{error, info, warn}; + +const HEADER_STR: &str = "x-buildable-secret"; +const INVALID_ACCESS_KEY_ERROR: (StatusCode, &str) = + (StatusCode::BAD_REQUEST, "Invalid access key"); +const MISSING_HEADER_ERROR: (StatusCode, &str) = + (StatusCode::BAD_REQUEST, "Missing x-buildable-secret header"); + +#[derive(Clone)] +pub struct Server { + config: Config, + finalizer: Arc, +} + +impl Default for Server { + fn default() -> Self { + Self { + config: Config::default(), + finalizer: Arc::new(MockFinalizer), + } + } +} + +impl Server { + pub fn new(config: Config, finalizer: impl FinalizeEvent + Sync + Send + 'static) -> Self { + Self { + config, + finalizer: Arc::new(finalizer), + } + } + + pub async fn run(&self) -> Result<()> { + let app = self.get_router(); + info!("Gateway server listening on {}", self.config.address); + axum::Server::bind(&self.config.address) + .serve(app.into_make_service()) + .await + .map_err(|e| anyhow!("Server error: {}", e)) + } + + fn get_router(&self) -> Router { + let state = Arc::new(AppState::new(self.config.clone(), self.finalizer.clone())); + let mut router = Router::new() + .route("/emit", post(post_event_sk)) + .route("/emit/:id", post(post_event_id)) + .layer(SetSensitiveRequestHeadersLayer::new(once( + HeaderName::from_lowercase(HEADER_STR.as_bytes()).unwrap(), + ))) + .layer(TraceLayer::new_for_http()) + .route("/", get(get_root)) + .layer(CorsLayer::new().allow_origin(Any)) + .with_state(state); + + if !cfg!(test) { + let (prometheus_layer, metric_handle) = PrometheusMetricLayer::pair(); + router = router + .route("/metrics", get(|| async move { metric_handle.render() 
+                .route("/metrics", get(|| async move { metric_handle.render() }))
+                .layer(prometheus_layer);
+        }
+        router
+    }
+
+    pub async fn handle_event(
+        encrypted_access_key: EncryptedAccessKey<'_>,
+        payload: Bytes,
+        query: Option<HashMap<String, String>>,
+        headers: HeaderMap,
+        state: Arc<AppState>,
+    ) -> Result<Json<EventResponse>, (StatusCode, &'static str)> {
+        if encrypted_access_key.prefix.environment != state.config.environment {
+            warn!("Identifier is wrong environment");
+            return Err(INVALID_ACCESS_KEY_ERROR);
+        };
+
+        let encrypted_access_key = encrypted_access_key.to_static();
+        let access_key = if let Some(access_key) = state.cache.get(&encrypted_access_key).await {
+            access_key
+        } else if let Ok(access_key) =
+            AccessKey::parse(&encrypted_access_key, &state.get_secret_key())
+        {
+            state
+                .cache
+                .insert(encrypted_access_key.clone().to_static(), access_key.clone())
+                .await;
+            access_key
+        } else {
+            warn!("Identifier cannot be decrypted");
+            return Err(INVALID_ACCESS_KEY_ERROR);
+        };
+
+        let (name, payload) = if access_key.prefix.event_type == EventType::SecretKey {
+            let payload = match serde_json::from_slice::<EventRequest>(&payload) {
+                Ok(payload) => payload,
+                Err(e) => {
+                    warn!("Failed to deserialize payload: {e:?}");
+                    return Err((StatusCode::BAD_REQUEST, "Failed to deserialize payload"));
+                }
+            };
+            (payload.event, payload.payload.to_string())
+        } else {
+            let name = get_value_from_path(
+                &mut access_key.data.event_path.clone(),
+                &headers,
+                &payload,
+                &query,
+            )
+            .unwrap_or("null".to_string());
+            let payload = match String::from_utf8(payload.to_vec()) {
+                Ok(payload) => payload,
+                Err(e) => {
+                    warn!("Failed to deserialize payload: {e:?}");
+                    return Err((StatusCode::BAD_REQUEST, "Failed to deserialize payload"));
+                }
+            };
+            (name, payload)
+        };
+
+        let event = Event::new(&access_key, &encrypted_access_key, &name, headers, payload);
+
+        match state
+            .finalizer
+            .finalize_event(&event, &name, &encrypted_access_key)
+            .await
+        {
+            Ok(_) => Ok(Json(EventResponse::new(event))),
+            Err(e) => {
+                error!("Failed to finalize event: {e:?}");
+                Err((
+                    StatusCode::INTERNAL_SERVER_ERROR,
+                    "Failed to acknowledge event",
+                ))
+            }
+        }
+    }
+}
+
+#[axum_macros::debug_handler]
+async fn post_event_sk(
+    headers: HeaderMap,
+    State(state): State<Arc<AppState>>,
+    body: Bytes,
+) -> Result<Json<EventResponse>, (StatusCode, &'static str)> {
+    let Some(identifier) = headers.get(HEADER_STR) else {
+        return Err(MISSING_HEADER_ERROR);
+    };
+
+    let Ok(identifier) = identifier.to_str() else {
+        warn!("Could not convert identifier to string");
+        return Err(INVALID_ACCESS_KEY_ERROR);
+    };
+
+    let encrypted_key = match EncryptedAccessKey::parse(identifier) {
+        Ok(e) => e,
+        Err(e) => {
+            warn!("Could not parse identifier: {e}");
+            return Err(INVALID_ACCESS_KEY_ERROR);
+        }
+    };
+
+    if encrypted_key.prefix.event_type != EventType::SecretKey {
+        warn!("Identifier is not type \"secret key\"");
+        return Err(INVALID_ACCESS_KEY_ERROR);
+    }
+
+    Server::handle_event(encrypted_key, body, None, headers.clone(), state).await
+}
+
+#[axum_macros::debug_handler]
+async fn post_event_id(
+    headers: HeaderMap,
+    query: Option<Query<HashMap<String, String>>>,
+    Path(identifier): Path<String>,
+    State(state): State<Arc<AppState>>,
+    body: Bytes,
+) -> Result<Json<EventResponse>, (StatusCode, &'static str)> {
+    let encrypted_key = match EncryptedAccessKey::parse(&identifier) {
+        Ok(e) => e,
+        Err(e) => {
+            warn!("Could not parse identifier: {e}");
+            return Err(INVALID_ACCESS_KEY_ERROR);
+        }
+    };
+
+    if encrypted_key.prefix.event_type != EventType::Id {
+        warn!("Identifier is not type \"id\"");
+        return Err(INVALID_ACCESS_KEY_ERROR);
+    }
+
+    let query = query.map(|q| q.0);
+
+    Server::handle_event(encrypted_key, body, query, headers, state).await
+}
+
+async fn get_root() {}
+
+#[cfg(test)]
+mod tests {
+    use axum::{
+        body::{Body, HttpBody},
+        http::{header::CONTENT_TYPE, Method, Request, StatusCode},
+    };
+    use integrationos_domain::common::{
+        event_state::EventState,
+        hashes::{HashType, HashValue},
+    };
+    use tower::ServiceExt;
+
+    use super::*;
+
+    const VALID_ID_KEY: &str = "id_test_1_Q71YUIZydcgSwJQNOUCHhaTMqmIvslIafF5LluORJfJKydMGELHtYe_ydtBIrVuomEnOZ4jfZQgtkqWxtG-s7vhbyir4kNjLyHKyDyh1SDubBMlhSI7Mq-M5RVtwnwFqZiOeUkIgHJFgcGQn0Plb1AkAAAAAAAAAAAAAAAAAAAAAAMwWY_9_oDOV75noniBViOVmVPUQqzcW8G3P8nuUD6Q";
+    const VALID_SK_KEY: &str = "sk_test_1_Q71YUIZydcgSwJQNOUCHhaTMqmIvslIafF5LluORJfJKydMGELHtYe_ydtBIrVuomEnOZ4jfZQgtkqWxtG-s7vhbyir4kNjLyHKyDyh1SDubBMlhSI7Mq-M5RVtwnwFqZiOeUkIgHJFgcGQn0Plb1AkAAAAAAAAAAAAAAAAAAAAAAMwWY_9_oDOV75noniBViOVmVPUQqzcW8G3P8nuUD6Q";
+
+    #[tokio::test]
+    async fn test_emit_id() {
+        let router = Server::default().get_router();
+        let response = router
+            .oneshot(
+                Request::builder()
+                    .uri(format!("/emit/{VALID_ID_KEY}"))
+                    .header(CONTENT_TYPE, "application/json")
+                    .method(Method::POST)
+                    .body(Body::from("{\"foo\": \"bar\"}"))
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+
+        assert_eq!(response.status(), StatusCode::OK);
+        let body = response.into_body().data().await.unwrap().unwrap();
+        let resp = serde_json::from_slice::<EventResponse>(&body).unwrap();
+        assert_eq!(resp.status, EventState::Acknowledged);
+        assert_eq!(
+            resp.hashes,
+            [
+                HashValue {
+                    r#type: HashType::Body,
+                    hash: "22c11adbd1e780c95a6840ea76c1d6727aba620cd41474c712129d1b22f5ea71"
+                        .to_owned(),
+                },
+                HashValue {
+                    r#type: HashType::Event,
+                    hash: "40c5dcdc28d5bdad5346a822218b0ef0ef996427e411849d1c7f4df205b27060"
+                        .to_owned(),
+                },
+                HashValue {
+                    r#type: HashType::ModelBody,
+                    hash: "848be1959192dc863543bd71428dad257c341e34b640d80ac9a4692b06e93db7"
+                        .to_owned(),
+                },
+            ]
+        );
+    }
+
+    #[tokio::test]
+    async fn test_invalid_emit_id() {
+        let router = Server::default().get_router();
+        let response = router
+            .oneshot(
+                Request::builder()
+                    .uri(format!("/emit/{VALID_SK_KEY}"))
+                    .header(CONTENT_TYPE, "application/json")
+                    .method(Method::POST)
+                    .body(Body::from("{\"foo\": \"bar\"}"))
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+        assert_eq!(response.status(), StatusCode::BAD_REQUEST);
+        let body = response.into_body().data().await.unwrap().unwrap();
+        assert_eq!(body, "Invalid access key");
+    }
+
+    #[tokio::test]
+    async fn test_emit_sk() {
+        let router = Server::default().get_router();
+        let response = router
+            .oneshot(
+                Request::builder()
+                    .uri("/emit")
+                    .header(CONTENT_TYPE, "application/json")
+                    .header(HEADER_STR, VALID_SK_KEY)
+                    .method(Method::POST)
+                    .body(Body::from("{\"event\": \"foo\", \"payload\": \"bar\"}"))
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+
+        assert_eq!(response.status(), StatusCode::OK);
+        let body = response.into_body().data().await.unwrap().unwrap();
+        let resp = serde_json::from_slice::<EventResponse>(&body).unwrap();
+        assert_eq!(resp.status, EventState::Acknowledged);
+        assert_eq!(
+            resp.hashes,
+            [
+                HashValue {
+                    r#type: HashType::Body,
+                    hash: "2fe2fcbf5698e1ede94a12436044883d964c9d36ba32bee0d6ef69bd9e83bbad"
+                        .to_owned(),
+                },
+                HashValue {
+                    r#type: HashType::Event,
+                    hash: "be1eef101bdd5dd790d31e23da5b41c551647bf82ffa6baf0c32b73b34a7a6f9"
+                        .to_owned(),
+                },
+                HashValue {
+                    r#type: HashType::ModelBody,
+                    hash: "e63f132f3658d07b1b0de65d1c71bc95ec65e950b72d7743033f0450ff1e3bb6"
+                        .to_owned(),
+                },
+            ]
+        );
+    }
+
+    #[tokio::test]
+    async fn test_invalid_emit_sk() {
+        let router = Server::default().get_router();
+        let response = router
+            .oneshot(
+                Request::builder()
+                    .uri("/emit/123")
+                    .header(CONTENT_TYPE, "application/json")
+                    .header(HEADER_STR, VALID_ID_KEY)
+                    .method(Method::POST)
+                    .body(Body::from("{\"event\": \"foo\", \"payload\": \"bar\"}"))
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+        assert_eq!(response.status(), StatusCode::BAD_REQUEST);
+        let body = response.into_body().data().await.unwrap().unwrap();
+        assert_eq!(body, "Invalid access key");
+    }
+
+    #[tokio::test]
+    async fn test_root_returns_ok() {
+        let router = Server::default().get_router();
+        let response = router
+            .oneshot(
+                Request::builder()
+                    .uri("/")
+                    .method(Method::GET)
+                    .body(Body::empty())
+                    .unwrap(),
+            )
+            .await
+            .unwrap();
+        assert_eq!(response.status(), StatusCode::OK);
+    }
+}
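Since the two routes and their auth rules are easiest to grasp from the caller's side, here is a hedged client sketch of the emit flows the tests above exercise. reqwest (with its json feature enabled), the localhost:3000 address, and the key literals are placeholder assumptions for illustration, not part of this diff.

    // Hypothetical caller, not part of this diff.
    async fn emit_examples() -> anyhow::Result<()> {
        let client = reqwest::Client::new();

        // Secret-key flow: the key travels in the x-buildable-secret header
        // and the body names the event explicitly.
        client
            .post("http://localhost:3000/emit")
            .header("x-buildable-secret", "sk_test_...")
            .json(&serde_json::json!({ "event": "foo", "payload": "bar" }))
            .send()
            .await?
            .error_for_status()?;

        // Id flow: the key is the path segment; the event name is resolved
        // from the access key's configured event path (see util.rs below).
        client
            .post("http://localhost:3000/emit/id_test_...")
            .json(&serde_json::json!({ "foo": "bar" }))
            .send()
            .await?
            .error_for_status()?;

        Ok(())
    }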
diff --git a/gateway/src/state.rs b/gateway/src/state.rs
new file mode 100644
index 00000000..9d55c15f
--- /dev/null
+++ b/gateway/src/state.rs
@@ -0,0 +1,30 @@
+use super::finalize_event::FinalizeEvent;
+use crate::config::Config;
+use integrationos_domain::common::{
+    encrypted_access_key::EncryptedAccessKey, encrypted_data::PASSWORD_LENGTH, AccessKey,
+};
+use moka::future::Cache;
+use std::sync::Arc;
+
+pub struct AppState {
+    pub config: Config,
+    pub cache: Cache<EncryptedAccessKey<'static>, AccessKey>,
+    pub finalizer: Arc<dyn FinalizeEvent + Sync + Send>,
+}
+
+impl AppState {
+    pub fn new(config: Config, finalizer: Arc<dyn FinalizeEvent + Sync + Send>) -> Self {
+        let cache = Cache::new(config.cache_size);
+        Self {
+            config,
+            cache,
+            finalizer,
+        }
+    }
+
+    pub fn get_secret_key(&self) -> [u8; PASSWORD_LENGTH] {
+        // We validate in main.rs that the config has a 32 byte secret key,
+        // so this is safe to unwrap
+        self.config.secret_key.as_bytes().try_into().unwrap()
+    }
+}
diff --git a/gateway/src/util.rs b/gateway/src/util.rs
new file mode 100644
index 00000000..b3c84c8f
--- /dev/null
+++ b/gateway/src/util.rs
@@ -0,0 +1,150 @@
+use anyhow::{anyhow, Result};
+use http::HeaderMap;
+use serde_json::json;
+use std::collections::HashMap;
+
+pub fn get_value_from_path(
+    path: &mut String,
+    headers: &HeaderMap,
+    body: &[u8],
+    query: &Option<HashMap<String, String>>,
+) -> Result<String> {
+    if path.len() < 2 || &path[0..2] != "_." {
+        return Ok(path.to_owned());
+    }
+
+    let body = serde_json::from_slice::<serde_json::Value>(body)?;
+    let headers = http_serde_ext::header_map::serialize(headers, serde_json::value::Serializer)?;
+    let mut obj = json!({
+        "headers": headers,
+        "body": body,
+        "query": query,
+    });
+
+    for key in path.split('.').skip(1) {
+        let temp = match obj.get(key) {
+            Some(t) => t,
+            None => {
+                return Err(anyhow!("No value found for path: {}", path));
+            }
+        };
+        obj = temp.clone();
+    }
+
+    Ok(obj
+        .as_str()
+        .map(|s| s.to_owned())
+        .unwrap_or_else(|| obj.to_string()))
+}
+
+#[cfg(test)]
+mod tests {
+    use std::collections::HashMap;
+
+    use http::HeaderMap;
+
+    use super::*;
+
+    #[test]
+    fn test_stripe_signature() {
+        let mut path = r#"_.headers.stripe-signature"#.to_owned();
+        let headers = r#"{"content-type":"application/json; charset=utf-8","cache-control":"no-cache","user-agent":"Stripe/1.0 (+https://stripe.com/docs/webhooks)","accept":"*/*; q=0.5, application/xml","stripe-signature":"t=1689703968,v1=035b09d5fd7ddad1ba0a05798e7fa914ad704e50e39845eb2d03c2234d1fbb2a,v0=a78258146fc18af95b4bca66051fe7dae809a398ba524d10c0a972b26106d33e","host":"development-stream.event.dev","content-length":"1117","x-cloud-trace-context":"283401a42e9257773bfe4320acce8e17/319072271516621528","via":"1.1 google","x-forwarded-for":"35.154.171.200, 34.117.226.41","x-forwarded-proto":"https","connection":"Keep-Alive"}"#;
+        let headers = http_serde_ext::header_map::deserialize(
+            &mut serde_json::Deserializer::from_str(headers),
+        )
+        .unwrap();
+        let body = r#"{
+            "id": "evt_1NVIOBSGVSOWoR3QvDwZ4VjP",
+            "object": "event",
+            "api_version": "2020-08-27",
+            "created": 1689703967,
+            "data": {
+                "object": {
+                    "id": "cus_OHs8z1ZNSlvJ3r",
+                    "object": "customer",
+                    "address": null,
+                    "balance": 0,
+                    "created": 1689703967,
+                    "currency": null,
+                    "default_currency": null,
+                    "default_source": null,
+                    "delinquent": false,
+                    "description": null,
+                    "discount": null,
+                    "email": null,
+                    "invoice_prefix": "957E59A7",
+                    "invoice_settings": {
+                        "custom_fields": null,
+                        "default_payment_method": null,
+                        "footer": null,
+                        "rendering_options": null
+                    },
+                    "livemode": false,
+                    "metadata": {},
+                    "name": "Demo",
+                    "next_invoice_sequence": 1,
+                    "phone": null,
+                    "preferred_locales": [],
+                    "shipping": null,
+                    "tax_exempt": "none",
+                    "test_clock": null
+                }
+            },
+            "livemode": false,
+            "pending_webhooks": 15,
+            "request": {
+                "id": "req_4JjE4wAaOiFbkq",
+                "idempotency_key": "ef53b6c6-2bf1-45b6-9e7c-0c5c65c9d579"
+            },
+            "type": "customer.created"
+        }"#;
+
+        let res = get_value_from_path(&mut path, &headers, body.to_owned().as_bytes(), &None);
+        assert_eq!(res.unwrap(), "t=1689703968,v1=035b09d5fd7ddad1ba0a05798e7fa914ad704e50e39845eb2d03c2234d1fbb2a,v0=a78258146fc18af95b4bca66051fe7dae809a398ba524d10c0a972b26106d33e");
+    }
+
+    #[test]
+    fn test_get_value_from_path() {
+        let mut path = "foo".to_owned();
+        let mut headers = HeaderMap::new();
+        headers.insert("quux", "quuz".parse().unwrap());
+        let body = b"{}";
+        let query = None;
+        let name = get_value_from_path(&mut path, &headers, body, &query).unwrap();
+        assert_eq!(name, "foo");
+
+        let mut path = "_.foo".to_owned();
+        assert!(get_value_from_path(&mut path, &headers, body, &query).is_err());
+        let mut path = "_...".to_owned();
+        assert!(get_value_from_path(&mut path, &headers, body, &query).is_err());
+
+        let body = b"{\"foo\": \"bar\"}";
+
+        let mut path = "_.body.foo".to_owned();
+        let name = get_value_from_path(&mut path, &headers, body, &query).unwrap();
+        assert_eq!(name, "bar");
+
+        let mut path = "_.body.bar".to_owned();
+        assert!(get_value_from_path(&mut path, &headers, body, &query).is_err());
+
+        let mut query = HashMap::new();
+        query.insert("baz".to_owned(), "qux".to_owned());
+        let query = Some(query);
+        let mut path = "_.query.baz".to_owned();
+        let name = get_value_from_path(&mut path, &headers, body, &query).unwrap();
+        assert_eq!(name, "qux");
+
+        let mut path = "_.query.foo".to_owned();
+        assert!(get_value_from_path(&mut path, &headers, body, &query).is_err());
+
+        let mut path = "_.headers.quux".to_owned();
+        let name = get_value_from_path(&mut path, &headers, body, &query).unwrap();
+        assert_eq!(name, "quuz");
+
+        let mut path = "_.headers.foo".to_owned();
+        assert!(get_value_from_path(&mut path, &headers, body, &query).is_err());
+    }
+}
diff --git a/google-token-fetcher/Cargo.toml b/google-token-fetcher/Cargo.toml
new file mode 100644
index 00000000..3c3f8af1
--- /dev/null
+++ b/google-token-fetcher/Cargo.toml
@@ -0,0 +1,11 @@
+[package]
+name = "google-token-fetcher"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow.workspace = true
+reqwest.workspace = true
+serde.workspace = true
diff --git a/google-token-fetcher/src/lib.rs b/google-token-fetcher/src/lib.rs
new file mode 100644
index 00000000..57d01960
--- /dev/null
+++ b/google-token-fetcher/src/lib.rs
@@ -0,0 +1,39 @@
+use anyhow::Result;
+use reqwest::Client;
+use serde::Deserialize;
+
+const URL: &str =
+    "http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience=";
+const HEADER_KEY: &str = "Metadata-Flavor";
+const HEADER_VALUE: &str = "Google";
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct GoogleTokenResponse {
+    data: String,
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct GoogleTokenFetcher {
+    client: Client,
+}
+
+impl GoogleTokenFetcher {
+    pub fn new() -> Self {
+        Self {
+            client: Client::new(),
+        }
+    }
+
+    pub async fn get_token(&self, url: &str) -> Result<String> {
+        let res = self
+            .client
+            .get(format!("{URL}{url}"))
+            .header(HEADER_KEY, HEADER_VALUE)
+            .send()
+            .await?
+            .json::<GoogleTokenResponse>()
+            .await?;
+
+        Ok(format!("Bearer {}", res.data))
+    }
+}
diff --git a/redis-retry/Cargo.toml b/redis-retry/Cargo.toml
new file mode 100644
index 00000000..bfead0fc
--- /dev/null
+++ b/redis-retry/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "redis-retry"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+anyhow.workspace = true
+envconfig.workspace = true
+futures-util.workspace = true
+redis = { version = "0.23.3", features = ["connection-manager", "tokio-comp"] }
+tracing.workspace = true
diff --git a/redis-retry/src/config.rs b/redis-retry/src/config.rs
new file mode 100644
index 00000000..53a43ce9
--- /dev/null
+++ b/redis-retry/src/config.rs
@@ -0,0 +1,38 @@
+use envconfig::Envconfig;
+use std::fmt::{Display, Formatter};
+
+#[derive(Envconfig, Debug, Clone)]
+pub struct Config {
+    #[envconfig(from = "REDIS_URL", default = "redis://localhost:6379")]
+    pub url: String,
+    #[envconfig(from = "REDIS_QUEUE_NAME", default = "events")]
+    pub queue_name: String,
+    #[envconfig(from = "REDIS_EVENT_THROUGHPUT_KEY", default = "event_throughput")]
+    pub event_throughput_key: String,
+    #[envconfig(from = "REDIS_API_THROUGHPUT_KEY", default = "api_throughput")]
+    pub api_throughput_key: String,
+}
+
+impl Default for Config {
+    fn default() -> Self {
+        Self {
+            url: "redis://localhost:6379".to_owned(),
+            queue_name: "events".to_owned(),
+            event_throughput_key: "event_throughput".to_owned(),
+            api_throughput_key: "api_throughput".to_owned(),
+        }
+    }
+}
+
+impl Display for Config {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        writeln!(f, "REDIS_URL: {}", self.url)?;
+        writeln!(f, "REDIS_QUEUE_NAME: {}", self.queue_name)?;
+        writeln!(
+            f,
+            "REDIS_EVENT_THROUGHPUT_KEY: {}",
+            self.event_throughput_key
+        )?;
+        writeln!(f, "REDIS_API_THROUGHPUT_KEY: {}", self.api_throughput_key)
+    }
+}
diff --git a/redis-retry/src/lib.rs b/redis-retry/src/lib.rs
new file mode 100644
index 00000000..f1c8bdf1
--- /dev/null
+++ b/redis-retry/src/lib.rs
@@ -0,0 +1,81 @@
+pub mod config;
+
+use anyhow::{Context, Result};
+use futures_util::FutureExt;
+use redis::{
+    aio::{ConnectionLike, ConnectionManager},
+    Client, Pipeline, RedisFuture, Value,
+};
+use tracing::warn;
+
+pub use crate::config::Config;
+pub use redis::{AsyncCommands, LposOptions, RedisResult};
+
+#[derive(Clone)]
+pub struct Redis {
+    client: Client,
+    conn: ConnectionManager,
+    retry_count: u64,
+}
+
+impl Redis {
+    pub async fn new(config: &Config) -> Result<Self> {
+        Self::new_with_retry_count(config, std::u64::MAX).await
+    }
+
+    pub async fn new_with_retry_count(config: &Config, retry_count: u64) -> Result<Self> {
+        let client =
+            Client::open(config.url.clone()).with_context(|| "Could not parse redis url")?;
+        let conn = client
+            .get_tokio_connection_manager()
+            .await
+            .with_context(|| "Could not connect to redis")?;
+
+        Ok(Self {
+            client,
+            conn,
+            retry_count,
+        })
+    }
+}
+
+impl ConnectionLike for Redis {
+    fn req_packed_command<'a>(&'a mut self, cmd: &'a redis::Cmd) -> RedisFuture<'a, Value> {
+        (async move {
+            let mut retry_count = 0u64;
+            loop {
+                let res = self.conn.req_packed_command(cmd).await;
+                if res.is_ok() || retry_count >= self.retry_count {
+                    return res;
+                }
+                warn!("Redis failed command, retrying...");
+                retry_count += 1;
+            }
+        })
+        .boxed()
+    }
+
+    fn req_packed_commands<'a>(
+        &'a mut self,
+        cmd: &'a Pipeline,
+        offset: usize,
+        count: usize,
+    ) -> RedisFuture<'a, Vec<Value>> {
+        (async move {
+            let mut retry_count = 0u64;
+            loop {
+                let res = self.conn.req_packed_commands(cmd, offset, count).await;
+                if res.is_ok() || retry_count >= self.retry_count {
+                    return res;
+                }
+                warn!("Redis failed command, retrying...");
+                retry_count += 1;
+            }
+        })
+        .boxed()
+    }
+
+    fn get_db(&self) -> i64 {
+        self.client.get_connection_info().redis.db
+    }
+}
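Because `Redis` implements redis's async `ConnectionLike`, every command issued through the re-exported `AsyncCommands` trait goes through the retry loops above. A brief usage sketch, with a placeholder value and a deliberately small retry budget; none of it is part of this diff:

    // Hypothetical usage, not part of this diff.
    use redis_retry::{AsyncCommands, Config, Redis};

    async fn push_with_retries() -> anyhow::Result<()> {
        let config = Config::default();
        // Retry each failed command up to 3 times instead of the
        // effectively unbounded default used by Redis::new.
        let mut redis = Redis::new_with_retry_count(&config, 3).await?;
        let _: () = redis.lpush(&config.queue_name, "hello").await?;
        Ok(())
    }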
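Likewise, a small usage sketch for the `GoogleTokenFetcher` from the google-token-fetcher crate above: it can only succeed where the GCP metadata server is reachable (GCE, GKE, Cloud Run), and the audience URL here is a placeholder assumption.

    // Hypothetical usage, not part of this diff.
    use google_token_fetcher::GoogleTokenFetcher;

    async fn call_private_service() -> anyhow::Result<()> {
        let fetcher = GoogleTokenFetcher::new();
        // Returns a ready-to-use "Bearer <token>" string for the audience.
        let bearer = fetcher.get_token("https://my-service.example.app").await?;
        println!("{bearer}");
        Ok(())
    }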
diff --git a/watchdog/Cargo.toml b/watchdog/Cargo.toml
new file mode 100644
index 00000000..34a6fd0f
--- /dev/null
+++ b/watchdog/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "watchdog"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+redis-retry = { path = "../redis-retry" }
+chrono.workspace = true
+mongodb.workspace = true
+envconfig.workspace = true
+dotenvy.workspace = true
+tracing-subscriber.workspace = true
+tokio.workspace = true
+tracing.workspace = true
+integrationos-domain.workspace = true
+anyhow.workspace = true
+futures.workspace = true
+serde_json.workspace = true
diff --git a/watchdog/src/config.rs b/watchdog/src/config.rs
new file mode 100644
index 00000000..b6a42ff2
--- /dev/null
+++ b/watchdog/src/config.rs
@@ -0,0 +1,25 @@
+use envconfig::Envconfig;
+use integrationos_domain::common::database::DatabaseConfig;
+use redis_retry::Config as RedisConfig;
+use std::fmt::{Display, Formatter};
+
+#[derive(Envconfig, Clone)] // Intentionally no Debug so secrets are not printed
+pub struct Config {
+    #[envconfig(from = "EVENT_TIMEOUT", default = "300")] // 300 seconds / 5 minutes
+    pub event_timeout: u64,
+    #[envconfig(from = "POLL_DURATION", default = "10")] // 10 seconds
+    pub poll_duration: u64,
+    #[envconfig(nested = true)]
+    pub redis: RedisConfig,
+    #[envconfig(nested = true)]
+    pub db: DatabaseConfig,
+}
+
+impl Display for Config {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        writeln!(f, "POLL_DURATION: {}", self.poll_duration)?;
+        writeln!(f, "EVENT_TIMEOUT: {}", self.event_timeout)?;
+        writeln!(f, "{}", self.redis)?;
+        writeln!(f, "{}", self.db)
+    }
+}
diff --git a/watchdog/src/lib.rs b/watchdog/src/lib.rs
new file mode 100644
index 00000000..ef68c369
--- /dev/null
+++ b/watchdog/src/lib.rs
@@ -0,0 +1 @@
+pub mod config;
diff --git a/watchdog/src/main.rs b/watchdog/src/main.rs
new file mode 100644
index 00000000..8058487a
--- /dev/null
+++ b/watchdog/src/main.rs
@@ -0,0 +1,274 @@
+use anyhow::{Context, Result};
+use chrono::Utc;
+use dotenvy::dotenv;
+use envconfig::Envconfig;
+use futures::{future::join_all, TryStreamExt};
+use integrationos_domain::{
+    algebra::adapter::StoreAdapter,
+    common::{
+        event_with_context::EventWithContext,
+        mongo::{MongoDbStore, MongoDbStoreConfig},
+        pipeline_context::Stage as PipelineStage,
+        root_context::Stage,
+        Event, ExtractorContext, PipelineContext, RootContext, Store,
+    },
+};
+use mongodb::{
+    bson::{doc, Bson, Document},
+    options::FindOneOptions,
+};
+use redis_retry::{AsyncCommands, LposOptions, Redis, RedisResult};
+use std::time::Duration;
+use tracing::{debug, error, info, metadata::LevelFilter, warn};
+use tracing_subscriber::EnvFilter;
+use watchdog::config::Config;
+
+#[tokio::main(flavor = "multi_thread")]
+async fn main() -> Result<()> {
+    dotenv().ok();
+
+    let filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::DEBUG.into())
+        .from_env_lossy();
+    tracing_subscriber::fmt().with_env_filter(filter).init();
+
+    let config = Config::init_from_env()?;
+
+    info!("Starting watchdog with config: {config}");
+
+    let mut redis = Redis::new(&config.redis).await?;
+
+    let key = config.redis.event_throughput_key.clone();
+    let mut redis_clone = redis.clone();
+    tokio::spawn(async move {
+        loop {
+            let _: RedisResult<()> = async { redis_clone.del(key.clone()).await }.await;
+            tokio::time::sleep(Duration::from_secs(1)).await;
+        }
+    });
+
+    let key = config.redis.api_throughput_key.clone();
+    let mut redis_clone = redis.clone();
+    tokio::spawn(async move {
+        loop {
+            let _: RedisResult<()> = async { redis_clone.del(key.clone()).await }.await;
+            tokio::time::sleep(Duration::from_secs(60)).await;
+        }
+    });
+
+    let mongo = mongodb::Client::with_uri_str(config.db.context_db_url)
+        .await
+        .with_context(|| "Could not connect to mongodb")?;
+    let db = mongo.database(&config.db.context_db_name);
+    let coll = db.collection::<Document>(&config.db.context_collection_name);
+    let root_coll = db.collection::<RootContext>(&config.db.context_collection_name);
+    let pipeline_coll = db.collection::<PipelineContext>(&config.db.context_collection_name);
+    let extractor_coll = db.collection::<ExtractorContext>(&config.db.context_collection_name);
+
+    let event_client = mongodb::Client::with_uri_str(config.db.event_db_url)
+        .await
+        .with_context(|| "Could not connect to events db")?;
+
+    let event_db = event_client.database(&config.db.event_db_name);
+    let event_store = MongoDbStore::new(MongoDbStoreConfig::<Event>::new(event_db, Store::Events))
+        .await
+        .with_context(|| {
+            format!(
+                "Could not connect to event db at {}",
+                config.db.event_db_name
+            )
+        })?;
+
+    loop {
+        let mut count = 0;
+        let timestamp = Utc::now().timestamp_millis() - (config.event_timeout * 1_000) as i64;
+
+        let pipeline = vec![
+            // Sort by timestamp descending so each group sees the latest contexts first
+            doc! {
+                "$sort": {
+                    "timestamp": -1
+                },
+            },
+            // Group by event_key, taking the first (latest) context's stage
+            // and status, and count the contexts newer than the event timeout
+            // cutoff. If any exist, the context is still making progress and
+            // is not considered dead.
+            doc! {
+                "$group": {
+                    "_id": "$eventKey",
+                    "stage": {
+                        "$first": "$stage"
+                    },
+                    "status": {
+                        "$first": "$status"
+                    },
+                    "count": {
+                        "$sum": {
+                            "$cond": [{
+                                "$gt": [
+                                    "$timestamp", timestamp
+                                ]
+                            }, 1, 0]
+                        },
+                    },
+                },
+            },
+            // Keep only event keys with no contexts after the cutoff (so presumed dead)
+            // that never reached Finished and whose latest status is Succeeded (not dropped).
+            // These events are unfinished and stalled, so they must be republished to redis.
+            doc! {
+                "$match": {
+                    "count": { "$eq": 0 },
+                    "stage": { "$ne": "Finished" },
+                    "status": { "$eq": "Succeeded" }
+                }
+            },
+        ];
+
+        let mut event_keys = match coll.clone().aggregate(pipeline, None).await {
+            Ok(e) => e,
+            Err(e) => {
+                error!("Failed to fetch event keys: {e}");
+                continue;
+            }
+        };
+
+        'outer: while let Some(event_key) = event_keys.try_next().await? {
+            let Some(Bson::String(event_key)) = event_key.get("_id") else {
+                error!("Could not get _id out of event keys response");
+                continue;
+            };
+            // Sort by timestamp descending to get the latest context
+            let options = FindOneOptions::builder()
+                .sort(doc! { "timestamp": -1 })
+                .build();
+
+            // Get the latest root context, then also get all latest pipeline
+            // contexts and extractor contexts if applicable
+            let root_context = match root_coll
+                .clone()
+                .find_one(
+                    doc! {
+                        "eventKey": event_key,
+                        "type": "root"
+                    },
+                    options.clone(),
+                )
+                .await
+            {
+                Ok(c) => c,
+                Err(e) => {
+                    error!("Failed to fetch root context: {e}");
+                    continue;
+                }
+            };
+            let Some(mut root_context) = root_context else {
+                error!("Did not find root context for {event_key}");
+                continue;
+            };
+
+            if let Stage::ProcessingPipelines(ref mut pipelines) = root_context.stage {
+                let futs = pipelines.values().map(|p| {
+                    pipeline_coll.find_one(
+                        doc! {
+                            "eventKey": p.event_key.to_string(),
+                            "pipelineKey": p.pipeline_key.clone(),
+                            "type": "pipeline"
+                        },
+                        options.clone(),
+                    )
+                });
+
+                let results = join_all(futs).await;
+                for result in results {
+                    match result {
+                        Ok(context) => {
+                            let Some(mut context) = context else {
+                                error!("Did not find pipeline context for {event_key}");
+                                continue 'outer;
+                            };
+                            if let PipelineStage::ExecutingExtractors(ref mut extractors) =
+                                context.stage
+                            {
+                                let futs = extractors.values().map(|e| {
+                                    let filter = doc! {
+                                        "eventKey": e.event_key.to_string(),
+                                        "pipelineKey": e.pipeline_key.clone(),
+                                        "extractorKey": e.extractor_key.to_string(),
+                                        "type": "extractor"
+                                    };
+                                    extractor_coll.find_one(filter, options.clone())
+                                });
+                                let results = join_all(futs).await;
+                                for result in results {
+                                    match result {
+                                        Ok(context) => {
+                                            let Some(context) = context else {
+                                                error!("Did not find extractor context for {event_key}");
+                                                continue 'outer;
+                                            };
+                                            extractors
+                                                .insert(context.extractor_key.clone(), context);
+                                        }
+                                        Err(e) => {
+                                            error!("Did not find extractor context for {event_key}: {e}");
+                                            continue 'outer;
+                                        }
+                                    }
+                                }
+                            }
+                            pipelines.insert(context.pipeline_key.clone(), context);
+                        }
+                        Err(e) => {
+                            error!("Could not fetch pipeline context for {event_key}: {e}");
+                            continue 'outer;
+                        }
+                    }
+                }
+            }
+
+            debug!("Republishing unresponsive context {event_key}");
+
+            let Some(event) = event_store
+                .get_one_by_id(event_key)
+                .await
+                .with_context(|| format!("could not fetch event for context {event_key}"))?
+            else {
+                error!("Event does not exist {event_key}");
+                continue;
+            };
+
+            let event_with_context = EventWithContext::new(event, root_context);
+
+            let payload = match serde_json::to_vec(&event_with_context) {
+                Ok(c) => c,
+                Err(e) => {
+                    error!("Could not serialize payload {event_with_context:?}: {e}");
+                    continue;
+                }
+            };
+            if redis
+                .lpos::<&str, &[u8], Option<u64>>(
+                    &config.redis.queue_name,
+                    &payload,
+                    LposOptions::default(),
+                )
+                .await?
+                .is_some()
+            {
+                warn!("Unresponsive context is already in redis {event_key}");
+                continue;
+            }
+            match redis.lpush(&config.redis.queue_name, payload).await {
+                Ok(()) => count += 1,
+                Err(e) => error!("Could not publish event to redis: {e}"),
+            }
+        }
+
+        if count > 0 {
+            info!("Republished {count} new events");
+        }
+
+        tokio::time::sleep(Duration::from_secs(config.poll_duration)).await;
+    }
+}
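To make the watchdog's aggregation easier to audit, its liveness rule can be restated in plain Rust. This helper is purely illustrative and appears nowhere in the diff:

    // Illustrative restatement of the $group/$match stages in watchdog/src/main.rs:
    // an event is presumed dead, and gets republished, when no context newer than
    // the cutoff exists for its key, the latest context never reached Finished,
    // and its status is Succeeded (i.e. it was not deliberately dropped).
    fn is_presumed_dead(contexts_newer_than_cutoff: u32, stage: &str, status: &str) -> bool {
        contexts_newer_than_cutoff == 0 && stage != "Finished" && status == "Succeeded"
    }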