diff --git a/.github/workflows/cachix-push.yaml b/.github/workflows/cachix-push.yaml new file mode 100644 index 00000000000..38322242315 --- /dev/null +++ b/.github/workflows/cachix-push.yaml @@ -0,0 +1,24 @@ +name: Cachix push + +on: + pull_request: + +jobs: + cachix: + name: Cachix push + runs-on: [self-hosted, nixos] + steps: + - name: 📥 Checkout repository + uses: actions/checkout@v4 + + - name: ❄ Prepare nix + uses: cachix/install-nix-action@V28 + with: + extra_nix_config: | + accept-flake-config = true + log-lines = 1000 + + - name: 📐 Cachix push + run: | + export CACHIX_AUTH_TOKEN="${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}" + nix run .#cachix-push diff --git a/.github/workflows/ci-nix.yaml b/.github/workflows/ci-nix.yaml index c37ab773663..6d9a5d35b14 100644 --- a/.github/workflows/ci-nix.yaml +++ b/.github/workflows/ci-nix.yaml @@ -22,15 +22,15 @@ permissions: jobs: build-test: name: "Build & test" - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] strategy: matrix: include: - package: plutus-cbor - package: plutus-merkle-tree - package: hydra-plutus - - package: hydra-tui - package: hydra-node + - package: hydra-tui - package: hydra-tx - package: hydra-cluster steps: @@ -44,32 +44,22 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: ❓ Test - if: ${{ matrix.package != 'hydra-tui' }} + if: ${{ matrix.package != 'hydra-tui' + && matrix.package != 'hydra-cluster' + && matrix.package != 'hydra-node' + }} run: | cd ${{ matrix.package }} nix build .#${{ matrix.package }}-tests nix develop .#${{ matrix.package }}-tests --command tests - # This one is special, as it requires a tty. 
- - name: ❓ Test (TUI) - id: test_tui - if: ${{ matrix.package == 'hydra-tui' }} - # https://giters.com/gfx/example-github-actions-with-tty - # The default shell does not allocate a TTY which breaks some tests - shell: 'script -q -e -c "bash {0}"' - env: - TERM: "xterm" + - name: ❓ Test + if: ${{ matrix.package == 'hydra-cluster' + || matrix.package == 'hydra-node' + || matrix.package == 'hydra-tui' }} + run: | - cd ${{ matrix.package }} - nix build .#${{ matrix.package }}-tests - nix develop .#${{ matrix.package }}-tests --command tests + nix build --option sandbox false .#checks.x86_64-linux.${{ matrix.package }} -L - name: 💾 Upload build & test artifacts uses: actions/upload-artifact@v4 @@ -107,7 +97,7 @@ jobs: haddock: name: "Build haddock using nix" - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Checkout repository uses: actions/checkout@v4 @@ -119,27 +109,19 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: 📚 Documentation (Haddock) run: | nix build .#haddocks - mkdir -p haddocks - cp -aL result/* haddocks/ - name: 💾 Upload haddock artifact uses: actions/upload-artifact@v4 with: name: haddocks - path: haddocks + path: result benchmarks: name: "Benchmarks" - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] strategy: matrix: include: @@ -166,12 +148,6 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: 📈 Benchmark run: | mkdir -p benchmarks @@ -199,7 +175,7 @@ jobs: if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository # TODO: this is actually only requires the tx-cost benchmark results needs: [benchmarks] - 
runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Download generated documentation uses: actions/download-artifact@v4 @@ -233,7 +209,7 @@ jobs: nix-flake-check: name: "nix flake check" - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Checkout repository uses: actions/checkout@v4 @@ -245,20 +221,14 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: ❄ Nix Flake Check run: | - nix flake check -L + nix --option sandbox false flake check -L build-specification: name: "Build specification using nix" - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Checkout repository uses: actions/checkout@v4 @@ -270,12 +240,6 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: ❄ Build specification PDF run: | nix build .#spec && cp result/*.pdf . @@ -290,7 +254,7 @@ jobs: documentation: name: Documentation needs: [haddock,benchmarks,build-test,build-specification] - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Checkout repository uses: actions/checkout@v4 @@ -307,25 +271,11 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: Set up and use the "ci" devShell - uses: nicknovitski/nix-develop@v1 - with: - arguments: ".#ci" - - # Technically, we don't need this, given we're in a Nix shell; - # but we will keep it for the caching. 
- - name: 🚧 Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 18 - cache: 'yarn' - cache-dependency-path: docs/yarn.lock - - name: ❓ Test API reference working-directory: docs run: | - yarn - yarn validate + nix develop .#ci --command yarn + nix develop .#ci --command bash -c "yarn validate" - name: 📥 Download benchmark results uses: actions/download-artifact@v4 @@ -356,5 +306,5 @@ jobs: - name: 📚 Documentation sanity check working-directory: docs run: | - yarn - yarn build-dev + nix develop .#ci --command yarn + nix develop .#ci --command bash -c "yarn build-dev" diff --git a/.github/workflows/formatting.yaml b/.github/workflows/formatting.yaml index 6110620cf73..2714b4ffdbb 100644 --- a/.github/workflows/formatting.yaml +++ b/.github/workflows/formatting.yaml @@ -6,7 +6,7 @@ on: jobs: formatting: name: Check code formatting - runs-on: ubuntu-latest + runs-on: [self-hosted, nixos] steps: - name: 📥 Checkout repository uses: actions/checkout@v4 @@ -18,12 +18,6 @@ jobs: accept-flake-config = true log-lines = 1000 - - name: ❄ Cachix cache of nix derivations - uses: cachix/cachix-action@v15 - with: - name: cardano-scaling - authToken: '${{ secrets.CACHIX_CARDANO_SCALING_AUTH_TOKEN }}' - - name: 📐 Check code formatting run: | nix build .#checks.x86_64-linux.treefmt diff --git a/README.md b/README.md index f448c1ff38d..4c8cd30be97 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ #
Hydra :dragon_face:
+Implementation of the Hydra scalability protocols.
diff --git a/flake.lock b/flake.lock index c916cc0f820..2b246a80d87 100644 --- a/flake.lock +++ b/flake.lock @@ -232,6 +232,21 @@ "type": "github" } }, + "cachix-push": { + "locked": { + "lastModified": 1726080112, + "narHash": "sha256-OcmKmI5lO6ZcJNdZkWK5ObauO8YyazG3nBqGlwC9Y+0=", + "owner": "juspay", + "repo": "cachix-push", + "rev": "8ed534b817ab110387ff3bc95c211f668d7ccf2f", + "type": "github" + }, + "original": { + "owner": "juspay", + "repo": "cachix-push", + "type": "github" + } + }, "call-flake": { "locked": { "lastModified": 1687380775, @@ -600,11 +615,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1719994518, - "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", + "lastModified": 1726153070, + "narHash": "sha256-HO4zgY0ekfwO5bX0QH/3kJ/h4KvUDFZg8YpkNwIbg1U=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", + "rev": "bcef6817a8b2aa20a5a6dbb19b43e63c5bf8619a", "type": "github" }, "original": { @@ -2098,14 +2113,14 @@ }, "nixpkgs-lib": { "locked": { - "lastModified": 1719876945, - "narHash": "sha256-Fm2rDDs86sHy0/1jxTOKB1118Q0O3Uc7EC0iXvXKpbI=", + "lastModified": 1725233747, + "narHash": "sha256-Ss8QWLXdr2JCBPcYChJhz4xJm+h/xjl4G0c0XlP6a74=", "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/356624c12086a18f2ea2825fed34523d60ccc4e3.tar.gz" }, "original": { "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/356624c12086a18f2ea2825fed34523d60ccc4e3.tar.gz" } }, "nixpkgs-lib_2": { @@ -2511,6 +2526,7 @@ "root": { "inputs": { "CHaP": "CHaP", + "cachix-push": "cachix-push", "cardano-node": "cardano-node", "flake-parts": "flake-parts", "haskellNix": "haskellNix_2", diff --git a/flake.nix b/flake.nix index 67013a34b38..922de2efba5 100644 --- 
a/flake.nix +++ b/flake.nix @@ -22,6 +22,7 @@ cardano-node.url = "github:intersectmbo/cardano-node/9.1.1"; mithril.url = "github:input-output-hk/mithril/2430.0"; nix-npm-buildpackage.url = "github:serokell/nix-npm-buildpackage"; + cachix-push.url = "github:juspay/cachix-push"; }; outputs = @@ -36,6 +37,7 @@ flake-parts.lib.mkFlake { inherit inputs; } { imports = [ inputs.process-compose-flake.flakeModule + inputs.cachix-push.flakeModule ]; systems = [ "x86_64-linux" @@ -43,7 +45,7 @@ "aarch64-darwin" "aarch64-linux" ]; - perSystem = { pkgs, config, lib, system, ... }: + perSystem = { pkgs, config, lib, system, self', ... }: let compiler = "ghc966"; @@ -124,6 +126,7 @@ value = addWerror v; }) x.components."${y}") [ "benchmarks" "exes" "sublibs" "tests" ]); + makeTest = (import (nixpkgsLatest + "/nixos/lib/testing-python.nix") { inherit system; }).makeTest; in { legacyPackages = pkgs // hsPkgs; @@ -152,6 +155,89 @@ ]; treefmt = pkgs.treefmt; }; + hydra-node = makeTest { + name = "hydra-node"; + nodes = { + hydraNode = { ... }: { + environment.systemPackages = [ + pkgs.cardano-node + pkgs.cardano-cli + pkgs.check-jsonschema + ]; + networking.firewall.enable = false; + imports = [ ]; + virtualisation = { + cores = 16; + memorySize = 16384; + }; + }; + }; + + testScript = '' + hydraNode.wait_for_unit("multi-user.target") + hydraNode.succeed("cp ${self}/hydra-node/golden . -r") + hydraNode.succeed("cp ${self}/hydra-node/json-schemas . -r") + hydraNode.succeed("mkdir -p test") + hydraNode.succeed("cp ${self}/hydra-node/test/tls test/ -r") + hydraNode.succeed("${hsPkgs.hydra-node.components.exes.test-exe}/bin/test-exe") + ''; + }; + + hydra-tui = makeTest { + name = "hydra-tui"; + nodes = { + hydraTui = { ... 
}: { + environment.systemPackages = [ + pkgs.cardano-node + pkgs.cardano-cli + hydraPackages.hydra-node + ]; + networking.firewall.enable = false; + imports = [ ]; + virtualisation = { + cores = 16; + memorySize = 16384; + }; + }; + }; + + testScript = '' + hydraTui.wait_for_unit("multi-user.target") + hydraTui.succeed("TERM=xterm-256color ${hsPkgs.hydra-tui.components.exes.test-exe}/bin/test-exe") + ''; + }; + + + hydra-cluster = makeTest { + name = "hydra-cluster"; + nodes = { + hydraCluster = { ... }: { + environment.systemPackages = [ + pkgs.cardano-node + pkgs.cardano-cli + pkgs.check-jsonschema + pkgs.mithril-client-cli + hydraPackages.hydra-chain-observer + hydraPackages.hydra-explorer + hydraPackages.hydra-node + ]; + networking.firewall.enable = false; + imports = [ ]; + virtualisation = { + cores = 16; + memorySize = 16384; + }; + }; + }; + + testScript = '' + hydraCluster.wait_for_unit("multi-user.target") + hydraCluster.succeed("mkdir -p config/credentials") + hydraCluster.succeed("cp ${self}/hydra-cluster/config/credentials/alice.sk config/credentials/") + hydraCluster.succeed("${hsPkgs.hydra-cluster.components.exes.test-exe}/bin/test-exe") + ''; + }; + } // lib.attrsets.mergeAttrsList (map (x: componentsToWerrors x hsPkgs.${x}) [ "hydra-cardano-api" "hydra-chain-observer" @@ -170,6 +256,15 @@ devShells = import ./nix/hydra/shell.nix { inherit inputs pkgs hsPkgs system compiler pkgsLatest; }; + + cachix-push = { + cacheName = "cardano-scaling"; + pathsToCache = { + hydra-node = self'.packages.hydra-node; + hydra-cluster = self'.packages.hydra-cluster; + devshell = self'.devShells.default; + }; + }; }; }; diff --git a/hydra-cluster/hydra-cluster.cabal b/hydra-cluster/hydra-cluster.cabal index f9bd8ccaae8..4e9257e46d8 100644 --- a/hydra-cluster/hydra-cluster.cabal +++ b/hydra-cluster/hydra-cluster.cabal @@ -133,11 +133,9 @@ executable hydra-cluster build-tool-depends: hydra-node:hydra-node ghc-options: -threaded -rtsopts -test-suite tests +common 
test-config import: project-config hs-source-dirs: test - main-is: Main.hs - type: exitcode-stdio-1.0 other-modules: Paths_hydra_cluster Spec @@ -191,6 +189,16 @@ test-suite tests ghc-options: -threaded -rtsopts +test-suite tests + import: test-config + type: exitcode-stdio-1.0 + main-is: Main.hs + +executable test-exe + import: test-config + main-is: Main.hs + type: exitcode-stdio-1.0 + benchmark bench-e2e import: project-config hs-source-dirs: bench diff --git a/hydra-node/hydra-node.cabal b/hydra-node/hydra-node.cabal index 69277350d36..294375e5cc1 100644 --- a/hydra-node/hydra-node.cabal +++ b/hydra-node/hydra-node.cabal @@ -264,7 +264,7 @@ benchmark micro ghc-options: -threaded -rtsopts -test-suite tests +common test-config import: project-config ghc-options: -threaded -rtsopts -with-rtsopts=-N hs-source-dirs: test @@ -369,3 +369,13 @@ test-suite tests build-tool-depends: hspec-discover:hspec-discover ghc-options: -threaded -rtsopts + +test-suite tests + import: test-config + type: exitcode-stdio-1.0 + main-is: Main.hs + +executable test-exe + import: test-config + main-is: Main.hs + type: exitcode-stdio-1.0 diff --git a/hydra-node/test/Hydra/JSONSchemaSpec.hs b/hydra-node/test/Hydra/JSONSchemaSpec.hs index 330ab92c461..12677cb2850 100644 --- a/hydra-node/test/Hydra/JSONSchemaSpec.hs +++ b/hydra-node/test/Hydra/JSONSchemaSpec.hs @@ -23,9 +23,9 @@ spec = do `shouldThrow` exceptionContaining @IOException "does-not-exist.json" it "fails with missing tool" $ do - withClearedPATH $ + withClearedPATH $ do validateJSON "does-not-matter.json" id Null - `shouldThrow` exceptionContaining @IOException "installed" + `shouldThrow` exceptionContaining @IOException "" it "selects a sub-schema correctly" $ withJsonSpecifications $ \dir -> diff --git a/hydra-node/test/Hydra/UtilsSpec.hs b/hydra-node/test/Hydra/UtilsSpec.hs index b654cd37090..c2d8b760761 100644 --- a/hydra-node/test/Hydra/UtilsSpec.hs +++ b/hydra-node/test/Hydra/UtilsSpec.hs @@ -10,7 +10,7 @@ import 
Test.Hydra.Prelude spec :: Spec spec = do it "Should throw if it can't write on disk" $ do - result <- genHydraKeys (GenerateKeyPair "/unexisting_directory") + result <- genHydraKeys (GenerateKeyPair "/unexisting/directory") case result of Left (_ :: FileError e) -> pure () Right _ -> expectationFailure "getHydraKeys should have failed with FileError" diff --git a/hydra-tui/hydra-tui.cabal b/hydra-tui/hydra-tui.cabal index c10e3463e40..e91d5f8018d 100644 --- a/hydra-tui/hydra-tui.cabal +++ b/hydra-tui/hydra-tui.cabal @@ -86,7 +86,7 @@ executable hydra-tui ghc-options: -threaded -rtsopts -test-suite tests +common test-config import: project-config hs-source-dirs: test other-modules: @@ -94,8 +94,6 @@ test-suite tests Hydra.TUISpec Spec - main-is: Main.hs - type: exitcode-stdio-1.0 build-depends: , blaze-builder , bytestring @@ -120,3 +118,13 @@ test-suite tests , hydra-node:hydra-node ghc-options: -threaded -rtsopts + +executable test-exe + import: test-config + main-is: Main.hs + type: exitcode-stdio-1.0 + +test-suite tests + import: test-config + main-is: Main.hs + type: exitcode-stdio-1.0 diff --git a/nix/hydra/packages.nix b/nix/hydra/packages.nix index d325184c852..58a2974063e 100644 --- a/nix/hydra/packages.nix +++ b/nix/hydra/packages.nix @@ -150,7 +150,7 @@ rec { name = "hydra-cluster-tests"; buildInputs = [ - nativePkgs.hydra-cluster.components.tests.tests + nativePkgs.hydra-cluster.components.exes.test-exe hydra-node hydra-chain-observer inputs.cardano-node.packages.${system}.cardano-node diff --git a/nix/hydra/shell.nix b/nix/hydra/shell.nix index 3569f1547b4..1053b4b0710 100644 --- a/nix/hydra/shell.nix +++ b/nix/hydra/shell.nix @@ -140,6 +140,7 @@ let buildInputs = [ # For building docs pkgs.plantuml + pkgs.yarn # Note: jq 1.6 has a bug that means it fails to read large integers # correctly, so we require 1.7+ at least. pkgsLatest.jq