forked from NixOS/nixpkgs
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* [WIP] Parallel GH actions workflow for Nixpkgs eval Partly taken from NixOS#352808 and NixOS#269403 * ci/parallel.nix: nixfmt * ci/eval: drop swap Turns out we don't actually need it * ci/eval: reformat with shfmt the file was using mixed indentation (2 vs 4 spaces) * ci/eval: depend on coreutils nproc is only part of coreutils and not present on macOS * ci/eval: allow to override cores this is useful to control how much RAM is used during evaluation * ci/eval: use xargs instead of parallel saves us from having to install perl for this script. * ci/eval: add quick test flag * ci/eval: separate different outputs in json * WIP * Update README.md --------- Co-authored-by: Silvan Mosberger <[email protected]> Co-authored-by: Jörg Thalheim <[email protected]> Co-authored-by: Silvan Mosberger <[email protected]>
- Loading branch information
1 parent
2f21354
commit ff31ef4
Showing
10 changed files
with
423 additions
and
24 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,112 @@ | ||
name: Eval | ||
|
||
on: pull_request_target | ||
|
||
permissions: | ||
contents: read | ||
|
||
jobs: | ||
attrs: | ||
name: Attrs | ||
runs-on: ubuntu-latest | ||
outputs: | ||
systems: ${{ steps.systems.outputs.systems }} | ||
mergedSha: ${{ steps.merged.outputs.mergedSha }} | ||
steps: | ||
# Important: Because of `pull_request_target`, this doesn't check out the PR, | ||
# but rather the base branch of the PR, which is needed so we don't run untrusted code | ||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||
with: | ||
path: base | ||
sparse-checkout: ci | ||
- name: Resolving the merge commit | ||
id: merged | ||
env: | ||
GH_TOKEN: ${{ github.token }} | ||
run: | | ||
if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then | ||
echo "Checking the merge commit $mergedSha" | ||
echo "mergedSha=$mergedSha" >> "$GITHUB_OUTPUT" | ||
else | ||
# Skipping so that no notifications are sent | ||
echo "Skipping the rest..." | ||
fi | ||
rm -rf base | ||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||
# Add this to _all_ subsequent steps to skip them | ||
if: steps.merged.outputs.mergedSha | ||
with: | ||
ref: ${{ env.mergedSha }} | ||
path: nixpkgs | ||
|
||
- uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30 | ||
if: steps.merged.outputs.mergedSha | ||
|
||
- id: systems | ||
if: steps.merged.outputs.mergedSha | ||
run: | | ||
nix-build nixpkgs/ci -A eval.attrpathsSuperset | ||
echo "systems=$(<result/systems.json)" >> "$GITHUB_OUTPUT" | ||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||
if: steps.merged.outputs.mergedSha | ||
with: | ||
name: paths | ||
path: result/paths.json | ||
|
||
eval: | ||
name: Eval | ||
runs-on: ubuntu-latest | ||
needs: attrs | ||
if: needs.attrs.outputs.mergedSha | ||
strategy: | ||
matrix: | ||
system: ${{ fromJSON(needs.attrs.outputs.systems) }} | ||
steps: | ||
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 | ||
with: | ||
name: paths | ||
path: paths | ||
|
||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||
with: | ||
ref: ${{ needs.attrs.outputs.mergedSha }} | ||
path: nixpkgs | ||
|
||
- uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30 | ||
|
||
- name: Check eval | ||
run: nix-build nixpkgs/ci -A eval.singleSystem --argstr evalSystem ${{ matrix.system }} --arg attrpathFile ./paths/paths.json | ||
|
||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||
if: needs.attrs.outputs.mergedSha | ||
with: | ||
name: result-${{ matrix.system }} | ||
path: result/paths | ||
|
||
combine: | ||
name: Combined | ||
runs-on: ubuntu-latest | ||
needs: eval | ||
steps: | ||
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 | ||
with: | ||
pattern: result-* | ||
path: results | ||
|
||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 | ||
with: | ||
ref: ${{ needs.attrs.outputs.mergedSha }} | ||
path: nixpkgs | ||
|
||
- uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30 | ||
|
||
- name: Check eval | ||
run: | | ||
ls -laar results/* | ||
nix-build nixpkgs/ci -A eval.combine --arg resultsDir ./results | ||
- uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 | ||
with: | ||
name: outpaths | ||
path: result/outpaths.json |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,251 @@ | ||
{ | ||
lib, | ||
runCommand, | ||
writeShellScript, | ||
linkFarm, | ||
time, | ||
procps, | ||
nix, | ||
jq, | ||
}: | ||
|
||
# Use the GitHub Actions cache to cache /nix/store | ||
# Although can't really merge.. | ||
# Use artifacts and pass files manually, also doesn't have to repeat eval then | ||
let | ||
nixpkgs = | ||
with lib.fileset; | ||
toSource { | ||
root = ../..; | ||
fileset = unions ( | ||
map (lib.path.append ../..) [ | ||
"default.nix" | ||
"doc" | ||
"lib" | ||
"maintainers" | ||
"nixos" | ||
"ci/eval/parallel.nix" | ||
"pkgs" | ||
".version" | ||
] | ||
); | ||
}; | ||
|
||
attrpathsSuperset = | ||
runCommand "attrpaths-superset.json" | ||
{ | ||
src = nixpkgs; | ||
nativeBuildInputs = [ | ||
nix | ||
]; | ||
env.supportedSystems = builtins.toJSON supportedSystems; | ||
passAsFile = [ "supportedSystems" ]; | ||
} | ||
'' | ||
export NIX_STATE_DIR=$(mktemp -d) | ||
mkdir $out | ||
nix-instantiate --eval --strict --json --arg enableWarnings false $src/pkgs/top-level/release-attrpaths-superset.nix -A paths > $out/paths.json | ||
mv "$supportedSystemsPath" $out/systems.json | ||
''; | ||
|
||
supportedSystems = import ../supportedSystems.nix; | ||
|
||
## Takes a path to an attrpathsSuperset result and computes the result of evaluating it entirely | ||
#evalPlan = | ||
# { | ||
# checkMeta ? true, | ||
# includeBroken ? true, | ||
# # TODO | ||
# quickTest ? false, | ||
# }: | ||
# runCommand "eval-plan" | ||
# { | ||
# nativeBuildInputs = [ | ||
# jq | ||
# ]; | ||
# env.cores = toString cores; | ||
# supportedSystems = builtins.toJSON supportedSystems; | ||
# passAsFile = [ "supportedSystems" ]; | ||
# } | ||
# '' | ||
# if [[ -z "$cores" ]]; then | ||
# cores=$NIX_BUILD_CORES | ||
# fi | ||
# echo "Cores: $cores" | ||
# num_attrs=$(jq length "${attrpathsSuperset}/paths.json") | ||
# echo "Attribute count: $num_attrs" | ||
# chunk_size=$(( ${toString simultaneousAttrsPerSystem} / cores )) | ||
# echo "Chunk size: $chunk_size" | ||
# # Same as `num_attrs / chunk_size` but rounded up | ||
# num_chunks=$(( (num_attrs - 1) / chunk_size + 1 )) | ||
# echo "Chunk count: $num_chunks" | ||
|
||
# mkdir -p $out/systems | ||
# mv "$supportedSystemsPath" $out/systems.json | ||
# echo "Systems: $(<$out/systems.json)" | ||
# for system in $(jq -r '.[]' "$out/systems.json"); do | ||
# mkdir -p "$out/systems/$system/chunks" | ||
# printf "%s" "$cores" > "$out/systems/$system/cores" | ||
# for chunk in $(seq -w 0 "$(( num_chunks - 1 ))"); do | ||
# jq '{ | ||
# paths: .[($chunk * $chunk_size):(($chunk + 1) * $chunk_size)], | ||
# systems: [ $system ], | ||
# checkMeta: $checkMeta, | ||
# includeBroken: $includeBroken | ||
# }' \ | ||
# --argjson chunk "$chunk" \ | ||
# --argjson chunk_size "$chunk_size" \ | ||
# --arg system "$system" \ | ||
# --argjson checkMeta "${lib.boolToString checkMeta}" \ | ||
# --argjson includeBroken "${lib.boolToString includeBroken}" \ | ||
# ${attrpathsSuperset}/paths.json \ | ||
# > "$out/systems/$system/chunks/$chunk.json" | ||
# done | ||
# done | ||
# ''; | ||
|
||
singleSystem = | ||
{ | ||
evalSystem, | ||
attrpathFile, | ||
checkMeta ? true, | ||
includeBroken ? true, | ||
# How many attributes to be evaluating at any single time. | ||
# This effectively limits the maximum memory usage. | ||
# Decrease this if too much memory is used | ||
simultaneousAttrsPerSystem ? 100000, | ||
quickTest ? false, | ||
}: | ||
let | ||
singleChunk = writeShellScript "chunk" '' | ||
set -euo pipefail | ||
chunkSize=$1 | ||
myChunk=$2 | ||
outputDir=$3 | ||
system=$4 | ||
nix-env -f "${nixpkgs}/ci/eval/parallel.nix" \ | ||
--query --available \ | ||
--no-name --attr-path --out-path \ | ||
--show-trace \ | ||
--arg chunkSize "$chunkSize" \ | ||
--arg myChunk "$myChunk" \ | ||
--arg attrpathFile "${attrpathFile}" \ | ||
--arg systems "[ \"$system\" ]" \ | ||
--arg checkMeta ${lib.boolToString checkMeta} \ | ||
--arg includeBroken ${lib.boolToString includeBroken} \ | ||
> "$outputDir/$myChunk" | ||
''; | ||
in | ||
runCommand "nixpkgs-eval-${evalSystem}" | ||
{ | ||
nativeBuildInputs = [ | ||
nix | ||
time | ||
procps | ||
jq | ||
]; | ||
env = { inherit evalSystem; }; | ||
} | ||
'' | ||
set -x | ||
export NIX_STATE_DIR=$(mktemp -d) | ||
nix-store --init | ||
echo "System: $evalSystem" | ||
cores=$NIX_BUILD_CORES | ||
echo "Cores: $cores" | ||
num_attrs=$(jq length "${attrpathFile}") | ||
echo "Attribute count: $num_attrs" | ||
chunk_size=$(( ${toString simultaneousAttrsPerSystem} / cores )) | ||
echo "Chunk size: $chunk_size" | ||
# Same as `num_attrs / chunk_size` but rounded up | ||
num_chunks=$(( (num_attrs - 1) / chunk_size + 1 )) | ||
echo "Chunk count: $num_chunks" | ||
( | ||
while true; do | ||
free -g | ||
sleep 20 | ||
done | ||
) & | ||
seq_end=$(( num_chunks - 1 )) | ||
${lib.optionalString quickTest '' | ||
seq_end=0 | ||
''} | ||
chunkOutputDir=$(mktemp -d) | ||
seq -w 0 "$seq_end" | | ||
command time -v xargs -t -I{} -P"$cores" \ | ||
${singleChunk} "$chunk_size" {} "$chunkOutputDir" "$evalSystem" | ||
mkdir $out | ||
cat "$chunkOutputDir"/* > $out/paths | ||
''; | ||
|
||
combine = | ||
{ | ||
resultsDir, | ||
}: | ||
runCommand "combined-result" | ||
{ | ||
nativeBuildInputs = [ | ||
jq | ||
]; | ||
passAsFile = [ "jqScript" ]; | ||
jqScript = # jq | ||
'' | ||
split("\n") | | ||
map(select(. != "") | split(" ") | map(select(. != ""))) | | ||
map( | ||
{ | ||
key: .[0], | ||
value: .[1] | split(";") | map(split("=") | | ||
if length == 1 then | ||
{ key: "out", value: .[0] } | ||
else | ||
{ key: .[0], value: .[1] } | ||
end) | from_entries} | ||
) | from_entries | ||
''; | ||
} | ||
'' | ||
mkdir -p $out | ||
cat ${resultsDir}/*/paths | | ||
jq --sort-keys --raw-input --slurp -f "$jqScriptPath" \ | ||
> $out/outpaths.json | ||
''; | ||
|
||
together = | ||
{ | ||
quickTest ? false, | ||
}: | ||
let | ||
systems = if quickTest then [ "x86_64-linux" ] else supportedSystems; | ||
results = linkFarm "results" ( | ||
map (system: { | ||
name = system; | ||
path = singleSystem { | ||
system = system; | ||
attrpathFile = attrpathsSuperset + "/paths.json"; | ||
inherit quickTest; | ||
}; | ||
}) systems | ||
); | ||
final = combine { | ||
resultsDir = results; | ||
}; | ||
in | ||
final; | ||
|
||
in | ||
{ | ||
inherit | ||
attrpathsSuperset | ||
singleSystem | ||
combine | ||
together | ||
; | ||
} |
Oops, something went wrong.