
Commit b324dbc

adding ci files
Author: Kernel Patches Daemon
1 parent 5abde62 commit b324dbc

35 files changed, +2158 -18 lines changed
@@ -0,0 +1,49 @@
name: 'run-veristat'
description: 'Run veristat benchmark'
inputs:
  veristat_output:
    description: 'Veristat output filepath'
    required: true
  baseline_name:
    description: 'Veristat baseline cache name'
    required: true
runs:
  using: "composite"
  steps:
    - uses: actions/upload-artifact@v3
      with:
        name: ${{ inputs.baseline_name }}
        if-no-files-found: error
        path: ${{ github.workspace }}/${{ inputs.veristat_output }}

    # For pull request:
    # - get baseline log from cache
    # - compare it to current run
    - if: ${{ github.event_name == 'pull_request' }}
      uses: actions/cache/restore@v3
      with:
        key: ${{ inputs.baseline_name }}
        restore-keys: |
          ${{ inputs.baseline_name }}-
        path: '${{ github.workspace }}/${{ inputs.baseline_name }}'

    - if: ${{ github.event_name == 'pull_request' }}
      name: Show veristat comparison
      shell: bash
      run: ./.github/scripts/compare-veristat-results.sh
      env:
        BASELINE_PATH: ${{ github.workspace }}/${{ inputs.baseline_name }}
        VERISTAT_OUTPUT: ${{ inputs.veristat_output }}

    # For push: just put baseline log to cache
    - if: ${{ github.event_name == 'push' }}
      shell: bash
      run: |
        mv "${{ github.workspace }}/${{ inputs.veristat_output }}" \
          "${{ github.workspace }}/${{ inputs.baseline_name }}"

    - if: ${{ github.event_name == 'push' }}
      uses: actions/cache/save@v3
      with:
        key: ${{ inputs.baseline_name }}-${{ github.run_id }}
        path: '${{ github.workspace }}/${{ inputs.baseline_name }}'

.github/scripts/bpf-objects-rootfs.sh

+12
@@ -0,0 +1,12 @@
#!/bin/bash

eval "$(guestfish --listen)"

guestfish --verbose --remote \
  add /tmp/root.img label:img : \
  launch : \
  mount /dev/disk/guestfs/img / : \
  copy-in /tmp/bpf_objects / : \
  chmod 0755 /bpf_objects

guestfish --remote exit
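
The script above drives a single remote guestfish session: it attaches /tmp/root.img, launches the appliance, mounts the image, copies /tmp/bpf_objects into it, and fixes permissions. A minimal sketch of exercising it outside CI, assuming libguestfs-tools is installed and using placeholder paths for the rootfs image and the built BPF objects:

set -eu

# Stage the inputs at the paths hard-coded in the script (placeholder sources).
cp path/to/rootfs.img /tmp/root.img
mkdir -p /tmp/bpf_objects
cp tools/testing/selftests/bpf/*.bpf.o /tmp/bpf_objects/

# Requires guestfish from libguestfs-tools.
./.github/scripts/bpf-objects-rootfs.sh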
@@ -0,0 +1,18 @@
#!/bin/bash

if [[ ! -f "${BASELINE_PATH}" ]]; then
  echo "# No ${BASELINE_PATH} available" >> "${GITHUB_STEP_SUMMARY}"

  echo "No ${BASELINE_PATH} available"
  echo "Printing veristat results"
  cat "${VERISTAT_OUTPUT}"

  exit
fi

selftests/bpf/veristat \
  --output-format csv \
  --emit file,prog,verdict,states \
  --compare "${BASELINE_PATH}" "${VERISTAT_OUTPUT}" > compare.csv

python3 ./.github/scripts/veristat_compare.py compare.csv
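
The comparison script is configured entirely through environment variables; in CI the composite action above supplies BASELINE_PATH and VERISTAT_OUTPUT. A hedged sketch of a local run, with hypothetical file paths and assuming veristat has already been built under selftests/bpf:

export BASELINE_PATH=/tmp/veristat-baseline.csv    # hypothetical baseline file
export VERISTAT_OUTPUT=/tmp/veristat-current.csv   # hypothetical current-run file
export GITHUB_STEP_SUMMARY=/tmp/step-summary.md    # stand-in for the CI summary file

./.github/scripts/compare-veristat-results.sh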
+23
@@ -0,0 +1,23 @@
#!/bin/bash

branch="${GITHUB_BASE_REF}"

if [ "${GITHUB_EVENT_NAME}" = 'push' ]; then
  branch="${GITHUB_REF_NAME}"
fi

echo "branch=${branch}" >> "${GITHUB_OUTPUT}"

upstream="${branch//_base/}"
commit="$(
  git rev-parse "origin/${upstream}" &> /dev/null \
  || (
    git fetch --quiet --prune --no-tags --depth=1 --no-recurse-submodules origin "+refs/heads/${upstream}:refs/remotes/origin/${upstream}" && \
    git rev-parse "origin/${upstream}"
  )
)"
timestamp_utc="$(TZ=utc git show --format='%cd' --no-patch --date=iso-strict-local "${commit}")"

echo "timestamp=${timestamp_utc}" >> "${GITHUB_OUTPUT}"
echo "commit=${commit}" >> "${GITHUB_OUTPUT}"
echo "Most recent upstream commit is ${commit}"

.github/scripts/matrix.py

+183
@@ -0,0 +1,183 @@
#!/usr/bin/env python3

import os
import dataclasses
import json

from enum import Enum
from typing import Any, Dict, List, Final, Set, Union

MANAGED_OWNER: Final[str] = "kernel-patches"
MANAGED_REPOS: Final[Set[str]] = {
    f"{MANAGED_OWNER}/bpf",
    f"{MANAGED_OWNER}/vmtest",
}
# We need to run on ubuntu 20.04 because our rootfs is based on debian buster and we
# otherwise get library versioning issue such as
# `./test_verifier: /lib/x86_64-linux-gnu/libc.so.6: version `GLIBC_2.34' not found (required by ./test_verifier)`
DEFAULT_RUNNER: Final[str] = "ubuntu-20.04"
DEFAULT_LLVM_VERSION: Final[int] = 17


class Arch(str, Enum):
    """
    CPU architecture supported by CI.
    """

    AARCH64 = "aarch64"
    S390X = "s390x"
    X86_64 = "x86_64"


class Compiler(str, Enum):
    GCC = "gcc"
    LLVM = "llvm"


@dataclasses.dataclass
class Toolchain:
    compiler: Compiler
    # This is relevant ONLY for LLVM and should not be required for GCC
    version: int

    @property
    def short_name(self) -> str:
        return str(self.compiler.value)

    @property
    def full_name(self) -> str:
        if self.compiler == Compiler.GCC:
            return self.short_name

        return f"{self.short_name}-{self.version}"

    def to_dict(self) -> Dict[str, Union[str, int]]:
        return {
            "name": self.short_name,
            "fullname": self.full_name,
            "version": self.version,
        }


@dataclasses.dataclass
class BuildConfig:
    arch: Arch
    toolchain: Toolchain
    kernel: str = "LATEST"
    run_veristat: bool = False
    parallel_tests: bool = False
    build_release: bool = False

    @property
    def runs_on(self) -> List[str]:
        if is_managed_repo():
            return ["self-hosted", self.arch.value]
        return [DEFAULT_RUNNER]

    @property
    def tests(self) -> Dict[str, Any]:
        tests_list = [
            "test_progs",
            "test_progs_parallel",
            "test_progs_no_alu32",
            "test_progs_no_alu32_parallel",
            "test_maps",
            "test_verifier",
        ]

        if self.toolchain.version >= 18:
            tests_list.append("test_progs_cpuv4")

        if not self.parallel_tests:
            tests_list = [test for test in tests_list if not test.endswith("parallel")]

        return {"include": [generate_test_config(test) for test in tests_list]}

    def to_dict(self) -> Dict[str, Any]:
        return {
            "arch": self.arch.value,
            "toolchain": self.toolchain.to_dict(),
            "kernel": self.kernel,
            "run_veristat": self.run_veristat,
            "parallel_tests": self.parallel_tests,
            "build_release": self.build_release,
            "runs_on": self.runs_on,
            "tests": self.tests,
        }


def is_managed_repo() -> bool:
    return (
        os.environ["GITHUB_REPOSITORY_OWNER"] == MANAGED_OWNER
        and os.environ["GITHUB_REPOSITORY"] in MANAGED_REPOS
    )


def set_output(name, value):
    """Write an output variable to the GitHub output file."""
    with open(os.getenv("GITHUB_OUTPUT"), "a", encoding="utf-8") as file:
        file.write(f"{name}={value}\n")


def generate_test_config(test: str) -> Dict[str, Union[str, int]]:
    """Create the configuration for the provided test."""
    is_parallel = test.endswith("_parallel")
    config = {
        "test": test,
        "continue_on_error": is_parallel,
        # While in experimental mode, parallel jobs may get stuck
        # anywhere, including in user space where the kernel won't detect
        # a problem and panic. We add a second layer of (smaller) timeouts
        # here such that if we get stuck in a parallel run, we hit this
        # timeout and fail without affecting the overall job success (as
        # would be the case if we hit the job-wide timeout). For
        # non-experimental jobs, 360 is the default which will be
        # superseded by the overall workflow timeout (but we need to
        # specify something).
        "timeout_minutes": 30 if is_parallel else 360,
    }
    return config


if __name__ == "__main__":
    matrix = [
        BuildConfig(
            arch=Arch.X86_64,
            toolchain=Toolchain(compiler=Compiler.GCC, version=DEFAULT_LLVM_VERSION),
            run_veristat=True,
            parallel_tests=True,
        ),
        BuildConfig(
            arch=Arch.X86_64,
            toolchain=Toolchain(compiler=Compiler.LLVM, version=DEFAULT_LLVM_VERSION),
            build_release=True,
        ),
        BuildConfig(
            arch=Arch.X86_64,
            toolchain=Toolchain(compiler=Compiler.LLVM, version=18),
            build_release=True,
        ),
        BuildConfig(
            arch=Arch.AARCH64,
            toolchain=Toolchain(compiler=Compiler.GCC, version=DEFAULT_LLVM_VERSION),
        ),
        # BuildConfig(
        #     arch=Arch.AARCH64,
        #     toolchain=Toolchain(
        #         compiler=Compiler.LLVM,
        #         version=DEFAULT_LLVM_VERSION
        #     ),
        # ),
        BuildConfig(
            arch=Arch.S390X,
            toolchain=Toolchain(compiler=Compiler.GCC, version=DEFAULT_LLVM_VERSION),
        ),
    ]

    # Outside of those repositories we only run on x86_64
    if not is_managed_repo():
        matrix = [config for config in matrix if config.arch == Arch.X86_64]

    json_matrix = json.dumps({"include": [config.to_dict() for config in matrix]})
    print(json_matrix)
    set_output("build_matrix", json_matrix)
@@ -0,0 +1,63 @@
#!/bin/bash

set -eu

commit_id="${1}"

# $1 - the SHA-1 to fetch and check out
fetch_and_checkout() {
  local build_base_sha

  build_base_sha="${1}"
  # If cached artifacts became stale for one reason or another, we
  # may not have the build base SHA available. Fetch it and retry.
  git fetch origin "${build_base_sha}" && git checkout --quiet "${build_base_sha}"
}

# $1 - value of KBUILD_OUTPUT
clear_cache_artifacts() {
  local output_dir

  output_dir="${1}"
  echo "Unable to find earlier upstream ref. Discarding KBUILD_OUTPUT contents..."
  rm --recursive --force "${output_dir}"
  mkdir "${output_dir}"
  false
}

# $1 - value of KBUILD_OUTPUT
# $2 - current time in ISO 8601 format
restore_source_code_times() {
  local build_output
  local current_time
  local src_time
  local obj_time

  build_output="${1}"
  current_time="${2}"
  src_time="$(date --iso-8601=ns --date="${current_time} - 2 minutes")"
  obj_time="$(date --iso-8601=ns --date="${current_time} - 1 minute")"

  git ls-files | xargs --max-args=10000 touch -m --no-create --date="${src_time}"
  find "${build_output}" -type f | xargs --max-args=10000 touch -m --no-create --date="${obj_time}"
  git checkout --quiet -
  echo "Adjusted src and obj time stamps relative to system time"
}

mkdir --parents "${KBUILD_OUTPUT}"
current_time="$(date --iso-8601=ns)"

if [ -f "${KBUILD_OUTPUT}/.build-base-sha" ]; then
  build_base_sha="$(cat "${KBUILD_OUTPUT}/.build-base-sha")"
  echo "Setting up base build state for ${build_base_sha}"

  (
    git checkout --quiet "${build_base_sha}" \
      || fetch_and_checkout "${build_base_sha}" \
      || clear_cache_artifacts "${KBUILD_OUTPUT}"
  ) && restore_source_code_times "${KBUILD_OUTPUT}" "${current_time}"
else
  echo "No previous build data found"
fi

echo -n "${commit_id}" > "${KBUILD_OUTPUT}/.build-base-sha"
