[NFC][CI] Add check for python formatting, run black (#940)
Before pushing PRs with changes to python files, please run `black .` in
the directory with the changed python files (you might need to install
black).

Black formatter: https://pypi.org/project/black/
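
For illustration only (not part of this commit), here is a minimal sketch of the kind of change black makes, using its format_str API; it assumes black is installed (e.g. pip install black) and mirrors the benchmark_compilation_flags hunk in run.py below.

# Illustrative sketch, not from this commit: reformat one over-long line with
# black's Python API. Assumes black is installed in the current environment.
import black

src = 'benchmark_compilation_flags = ["--iree-amdaie-enable-infinite-loop-around-core-block=true"]\n'
print(black.format_str(src, mode=black.FileMode()))
# The input exceeds black's default 88-column limit, so black splits it across
# the brackets, exactly as in the MatmulBenchmark hunk further down:
# benchmark_compilation_flags = [
#     "--iree-amdaie-enable-infinite-loop-around-core-block=true"
# ]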
newling authored Nov 28, 2024
1 parent a0c8039 commit 4f00ff9
Showing 2 changed files with 31 additions and 17 deletions.
12 changes: 12 additions & 0 deletions .github/workflows/black.yml
@@ -0,0 +1,12 @@
+name: Lint
+
+on: [push, pull_request]
+
+jobs:
+  lint:
+    runs-on: nod-ai-shared-cpubuilder-manylinux-x86_64
+    steps:
+      - uses: actions/checkout@v4
+      - uses: psf/black@stable
+
+# see https://black.readthedocs.io/en/stable/integrations/github_actions.html
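
A rough local equivalent of this lint job (a sketch, not something this commit adds) is to run black in check mode before pushing, so that nothing is rewritten and a non-zero exit code flags files that would be reformatted; the snippet assumes black is installed in the active Python environment.

# Sketch of a local pre-push check mirroring the CI job above: report (and
# diff) the files black would change, without modifying anything in place.
import subprocess
import sys

result = subprocess.run([sys.executable, "-m", "black", "--check", "--diff", "."])
sys.exit(result.returncode)  # non-zero means some file is not black-formatted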
36 changes: 19 additions & 17 deletions build_tools/ci/cpu_comparison/run.py
@@ -103,10 +103,10 @@ def run(self, config):
            return False

        # If use_chess=1, and config has not provided a valid
-        # path to vitis, then don't run the test. The asymmetry between
-        # logic for peano and chess is because we don't expect everyone
+        # path to vitis, then don't run the test. The asymmetry between
+        # logic for peano and chess is because we don't expect everyone
        # running this script to have chess (currently Windows CI for example
-        # does not).
+        # does not).
        if self.use_chess and not config.vitis_dir:
            return False

@@ -115,7 +115,6 @@ def run(self, config):
        if not self.use_chess and not config.peano_dir:
            raise RuntimeError("Peano path not provided, and use_chess=False")

-
        # Call into test-specific code to run the test.
        return self._execute(config)

@@ -366,7 +365,9 @@ class MatmulBenchmark(BaseMatmul):
    A test of the form matmul(A,B) where A:MxK, B:KxN
    """

-    benchmark_compilation_flags = ["--iree-amdaie-enable-infinite-loop-around-core-block=true"]
+    benchmark_compilation_flags = [
+        "--iree-amdaie-enable-infinite-loop-around-core-block=true"
+    ]

    def __init__(
        self,
@@ -750,7 +751,9 @@ def generate_aie_output(config, aie_vmfb, input_args, function_name, name, outpu
    return np_from_binfile(aie_bin, output_type)


-def benchmark_aie_kernel_time(config, aie_vmfb, input_args, function_name, name, n_repeats, n_kernel_runs):
+def benchmark_aie_kernel_time(
+    config, aie_vmfb, input_args, function_name, name, n_repeats, n_kernel_runs
+):
    """
    Benchmark a compiled AIE module's (aie_vmfb) kernel time, average over the specified number of runs.
    """
@@ -1131,7 +1134,7 @@ def benchmark_aie(
    lower_to_aie_pipeline:
        The pipeline to be used for lowering to AIE (objectFifo, AIR).
    n_repeats:
-        The number of repetitions to be used for getting statistics (mean, median, stddev)
+        The number of repetitions to be used for getting statistics (mean, median, stddev)
    n_kernel_runs:
        The number of invocations of the kernel, for averaging.
    function_name:
@@ -1141,12 +1144,14 @@
        The seed to be used for generating the inputs.
    """
    if (
-        "--iree-amdaie-enable-infinite-loop-around-core-block=true"
+        "--iree-amdaie-enable-infinite-loop-around-core-block=true"
        not in aie_compilation_flags
    ):
-        raise ValueError("To benchmark an AIE kernel module, the " \
-            "`--iree-amdaie-enable-infinite-loop-around-core-block=true` " \
-            "should be passed.")
+        raise ValueError(
+            "To benchmark an AIE kernel module, the "
+            "`--iree-amdaie-enable-infinite-loop-around-core-block=true` "
+            "should be passed."
+        )

    name = name_from_mlir_filename(test_file)
    input_args = generate_inputs(test_file, config.output_dir, seed)
@@ -1167,7 +1172,7 @@ def benchmark_aie(
    if config.verbose:
        print(f"Skipping AIE run for {test_file} because 'do_not_run_aie=True'.")
        return
-
+
    print(f"Performance benchmark: {test_file}")
    benchmark_aie_kernel_time(
        config,
@@ -1350,8 +1355,6 @@ def __init__(self):
            )
        )

-
-
        # Some bf16 Performance tests:
        for M, N, K, use_ukernel in [
            (512, 512, 4096, False),
@@ -1571,9 +1574,8 @@ def all_tests(

    for test in tests.tests:

-        skip = (
-            test.name in skip_test_set or
-            any((label in skip_test_set for label in test.labels))
+        skip = test.name in skip_test_set or any(
+            (label in skip_test_set for label in test.labels)
        )
        if skip:
            not_match.append(test.name)