Skip to content

Commit

Permalink
pre-commit: replace flake8 with ruff, rerun on all Python files
Browse files Browse the repository at this point in the history
  • Loading branch information
dev-zero committed May 17, 2023
1 parent fdba3c2 commit aee316e
Show file tree
Hide file tree
Showing 9 changed files with 33 additions and 30 deletions.
4 changes: 0 additions & 4 deletions .flake8

This file was deleted.

17 changes: 9 additions & 8 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,21 @@ default_language_version:
exclude: '^tools/(build_utils/fypp)'
fail_fast: false
repos:
- repo: https://github.com/charliermarsh/ruff-pre-commit
rev: 'v0.0.261'
hooks:
- id: ruff
args: [ --fix, --exit-non-zero-on-fix ]
exclude: >-
(?x)^(
.cp2k/.*|
)$
- repo: https://github.com/ambv/black
rev: 22.6.0
hooks:
- id: black
name: Reformat Python files with the black code formatter
files: '^.*(/PACKAGE)|(\.py)$'
- repo: https://github.com/pycqa/flake8
rev: 5.0.4
hooks:
- id: flake8
exclude: >-
(?x)^(
.cp2k/.*|
)$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
Expand Down
3 changes: 3 additions & 0 deletions .ruff.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Ruff lint configuration, replacing the deleted .flake8.
# Rule families: E = pycodestyle errors, F = Pyflakes, B = flake8-bugbear.
select = ["E", "F", "B"]
# Maximum allowed line length — presumably carried over from the old
# flake8 config to keep the existing codebase passing; TODO confirm.
line-length = 128
# B905: `zip()` without an explicit `strict=` argument. NOTE(review):
# `strict=` needs Python 3.10+, so ignoring it likely keeps older
# interpreters supported — confirm the project's minimum Python version.
ignore = ["B905"]
2 changes: 1 addition & 1 deletion src/acc/libsmm_acc/kernels/smm_acc.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ def compatible_mnk(algo, m, n, k):
compatible = False
else:
if algo != "medium":
assert False, f"Cannot identify algorithm:{str(algo)}"
raise AssertionError(f"Cannot identify algorithm:{str(algo)}")

return compatible

Expand Down
6 changes: 3 additions & 3 deletions src/acc/libsmm_acc/kernels/smm_acc_predict.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,14 +247,14 @@ def get_baseline_performances_per_mnk(data, algorithm, gpu, autotuning):
& (data.v == baseline_pars["v"])
].index.tolist()
else:
assert False, f"Cannot recognize algorithm: {algorithm}"
raise AssertionError(f"Cannot recognize algorithm: {algorithm}")

if len(idx_baseline) == 1:
idx_baseline = idx_baseline[0]
baseline_perf[mnk] = data["perf (Gflop/s)"][idx_baseline]
elif len(idx_baseline) > 1:
assert False, "Found more than one corresponding index: " + str(
idx_baseline
raise AssertionError(
"Found more than one corresponding index: " + str(idx_baseline)
)
else:
pass # if none were found, they're in another data chunk. Do nothing.
Expand Down
2 changes: 1 addition & 1 deletion src/acc/libsmm_acc/notebooks/nb_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ def get_folders_to_read(to_read, autotuning_data_path):
if to_read.match(f) is not None
]
else:
assert False, "Cannot recognize option: " + to_read
raise AssertionError("Cannot recognize option: " + to_read)

num_folders_to_read = len(folders_to_read)
assert num_folders_to_read > 0
Expand Down
8 changes: 5 additions & 3 deletions src/acc/libsmm_acc/predict/predict_train.py
Original file line number Diff line number Diff line change
Expand Up @@ -582,7 +582,7 @@ def get_hyperparameter_grid(algo, model_name, n_features):
"n_estimators": list(n_estimators),
}
else:
assert False, "Cannot recognize model: " + model_name
raise AssertionError("Cannot recognize model: " + model_name)

return param_grid

Expand Down Expand Up @@ -700,7 +700,9 @@ def get_model(model_to_train, algo, njobs, ntrees):
elif model_to_train == "xgb-RF":
model, model_name = get_xgb_RandomForest_model(algo, njobs, ntrees)
else:
assert False, "Cannot recognize model: " + model_to_train + ". Options: DT, RF"
raise AssertionError(
"Cannot recognize model: " + model_to_train + ". Options: DT, RF"
)
return model, model_name


Expand Down Expand Up @@ -849,7 +851,7 @@ def train_model(X, X_mnk, Y, algo, model_options, folder, log):
# Hyperparameter Optimization
param_grid = get_hyperparameter_grid(algo, model_name, n_features)
if param_grid is None:
assert False, "param_grid object is None. Please implement!"
raise AssertionError("param_grid object is None. Please implement!")

# At this point, we "cheat"/"take a shortcut" in 2 ways:
# - we split into train/test partitions using the simple default splitter, not one that is aware of mnk-groups
Expand Down
19 changes: 10 additions & 9 deletions src/acc/libsmm_acc/predict/prepare_training_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,10 +113,11 @@ def get_performance_closest_to_baseline(
if len(idx_baseline) > 0:
break
else:
assert False, (
'Could not find closest baseline for mnk=({}x{}x{}) and for algorithm "{}".'
"\nLast baseline parameters searched:\n{}"
"\nParameter sets searched:\n".format(m, n, k, algorithm, baseline_pars)
raise AssertionError(
(
'Could not find closest baseline for mnk=({}x{}x{}) and for algorithm "{}".\n'
"Last baseline parameters searched:\n{}\nParameter sets searched:\n"
).format(m, n, k, algorithm, baseline_pars)
)

idx_baseline = idx_baseline[0]
Expand All @@ -142,7 +143,7 @@ def process_chunk(data_chunk, algorithm, gpu_properties, autotuning_properties):
# For each (mnk), ...
baseline_performances = dict()
max_performances = dict()
for i, mnk in enumerate(mnks):
for _i, mnk in enumerate(mnks):

data_mnk = data_chunk[data_chunk["mnk"] == mnk]
m, n, k = mnk_pattern.match(mnk).groups()
Expand Down Expand Up @@ -433,7 +434,7 @@ def plot_baseline(baseline_perfs_by_algo, data_path, algorithms):

# Get all mnks
mnk_sequences = list()
for algo, baseline_dic in baseline_perfs_by_algo.items():
for _algo, baseline_dic in baseline_perfs_by_algo.items():
mnk_sequences += list(baseline_dic.keys())
all_mnks = list(set.union(set(mnk_sequences)))

Expand All @@ -451,9 +452,9 @@ def plot_baseline(baseline_perfs_by_algo, data_path, algorithms):
baseline_perfs[mnk] = baseline_perfs_by_algo[algo][mnk]
break
else:
assert (
False
), "NOOOO this is actually impossible by def of all_mnks, isn't it?"
raise AssertionError(
"NOOOO this is actually impossible by def of all_mnks, isn't it?"
)

# Sort
mnks = list()
Expand Down
2 changes: 1 addition & 1 deletion src/acc/opencl/smm/tune_multiply.py
Original file line number Diff line number Diff line change
Expand Up @@ -741,7 +741,7 @@ def handle_sigint(self, signum, frame):
)
args = argparser.parse_args()
# OPENCL_LIBSMM_SMM_xx=tune|enabled|on must be given to permit tuning)
if not os.getenv("OPENCL_LIBSMM_SMM_WS") in {"tune", "enabled", "on"}:
if os.getenv("OPENCL_LIBSMM_SMM_WS") not in {"tune", "enabled", "on"}:
os.environ["OPENCL_LIBSMM_SMM_WS"] = "{}".format(args.ws)
# if not os.getenv("OPENCL_LIBSMM_SMM_AL") in {"tune", "enabled", "on"}:
# os.environ["OPENCL_LIBSMM_SMM_AL"] = "{}".format(args.al)
Expand Down

0 comments on commit aee316e

Please sign in to comment.