
Commit 49e8138

fix black

Signed-off-by: Jack Luar <[email protected]>

1 parent 3738075 commit 49e8138

2 files changed: +1 -2 lines changed


tools/AutoTuner/src/autotuner/distributed.py (+1 -1)
@@ -1092,7 +1092,7 @@ def sweep():
         local_dir=LOCAL_DIR,
         resume=args.resume,
         stop={"training_iteration": args.iterations},
-        resources_per_trial={"cpu": os.cpu_count()/args.jobs},
+        resources_per_trial={"cpu": os.cpu_count() / args.jobs},
         log_to_file=["trail-out.log", "trail-err.log"],
         trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
         trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",

tools/AutoTuner/test/resume_check.py (+0 -1)
@@ -58,7 +58,6 @@ def setUp(self):
             for c in options
         ]
 
-
     def test_tune_resume(self):
         # Goal is to first run the first config (without resume) and then run the second config (with resume)
         # and check if the run is able to complete.
