Skip to content

Commit

Permalink
[pre-commit.ci] auto fixes from pre-commit.com hooks
Browse files Browse the repository at this point in the history
for more information, see https://pre-commit.ci
  • Loading branch information
pre-commit-ci[bot] committed Jun 19, 2023
1 parent 6052f1a commit 63950c3
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 53 deletions.
3 changes: 2 additions & 1 deletion batchspawner/api.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import json
from tornado import web

from jupyterhub.apihandlers import APIHandler, default_handlers
from tornado import web


class BatchSpawnerAPIHandler(APIHandler):
Expand Down
46 changes: 17 additions & 29 deletions batchspawner/batchspawner.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,20 +16,15 @@
* job names instead of PIDs
"""
import asyncio
import pwd
import os
import pwd
import re

import xml.etree.ElementTree as ET

from enum import Enum

from jinja2 import Template

from jupyterhub.spawner import Spawner
from traitlets import Integer, Unicode, Float, Dict, default

from jupyterhub.spawner import set_user_setuid
from jupyterhub.spawner import Spawner, set_user_setuid
from traitlets import Dict, Float, Integer, Unicode, default


def format_template(template, *args, **kwargs):
Expand Down Expand Up @@ -240,9 +235,7 @@ async def run_command(self, cmd, input=None, env=None):
self.log.error(out)
self.log.error("Stderr:")
self.log.error(eout)
raise RuntimeError(
"{} exit status {}: {}".format(cmd, proc.returncode, eout)
)
raise RuntimeError(f"{cmd} exit status {proc.returncode}: {eout}")
except asyncio.TimeoutError:
self.log.error(
"Encountered timeout trying to clean up command, process probably killed already: %s"
Expand Down Expand Up @@ -322,7 +315,7 @@ async def query_job_status(self):
except RuntimeError as e:
# e.args[0] is stderr from the process
self.job_status = e.args[0]
except Exception as e:
except Exception:
self.log.error("Error querying job " + self.job_id)
self.job_status = ""

Expand Down Expand Up @@ -354,13 +347,13 @@ async def cancel_batch_job(self):

def load_state(self, state):
"""load job_id from state"""
super(BatchSpawnerBase, self).load_state(state)
super().load_state(state)
self.job_id = state.get("job_id", "")
self.job_status = state.get("job_status", "")

def get_state(self):
"""add job_id to state"""
state = super(BatchSpawnerBase, self).get_state()
state = super().get_state()
if self.job_id:
state["job_id"] = self.job_id
if self.job_status:
Expand All @@ -369,7 +362,7 @@ def get_state(self):

def clear_state(self):
"""clear job_id state"""
super(BatchSpawnerBase, self).clear_state()
super().clear_state()
self.job_id = ""
self.job_status = ""

Expand Down Expand Up @@ -415,7 +408,7 @@ async def start(self):
if self.server:
self.server.port = self.port

job = await self.submit_batch_script()
await self.submit_batch_script()

# We are called with a timeout, and if the timeout expires this function will
# be interrupted at the next yield, and self.stop() will be called.
Expand Down Expand Up @@ -458,7 +451,7 @@ async def start(self):

self.db.commit()
self.log.info(
"Notebook server job {0} started at {1}:{2}".format(
"Notebook server job {} started at {}:{}".format(
self.job_id, self.ip, self.port
)
)
Expand All @@ -482,7 +475,7 @@ async def stop(self, now=False):
await asyncio.sleep(1)
if self.job_id:
self.log.warning(
"Notebook server job {0} at {1}:{2} possibly failed to terminate".format(
"Notebook server job {} at {}:{} possibly failed to terminate".format(
self.job_id, self.ip, self.port
)
)
Expand Down Expand Up @@ -799,7 +792,7 @@ def parse_job_id(self, output):
def state_ispending(self):
if self.job_status:
job_info = ET.fromstring(self.job_status).find(
".//job_list[JB_job_number='{0}']".format(self.job_id)
f".//job_list[JB_job_number='{self.job_id}']"
)
if job_info is not None:
return job_info.attrib.get("state") == "pending"
Expand All @@ -808,7 +801,7 @@ def state_ispending(self):
def state_isrunning(self):
if self.job_status:
job_info = ET.fromstring(self.job_status).find(
".//job_list[JB_job_number='{0}']".format(self.job_id)
f".//job_list[JB_job_number='{self.job_id}']"
)
if job_info is not None:
return job_info.attrib.get("state") == "running"
Expand All @@ -817,13 +810,13 @@ def state_isrunning(self):
def state_gethost(self):
if self.job_status:
queue_name = ET.fromstring(self.job_status).find(
".//job_list[JB_job_number='{0}']/queue_name".format(self.job_id)
f".//job_list[JB_job_number='{self.job_id}']/queue_name"
)
if queue_name is not None and queue_name.text:
return queue_name.text.split("@")[1]

self.log.error(
"Spawner unable to match host addr in job {0} with status {1}".format(
"Spawner unable to match host addr in job {} with status {}".format(
self.job_id, self.job_status
)
)
Expand Down Expand Up @@ -887,12 +880,7 @@ def parse_job_id(self, output):
raise Exception(error_msg)

def cmd_formatted_for_batch(self):
return (
super(CondorSpawner, self)
.cmd_formatted_for_batch()
.replace('"', '""')
.replace("'", "''")
)
return super().cmd_formatted_for_batch().replace('"', '""').replace("'", "''")


class LsfSpawner(BatchSpawnerBase):
Expand Down Expand Up @@ -957,7 +945,7 @@ def state_gethost(self):
return self.job_status.split(" ")[1].strip().split(":")[0]

self.log.error(
"Spawner unable to match host addr in job {0} with status {1}".format(
"Spawner unable to match host addr in job {} with status {}".format(
self.job_id, self.job_status
)
)
Expand Down
8 changes: 3 additions & 5 deletions batchspawner/singleuser.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
import os
import sys

from runpy import run_path
from shutil import which

from jupyterhub.utils import random_port, url_path_join
from jupyterhub.services.auth import HubAuth

import requests
from jupyterhub.services.auth import HubAuth
from jupyterhub.utils import random_port, url_path_join


def main(argv=None):
Expand Down Expand Up @@ -35,7 +33,7 @@ def main(argv=None):
)

cmd_path = which(sys.argv[1])
sys.argv = sys.argv[1:] + ["--port={}".format(port)]
sys.argv = sys.argv[1:] + [f"--port={port}"]
run_path(cmd_path, run_name="__main__")


Expand Down
36 changes: 18 additions & 18 deletions batchspawner/tests/test_spawners.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@
import re
import time

import pytest
from jupyterhub import orm
from jupyterhub.objects import Hub, Server
from jupyterhub.user import User
from traitlets import Unicode
import pytest

from .. import BatchSpawnerRegexStates, JobStatus

Expand Down Expand Up @@ -41,7 +41,7 @@ async def run_command(self, *args, **kwargs):
print("run:", run_re)
assert (
run_re.search(cmd) is not None
), "Failed test: re={0} cmd={1}".format(run_re, cmd)
), f"Failed test: re={run_re} cmd={cmd}"
# Run command normally
out = await super().run_command(*args, **kwargs)
# Test that the command matches the expectations
Expand All @@ -51,7 +51,7 @@ async def run_command(self, *args, **kwargs):
print("out:", out_re)
assert (
out_re.search(cmd) is not None
), "Failed output: re={0} cmd={1} out={2}".format(out_re, cmd, out)
), f"Failed output: re={out_re} cmd={cmd} out={out}"
return out


Expand Down Expand Up @@ -282,10 +282,10 @@ async def run_command(self, cmd, input=None, env=None):
# Test the input
run_re = cmd_expectlist.pop(0)
if run_re:
print('run: "{}" [{}]'.format(cmd, run_re))
print(f'run: "{cmd}" [{run_re}]')
assert (
run_re.search(cmd) is not None
), "Failed test: re={0} cmd={1}".format(run_re, cmd)
), f"Failed test: re={run_re} cmd={cmd}"
# Test the stdin - will only be the batch script. For
# each regular expression in batch_script_re_list, assert that
# each re in that list matches the batch script.
Expand All @@ -294,7 +294,7 @@ async def run_command(self, cmd, input=None, env=None):
for match_re in batch_script_re_list:
assert (
match_re.search(batch_script) is not None
), "Batch script does not match {}".format(match_re)
), f"Batch script does not match {match_re}"
# Return expected output.
out = out_list.pop(0)
print(" --> " + out)
Expand Down Expand Up @@ -345,11 +345,11 @@ async def test_torque(db, event_loop):
), # pending
(
re.compile(r"sudo.*qstat"),
"<job_state>R</job_state><exec_host>{}/1</exec_host>".format(testhost),
f"<job_state>R</job_state><exec_host>{testhost}/1</exec_host>",
), # running
(
re.compile(r"sudo.*qstat"),
"<job_state>R</job_state><exec_host>{}/1</exec_host>".format(testhost),
f"<job_state>R</job_state><exec_host>{testhost}/1</exec_host>",
), # running
(re.compile(r"sudo.*qdel"), "STOP"),
(re.compile(r"sudo.*qstat"), ""),
Expand Down Expand Up @@ -387,11 +387,11 @@ async def test_moab(db, event_loop):
(re.compile(r"sudo.*mdiag"), 'State="Idle"'), # pending
(
re.compile(r"sudo.*mdiag"),
'State="Running" AllocNodeList="{}"'.format(testhost),
f'State="Running" AllocNodeList="{testhost}"',
), # running
(
re.compile(r"sudo.*mdiag"),
'State="Running" AllocNodeList="{}"'.format(testhost),
f'State="Running" AllocNodeList="{testhost}"',
), # running
(re.compile(r"sudo.*mjobctl.*-c"), "STOP"),
(re.compile(r"sudo.*mdiag"), ""),
Expand Down Expand Up @@ -429,11 +429,11 @@ async def test_pbs(db, event_loop):
(re.compile(r"sudo.*qstat"), "job_state = Q"), # pending
(
re.compile(r"sudo.*qstat"),
"job_state = R\nexec_host = {}/2*1".format(testhost),
f"job_state = R\nexec_host = {testhost}/2*1",
), # running
(
re.compile(r"sudo.*qstat"),
"job_state = R\nexec_host = {}/2*1".format(testhost),
f"job_state = R\nexec_host = {testhost}/2*1",
), # running
(re.compile(r"sudo.*qdel"), "STOP"),
(re.compile(r"sudo.*qstat"), ""),
Expand Down Expand Up @@ -556,11 +556,11 @@ async def test_condor(db, event_loop):
script = [
(
re.compile(r"sudo.*condor_submit"),
"submitted to cluster {}".format(str(testjob)),
f"submitted to cluster {str(testjob)}",
),
(re.compile(r"sudo.*condor_q"), "1,"), # pending
(re.compile(r"sudo.*condor_q"), "2, @{}".format(testhost)), # runing
(re.compile(r"sudo.*condor_q"), "2, @{}".format(testhost)),
(re.compile(r"sudo.*condor_q"), f"2, @{testhost}"),  # running
(re.compile(r"sudo.*condor_q"), f"2, @{testhost}"),
(re.compile(r"sudo.*condor_rm"), "STOP"),
(re.compile(r"sudo.*condor_q"), ""),
]
Expand Down Expand Up @@ -594,11 +594,11 @@ async def test_lfs(db, event_loop):
script = [
(
re.compile(r"sudo.*bsub"),
"Job <{}> is submitted to default queue <normal>".format(str(testjob)),
f"Job <{str(testjob)}> is submitted to default queue <normal>",
),
(re.compile(r"sudo.*bjobs"), "PEND "), # pending
(re.compile(r"sudo.*bjobs"), "RUN {}".format(testhost)), # running
(re.compile(r"sudo.*bjobs"), "RUN {}".format(testhost)),
(re.compile(r"sudo.*bjobs"), f"RUN {testhost}"), # running
(re.compile(r"sudo.*bjobs"), f"RUN {testhost}"),
(re.compile(r"sudo.*bkill"), "STOP"),
(re.compile(r"sudo.*bjobs"), ""),
]
Expand Down

0 comments on commit 63950c3

Please sign in to comment.