Format code by ruff (#255)
kharus authored Jan 14, 2025
1 parent 8d068a2 commit 4ad4078
Showing 9 changed files with 46 additions and 78 deletions.
9 changes: 4 additions & 5 deletions natural4-server/natural4_server/fastapi.py
@@ -12,21 +12,20 @@
template_dir: anyio.Path = basedir / "template"
temp_dir: anyio.Path = basedir / "temp"
static_dir: anyio.Path = basedir / "static"
natural4_dir: anyio.Path = anyio.Path(
os.environ.get("NL4_WORKDIR", temp_dir / "workdir")
)
natural4_dir: anyio.Path = anyio.Path(os.environ.get("NL4_WORKDIR", temp_dir / "workdir"))

logger = logging.getLogger('uvicorn.error')
logger = logging.getLogger("uvicorn.error")

app = FastAPI()

app.mount("/workdir", StaticFiles(directory=natural4_dir), name="static")


@app.get("/aasvg/{uuid}/{ssid}/{sid}/{image}")
async def show_aasvg_image(uuid: str, ssid: str, sid: str, image: str) -> FileResponse:
print("show_aasvg_image: handling request for /aasvg/ url", file=sys.stderr)

image_path = natural4_dir / uuid / ssid / sid / "aasvg" / "LATEST" / image
print(f"show_aasvg_image: sending path {image_path}", file=sys.stderr)

return FileResponse(image_path)
return FileResponse(image_path)
45 changes: 14 additions & 31 deletions natural4-server/natural4_server/hello.py
@@ -58,9 +58,7 @@ def set_max_runtime(seconds) -> None:


default_filenm_natL4exe_from_stack_install = "natural4-exe"
natural4_exe: str = os.environ.get(
"natural4_exe", default_filenm_natL4exe_from_stack_install
)
natural4_exe: str = os.environ.get("natural4_exe", default_filenm_natL4exe_from_stack_install)

try:
nl4exe_time_limit: float = float(os.environ["NL4EXE_TIME_LIMIT"])
@@ -71,9 +69,7 @@ def set_max_runtime(seconds) -> None:
nl4exe_time_limit: float = 20

# see gunicorn.conf.py for basedir, workdir, startport
natural4_dir: pathlib.Path = pathlib.Path(
os.environ.get("NL4_WORKDIR", pathlib.Path(os.getcwd()) / "temp" / "workdir/")
)
natural4_dir: pathlib.Path = pathlib.Path(os.environ.get("NL4_WORKDIR", pathlib.Path(os.getcwd()) / "temp" / "workdir/"))

app = Sanic("Larangan", dumps=orjson.dumps, loads=orjson.loads)

@@ -98,9 +94,7 @@ def set_max_runtime(seconds) -> None:


@app.route("/aasvg/<uuid>/<ssid>/<sid>/<image>")
async def show_aasvg_image(
request: Request, uuid: str, ssid: str, sid: str, image: str
) -> HTTPResponse:
async def show_aasvg_image(request: Request, uuid: str, ssid: str, sid: str, image: str) -> HTTPResponse:
print("show_aasvg_image: handling request for /aasvg/ url", file=sys.stderr)

image_path = natural4_dir / uuid / ssid / sid / "aasvg" / "LATEST" / image
@@ -186,11 +180,7 @@ async def process_csv(request: Request) -> HTTPResponse:
try:
(await nl4exe).terminate()
finally:
return json(
{
"nl4_err": f"natural4_exe timed out after {nl4exe_time_limit} seconds."
}
)
return json({"nl4_err": f"natural4_exe timed out after {nl4exe_time_limit} seconds."})

print(
f"hello.py main: back from natural4-exe (took {datetime.datetime.now() - start_time})",
@@ -200,12 +190,8 @@ async def process_csv(request: Request) -> HTTPResponse:
nl4_out, nl4_err = await nl4exe.communicate()
nl4_out, nl4_err = nl4_out.decode(), nl4_err.decode()

print(
f"hello.py main: natural4-exe stdout length = {len(nl4_out)}", file=sys.stderr
)
print(
f"hello.py main: natural4-exe stderr length = {len(nl4_err)}", file=sys.stderr
)
print(f"hello.py main: natural4-exe stdout length = {len(nl4_out)}", file=sys.stderr)
print(f"hello.py main: natural4-exe stderr length = {len(nl4_err)}", file=sys.stderr)

short_err_maxlen, long_err_maxlen = 2_000, 20_000
nl4_stdout, nl4_stderr = nl4_out[:long_err_maxlen], nl4_err[:long_err_maxlen]
@@ -227,9 +213,7 @@ async def process_csv(request: Request) -> HTTPResponse:
# postprocessing:
# Use pandoc to generate word and pdf docs from markdown.
# ---------------------------------------------
pandoc_tasks: AsyncGenerator[Task | None, None] = get_pandoc_tasks(
target_folder, timestamp
)
pandoc_tasks: AsyncGenerator[Task | None, None] = get_pandoc_tasks(target_folder, timestamp)

# Concurrently perform the following:
# - Write natural4-exe's stdout to a file.
@@ -265,9 +249,7 @@ async def process_csv(request: Request) -> HTTPResponse:
# - Wait for the flowcharts to be generated before returning to the sidebar.
# - Read in the aasvg html file to return to the sidebar.
async with (
await anyio.open_file(
target_folder / "aasvg" / "LATEST" / "index.html", "r"
) as aasvg_file,
await anyio.open_file(target_folder / "aasvg" / "LATEST" / "index.html", "r") as aasvg_file,
asyncio.TaskGroup() as taskgroup,
):
aasvg_index_task: asyncio.Task[str] = taskgroup.create_task(aasvg_file.read())
@@ -283,18 +265,18 @@ async def process_csv(request: Request) -> HTTPResponse:
}
)


async def petri_post_process(target_folder):
petri_folder = target_folder / "petri"
dot_path = anyio.Path(petri_folder / "LATEST.dot")
# dot_path resolves to something like 2025-01-06T03:00:52.dot
# stem is respectively a timestamp 2025-01-06T03:00:52
timestamp = (await dot_path.readlink()).stem

flowchart_tasks: asyncio.Task[None] = pipe(
get_flowchart_tasks(target_folder, timestamp), run_tasks
)
flowchart_tasks: asyncio.Task[None] = pipe(get_flowchart_tasks(target_folder, timestamp), run_tasks)

return timestamp, flowchart_tasks

return timestamp,flowchart_tasks

async def save_csv(request, target_folder, time_now):
target_path = target_folder / f"{time_now}.csv"
@@ -305,11 +287,12 @@ async def save_csv(request, target_folder, time_now):
await fout.write(request.form["csvString"][0])
return target_path


def extract_fields(data):
uuid: str = data["uuid"][0]
spreadsheet_id: str = data["spreadsheetId"][0]
sheet_id: str = data["sheetId"][0]
return uuid,spreadsheet_id,sheet_id
return uuid, spreadsheet_id, sheet_id

# ---------------------------------------------
# return to sidebar caller
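The comments in the hello.py hunks above outline the postprocessing fan-out: pandoc and flowchart tasks are scheduled while the aasvg index is read back for the sidebar. A minimal sketch of that pattern with `asyncio.TaskGroup` (the names here are illustrative placeholders, not the repository's API):

```python
import asyncio
from collections.abc import Awaitable, Callable, Iterable


async def postprocess_sketch(
    read_index: Callable[[], Awaitable[str]],
    background_jobs: Iterable[Callable[[], Awaitable[None]]],
) -> str:
    # Schedule every background job (pandoc, flowchart rendering, ...) and
    # read the aasvg index concurrently; the TaskGroup waits for all of them.
    async with asyncio.TaskGroup() as group:
        index_task = group.create_task(read_index())
        for job in background_jobs:
            group.create_task(job())
    return index_task.result()
```

In the handler above, the index HTML read this way is what ends up in the JSON response returned to the sidebar caller.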
@@ -22,9 +22,7 @@ class PandocOutput(pyrs.PRecord):


pandoc_outputs: Collection[PandocOutput] = pyrs.s(
PandocOutput(
file_extension="docx", extra_args=("-f", "markdown+hard_line_breaks", "-s")
),
PandocOutput(file_extension="docx", extra_args=("-f", "markdown+hard_line_breaks", "-s")),
PandocOutput(
file_extension="pdf",
extra_args=(
@@ -76,10 +74,10 @@ async def pandoc_md_to_output(
stdout, stderr = await proc.communicate()

if stdout:
print(f'[pandoc out]\n{stdout.decode()}')
print(f"[pandoc out]\n{stdout.decode()}")

if stderr:
print(f'[pandoc err]\n{stderr.decode()}')
print(f"[pandoc err]\n{stderr.decode()}")
except RuntimeError as exc:
print(
f"Error occured while outputting to {file_extension}: {exc}",
5 changes: 2 additions & 3 deletions natural4-server/natural4_server/plugins/flowchart/__init__.py
@@ -11,7 +11,6 @@
if subprocess.check_output("which dot", shell=True).strip():
get_flowchart_tasks = _get_flowchart_tasks
else:
def get_flowchart_tasks(
uuid_ss_folder: str | os.PathLike, timestamp: str | os.PathLike
) -> AsyncGenerator[Task | None, None]:

def get_flowchart_tasks(uuid_ss_folder: str | os.PathLike, timestamp: str | os.PathLike) -> AsyncGenerator[Task | None, None]:
return aiostream.stream.empty()
@@ -65,16 +65,12 @@ async def _dot_file_to_output(
output_file = anyio.Path(output_file)

graphviz_cmd: Sequence[str] = (
pyrse.sq("dot", f"-T{output_file.suffix[1:]}", f"{dot_file}")
+ pyrse.psequence(args)
+ pyrse.sq("-o", f"{output_file}")
pyrse.sq("dot", f"-T{output_file.suffix[1:]}", f"{dot_file}") + pyrse.psequence(args) + pyrse.sq("-o", f"{output_file}")
) # type: ignore

print(f'Calling graphviz with: {" ".join(graphviz_cmd)}', file=sys.stderr)

await asyncio.subprocess.create_subprocess_exec(
*graphviz_cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
await asyncio.subprocess.create_subprocess_exec(*graphviz_cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE)


async def flowchart_dot_to_output(
@@ -97,9 +93,7 @@ async def flowchart_dot_to_output(
print(f"Output file: {output_file}", file=sys.stderr)
await _dot_file_to_output(dot_file, output_file, args)

latest_file: anyio.Path = (
output_path / f"LATEST{suffix}.{file_extension}"
)
latest_file: anyio.Path = output_path / f"LATEST{suffix}.{file_extension}"
try:
await latest_file.unlink(missing_ok=True)
await latest_file.symlink_to(timestamp_file)
@@ -112,10 +106,6 @@ async def get_flowchart_tasks(
print(f"hello.py main: {exc}", file=sys.stderr)


async def get_flowchart_tasks(
uuid_ss_folder: str | os.PathLike, timestamp: str | os.PathLike
) -> AsyncGenerator[Task, None]:
async def get_flowchart_tasks(uuid_ss_folder: str | os.PathLike, timestamp: str | os.PathLike) -> AsyncGenerator[Task, None]:
for output in flowchart_outputs:
yield Task(
func=flowchart_dot_to_output, args=(uuid_ss_folder, timestamp, output)
)
yield Task(func=flowchart_dot_to_output, args=(uuid_ss_folder, timestamp, output))
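For reference, a small self-contained sketch of the graphviz call that `_dot_file_to_output` wraps, here with the process awaited so failures surface (the helper name and the error handling are assumptions, not code from this commit):

```python
import asyncio


async def render_dot(dot_file: str, output_file: str) -> None:
    # Pick the output format from the file suffix, e.g. "png" or "svg".
    fmt = output_file.rsplit(".", 1)[-1]
    proc = await asyncio.create_subprocess_exec(
        "dot", f"-T{fmt}", dot_file, "-o", output_file,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    _, stderr = await proc.communicate()
    if proc.returncode != 0:
        raise RuntimeError(f"dot failed for {dot_file}: {stderr.decode()}")
```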
4 changes: 1 addition & 3 deletions natural4-server/natural4_server/task.py
@@ -29,9 +29,7 @@ def task_to_coro(task: Task) -> Coroutine:
return _run_as_async(lambda: None, tuple())


async def run_tasks(
tasks: AsyncGenerator[Task, None] | Generator[Task, None, None]
) -> None:
async def run_tasks(tasks: AsyncGenerator[Task, None] | Generator[Task, None, None]) -> None:
"""
Runs tasks asynchronously in the background.
"""
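`run_tasks` accepts either a sync or an async generator of `Task` records. A minimal sketch of that shape, assuming each `Task.func` is a coroutine function (the `Task` dataclass below is an illustrative stand-in for the repository's type, not its actual definition):

```python
import asyncio
import dataclasses
from collections.abc import AsyncGenerator, Callable, Generator, Sequence


@dataclasses.dataclass(frozen=True)
class Task:
    func: Callable
    args: Sequence = ()


async def run_tasks_sketch(tasks: AsyncGenerator[Task, None] | Generator[Task, None, None]) -> None:
    # Normalise both generator kinds into a single async iteration.
    async def iterate() -> AsyncGenerator[Task, None]:
        if isinstance(tasks, AsyncGenerator):
            async for task in tasks:
                yield task
        else:
            for task in tasks:
                yield task

    async with asyncio.TaskGroup() as group:
        async for task in iterate():
            group.create_task(task.func(*task.args))
```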
9 changes: 4 additions & 5 deletions natural4-server/pyproject.toml
@@ -24,8 +24,7 @@ dependencies = [


[project.optional-dependencies]
docgen = [
"pypandoc-binary == 1.13",]
docgen = ["pypandoc-binary == 1.13"]

[build-system]
requires = ["hatchling"]
@@ -44,13 +43,13 @@ dev-dependencies = [
line-length = 130
target-version = "py312"

ignore = ["E731", "E701"]
ignore = ["E731", "E701"]
# E731: we are FPers, assigning lambdas is OK
# E701: sometimes it looks better to have multiple statements after colon

[tool.ruff.per-file-ignores]
[tool.ruff.per-file-ignores]
"natural4_server/*.py" = ["F403", "E701", "F405"]
# quiet warning for Joe's unused `runvue` variable
"*joe*.py" = ["F403", "E701", "F405", "E402"]
"*8090*.py" = ["F403", "E701", "F405", "E402"]
# disable false positive linting warnings for joe's config gunicorn files
# disable false positive linting warnings for joe's config gunicorn files
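With this configuration, the formatter and linter pick up the 130-character line length, the py312 target, and the ignore lists from pyproject.toml. A hedged sketch of invoking them from Python — the standard `ruff format` / `ruff check` CLI commands are assumed to be on PATH, and the exact invocation used for this commit is not recorded here:

```python
import subprocess

# Both commands read [tool.ruff] settings from the nearest pyproject.toml.
subprocess.run(["ruff", "format", "natural4_server"], check=True)
subprocess.run(["ruff", "check", "natural4_server"], check=True)
```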
5 changes: 3 additions & 2 deletions natural4-server/tests/conftest.py
@@ -7,10 +7,11 @@
def app():
return application_instance


@pytest.fixture
def post_data():
return {
"uuid": "e909063f-f7a2-4e6a-945c-f1b21314227d",
"spreadsheetId": "1GdDyNl6jWaeSwY_Ao2sA8yahQINPcnhRh9naGRIDGak",
"sheetId":"1206725099"
}
"sheetId": "1206725099",
}
19 changes: 10 additions & 9 deletions natural4-server/tests/test_endpoint.py
@@ -3,37 +3,38 @@
from sanic_testing.reusable import ReusableClient
from sanic.application.constants import ServerStage


def test_post(app: Sanic, post_data):
with open('tests/data/rodents.csv') as f:
with open("tests/data/rodents.csv") as f:
input_data = f.read()
with ReusableClient(app) as client:
for info in app.state.server_info:
info.stage = ServerStage.SERVING

post_data['csvString'] = input_data
request, response_post = client.post('/post', data=post_data)
post_data["csvString"] = input_data
request, response_post = client.post("/post", data=post_data)
assert response_post.status == 200

workdir_url = f'/workdir/{post_data['uuid']}/{post_data['spreadsheetId']}/{post_data['sheetId']}'

request, response_json = client.get(f'{workdir_url}/aajson/LATEST.json')
request, response_json = client.get(f"{workdir_url}/aajson/LATEST.json")
assert response_json.status == 200

sleep(1)

request, response_json = client.get(f'{workdir_url}/petri/LATEST.png')
request, response_json = client.get(f"{workdir_url}/petri/LATEST.png")
assert response_json.status == 200

request, response_json = client.get(f'{workdir_url}/petri/LATEST.svg')
request, response_json = client.get(f"{workdir_url}/petri/LATEST.svg")
assert response_json.status == 200

request, response_json = client.get(f'{workdir_url}/petri/LATEST-small.png')
request, response_json = client.get(f"{workdir_url}/petri/LATEST-small.png")
assert response_json.status == 200

start_time = time()
while time() - start_time < 60:
print('Waiting for PDF...')
request, response_pdf = client.get(f'{workdir_url}/pdf/LATEST.pdf')
print("Waiting for PDF...")
request, response_pdf = client.get(f"{workdir_url}/pdf/LATEST.pdf")
if response_pdf.status != 200:
sleep(5)
else:
