From 61c0aa8087f23a7f4f439733962ad03e2b3ca77e Mon Sep 17 00:00:00 2001 From: Chris van Run Date: Fri, 10 Jan 2025 11:18:41 +0100 Subject: [PATCH] Update grand_challenge_forge/partials/algorithm-template/inference.py.j2 Co-authored-by: Anne Mickan --- .../partials/algorithm-template/inference.py.j2 | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/grand_challenge_forge/partials/algorithm-template/inference.py.j2 b/grand_challenge_forge/partials/algorithm-template/inference.py.j2 index 1cc1415..ce6ac73 100644 --- a/grand_challenge_forge/partials/algorithm-template/inference.py.j2 +++ b/grand_challenge_forge/partials/algorithm-template/inference.py.j2 @@ -58,14 +58,14 @@ def run(): # Process the inputs: any way you'd like _show_torch_cuda_info() - # Some additional resources might be required, place these in one of two locations. + # Some additional resources might be required; include these in one of two ways. - # First location: part of the Docker-container image: resources/ + # Option 1: part of the Docker-container image: resources/ resource_dir = Path("/opt/app/resources") with open(resource_dir / "some_resource.txt", "r") as f: print(f.read()) - # Second location: part of the model tarball + # Option 2: upload them as a separate tarball to Grand Challenge (go to your Algorithm > Models). The resources in the tarball will be extracted to `model_dir` at runtime. model_dir = Path("/opt/ml/model") with open(model_dir / "a_tarball_subdirectory" / "some_tarball_resource.txt", "r") as f: print(f.read())