Commit

Added more functional parameters and FS license
Sandip117 committed Feb 15, 2024
1 parent 343b353 commit 4475a08
Showing 2 changed files with 77 additions and 84 deletions.
157 changes: 73 additions & 84 deletions fastsurfer_inference.py
@@ -7,6 +7,7 @@
from pftag import pftag
from pflog import pflog
from datetime import datetime
from typing import List
import sys
import os
import subprocess
@@ -24,7 +25,7 @@
logger.opt(colors = True)
logger.add(sys.stderr, format=logger_format)

__version__ = '1.3.6'
__version__ = '1.3.7'

DISPLAY_TITLE = r"""
__ _ __ _ __
@@ -35,104 +36,77 @@
|_| \__,_|___/\__|___/\__,_|_| |_| \___|_| |_|_| |_|_| \___|_| \___|_| |_|\___\___|
______
|______|
""" + "\t\t -- version " + __version__ + " --\n\n"
""" + "\t\t\t\t -- version " + __version__ + " --\n\n"


parser = ArgumentParser(description='A ChRIS plugin to send DICOMs to a remote PACS store',
parser = ArgumentParser(description='A ChRIS plugin to run FastSurfer for creating segmentation and surfaces',
formatter_class=ArgumentDefaultsHelpFormatter)
# Required options
# 1. Directory information (where to read from, where to write to)
parser.add_argument('--subjectDir',"--sd",
dest='subjectDir',
parser.add_argument('--fs_license',
dest='fs_license',
type=str,
help="directory (relative to <inputDir>) of subjects to process",
default="")

# 2. Options for the MRI volumes
# (name of in and output, order of interpolation if not conformed)
parser.add_argument('--iname', '--t1',
help=" Path to FreeSurfer license key file.",
default="/fastsurfer/fs_license.txt")
parser.add_argument('--t1',
type=str,
dest='iname',
dest='t1',
help='name of the input (raw) file to process (default: brain.mgz)',
default='brain.mgz')
parser.add_argument('--out_name', '--seg',
dest='oname',
parser.add_argument('--sid',
dest='sid',
type=str,
default='aparc.DKTatlas+aseg.deep.mgz',
help='name of the output segmented file')
parser.add_argument('--order',
dest='order',
type=int,
default=1,
help="interpolation order")

# 3. Options for log-file and search-tag
parser.add_argument('--subject',
dest='subject',
default="subject-000",
help='Subject ID for directory inside output dir to be created')
parser.add_argument('--seg',
dest='seg',
type=str,
default="*",
help='subject(s) to process. This expression is globbed.')
parser.add_argument('--log',
dest='logfile',
default="",
help='Path to segmented file inside inputdir')
parser.add_argument('--seg_log',
dest='seg_log',
type=str,
help='name of logfile (default: deep-seg.log)',
default='deep-seg.log')

# 4. Pre-trained weights -- NB NB NB -- currently (Jan 2021) these CANNOT
# be set by an enduser. These weight files are RELATIVE/INTERNAL to the
# container
parser.add_argument('--network_sagittal_path',
dest='network_sagittal_path',
type=str,
help="path to pre-trained sagittal network weights",
default='./checkpoints/Sagittal_Weights_FastSurferCNN/ckpts/Epoch_30_training_state.pkl')
parser.add_argument('--network_coronal_path',
dest='network_coronal_path',
type=str,
help="path to pre-trained coronal network weights",
default='./checkpoints/Coronal_Weights_FastSurferCNN/ckpts/Epoch_30_training_state.pkl')
parser.add_argument('--network_axial_path',
dest='network_axial_path',
type=str,
help="path to pre-trained axial network weights",
default='./checkpoints/Axial_Weights_FastSurferCNN/ckpts/Epoch_30_training_state.pkl')

# 5. Clean up and GPU/CPU options (disable cuda, change batchsize)
parser.add_argument('--clean',
dest='cleanup',
type=bool,
default=True,
parser.add_argument('--clean_seg',
dest='clean_seg',
action = 'store_true',
default=False,
help="if specified, clean up segmentation")
parser.add_argument('--no_cuda',
dest='no_cuda',
type=bool,
action = 'store_true',
default=False,
help='if specified, do not use GPU')
parser.add_argument('--batch_size',
dest='batch_size',
type=int,
default=8,
parser.add_argument('--batch',
dest='batch',
type=str,
default='8',
help="batch size for inference (default: 8")
parser.add_argument('--simple_run',
dest='simple_run',
default=False,
type=bool,
action = 'store_true',
help='simplified run: only analyze one subject')

# Adding check to parallel processing, default = false
parser.add_argument('--run_parallel',
dest='run_parallel',
type=bool,
parser.add_argument('--parallel',
dest='parallel',
action = 'store_true',
default=False,
help='if specified, allows execution on multiple GPUs')

parser.add_argument('--copyInputFiles',
dest='copyInputFiles',
type=str,
default="",
help="if specified, copy i/p files matching the input regex to o/p dir")
parser.add_argument('-f', '--fileFilter', default='dcm', type=str,
help='input file filter glob')
help='Run both hemispheres in parallel')
parser.add_argument('--seg_only',
dest='seg_only',
action = 'store_true',
default=False,
help='Run only FastSurferCNN (generate segmentation, do not run surface pipeline)')
parser.add_argument('--surf_only',
dest='surf_only',
action = 'store_true',
default=False,
help='Run surface pipeline only. The segmentation input has to exist already in this case.')
parser.add_argument('--seg_with_cc_only',
dest='seg_with_cc_only',
action = 'store_true',
default=False,
help=' Run FastSurferCNN (generate segmentation) and recon_surf until corpus callosum'
' (CC) is added in (no surface models will be created in this case!)')
parser.add_argument('-V', '--version', action='version',
version=f'%(prog)s {__version__}')
parser.add_argument( '--pftelDB',
@@ -168,7 +142,7 @@ def preamble_show(options) -> None:
parser=parser,
title='An app to efficiently perform cortical parcellation and segmentation on raw brain MRI images',
category='', # ref. https://chrisstore.co/plugins
min_memory_limit='4Gi', # supported units: Mi, Gi
min_memory_limit='16Gi', # supported units: Mi, Gi
min_cpu_limit='8000m', # millicores, e.g. "1000m" = 1 CPU core
min_gpu_limit=0 # set min_gpu_limit=1 to enable GPU
)
@@ -188,20 +162,35 @@ def main(options: Namespace, inputdir: Path, outputdir: Path):
"""
FS_SCRIPT = "/fastsurfer/run_fastsurfer.sh"
preamble_show(options)
mapper = PathMapper.file_mapper(inputdir, outputdir, glob=f"**/{options.iname}", fail_if_empty=False)
l_cli_params =[FS_SCRIPT]
l_cli_params.extend(get_param_list(options))
mapper = PathMapper.file_mapper(inputdir, outputdir, glob=f"**/{options.t1}", fail_if_empty=False)
for input_file, output_file in mapper:
l_cli_params = [FS_SCRIPT]
l_cli_params.extend(["--sd", outputdir,
"--t1", f"{input_file}",
"--sid", f"{options.subject}",
"--seg_only",
"--parallel"])
l_cli_params.extend(["--t1",f"{input_file}",
"--sd",f"{outputdir}"])
LOG(f"Running {FS_SCRIPT} on input: {input_file.name}")
try:
LOG(l_cli_params)
subprocess.call(l_cli_params)
except Exception as ex:
subprocess.call(FS_SCRIPT)

def get_param_list(options) -> List[str]:
"""
A dirty hack to transform CLI params from this module to
the FastSurfer shell script running inside the docker
container.
"""
list_param = []
for k,v in options.__dict__.items():
if k not in ["t1", "inputdir", "outputdir"] and options.__dict__[k]:
list_param.append(f"--{k}")
if options.__dict__[k]!=True:
list_param.append(v)

return list_param



if __name__ == '__main__':
main()
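For reference (not part of the commit itself), here is a minimal, self-contained sketch of what the new get_param_list() helper produces: options whose value is True (the store_true flags) are passed to run_fastsurfer.sh without a value, falsy options are dropped, and --t1 plus the ChRIS input/output directories are appended separately in main(). The Namespace values below are illustrative only.

# Sketch only -- mirrors the get_param_list() helper added in this commit.
from argparse import Namespace
from typing import List

def get_param_list(options) -> List[str]:
    list_param = []
    for k, v in options.__dict__.items():
        # '--t1' and the ChRIS input/output dirs are appended separately in main()
        if k not in ["t1", "inputdir", "outputdir"] and v:
            list_param.append(f"--{k}")
            if v is not True:  # store_true flags are passed without a value
                list_param.append(v)
    return list_param

opts = Namespace(fs_license="/fastsurfer/fs_license.txt", t1="brain.mgz",
                 sid="subject-000", batch="8", seg_only=True, no_cuda=False)
print(get_param_list(opts))
# ['--fs_license', '/fastsurfer/fs_license.txt', '--sid', 'subject-000',
#  '--batch', '8', '--seg_only']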
4 changes: 4 additions & 0 deletions fs_license.txt
@@ -0,0 +1,4 @@
[email protected]
47396
*C/n61HFX9xDY
FSy/TplrCz2mM
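Taken together with the default argument values above, the run_fastsurfer.sh invocation that main() builds for a single mapped brain.mgz comes out roughly as in the sketch below. The /incoming and /outgoing paths are placeholders for the ChRIS input and output directories, not values from this commit.

# Illustrative only: approximate command assembled by main() with default options.
import shlex

cmd = [
    "/fastsurfer/run_fastsurfer.sh",
    "--fs_license", "/fastsurfer/fs_license.txt",  # default added in this commit
    "--sid", "subject-000",
    "--seg_log", "deep-seg.log",
    "--batch", "8",
    "--t1", "/incoming/brain.mgz",   # input file found by PathMapper
    "--sd", "/outgoing",             # ChRIS output directory
]
print(shlex.join(cmd))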
